Hi Franco,

Thank you. I am going to try that right now. Appreciate it.

Anthony

On Aug 9, 2021, at 8:47 AM, Franco Peschiera <[email protected]> wrote:

I don't see that you have configured the AIRFLOW_HOME environment variable. Try 
setting it to the absolute path so that Airflow can find your DAGs. There is also 
an environment variable, AIRFLOW__CORE__DAGS_FOLDER, through which you can pass 
the path to the DAGs folder directly.
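
For reference, that could look roughly like this in the x-airflow-common 
environment block of the compose file you pasted (just a sketch; /opt/airflow is 
the default AIRFLOW_HOME inside the official image, so adjust the paths to 
wherever the DAGs actually end up in the container):

  environment:
    &airflow-common-env
    AIRFLOW_HOME: /opt/airflow
    AIRFLOW__CORE__DAGS_FOLDER: /opt/airflow/dags
    # ...keep the rest of the existing variables...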

On Mon, Aug 9, 2021, 14:39 Anthony Joyce <[email protected]> wrote:
Hi all,

This is my first time on the email list, so thank you in advance for the help.

Here is my situation:

We are running Airflow 1.10.10 (a pretty old version at this point) without a 
container, installed from pip on CentOS. Instead of updating Anaconda and 
dealing with dependency hell, I decided to pull the official apache/airflow 
Docker image and try to configure it against our existing metadata database 
and DAGs. The stack seems to have initialized successfully, picking up our 
Variables and Connections from our existing Postgres 13 metadata database, and 
all containers are healthy at this point. However, I am having a problem 
connecting our external Airflow DAGs folder (~/airflow/dags). I have copied the 
contents of our ~/airflow/dags folder into the ./dags folder, but that doesn’t 
seem to help.

Do you all have any advice/suggestions regarding this issue?

Here is the redacted docker-compose.yaml file:

Thank you for the help!

Best,

Anthony

version: '3'
x-airflow-common:
  &airflow-common
  image: ${AIRFLOW_IMAGE_NAME:-apache/airflow:2.1.2}
  environment:
    &airflow-common-env
    AIRFLOW__CORE__EXECUTOR: CeleryExecutor
    AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow@{host}/airflow
    AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow@{host}/airflow
    AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0
    AIRFLOW__CORE__FERNET_KEY: {key}
    AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true'
    AIRFLOW__CORE__LOAD_EXAMPLES: 'false'
    AIRFLOW__API__AUTH_BACKEND: 'airflow.api.auth.backend.basic_auth'
    _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-}
  volumes:
    - ./dags:/opt/airflow/dags
    - ./logs:/opt/airflow/logs
    #- /home/etl/airflow/plugins/:/opt/airflow/plugins
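    # Note: the ./dags mount above maps the compose project's local dags folder
    # onto the image's default DAGs location. To reuse the existing DAGs
    # directly rather than copying them into ./dags, a bind mount of the host
    # folder (host path assumed here, by analogy with the plugins line above)
    # should also work, e.g.:
    #   - /home/etl/airflow/dags:/opt/airflow/dags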
  user: "${AIRFLOW_UID:-50000}:${AIRFLOW_GID:-0}"
  depends_on:
    redis:
      condition: service_healthy
    postgres:
      condition: service_healthy

services:
  postgres:
    image: postgres:13
    environment:
      POSTGRES_USER: airflow
      POSTGRES_PASSWORD: airflow
      POSTGRES_DB: airflow
    volumes:
      - postgres-db-volume:/localdata/pgdata
    healthcheck:
      test: ["CMD", "pg_isready", "-U", "airflow"]
      interval: 5s
      retries: 5
    restart: always

  redis:
    image: redis:latest
    ports:
      - 6379:6379
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 5s
      timeout: 30s
      retries: 50
    restart: always

  airflow-webserver:
    <<: *airflow-common
    command: webserver
    ports:
      - 8080:8080
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:8080/health"]
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always

  airflow-scheduler:
    <<: *airflow-common
    command: scheduler
    healthcheck:
      test: ["CMD-SHELL", 'airflow jobs check --job-type SchedulerJob --hostname "$${HOSTNAME}"']
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always

  airflow-worker:
    <<: *airflow-common
    command: celery worker
    healthcheck:
      test:
        - "CMD-SHELL"
        - 'celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"'
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always

  airflow-init:
    <<: *airflow-common
    command: version
    environment:
      <<: *airflow-common-env
      _AIRFLOW_DB_UPGRADE: 'true'
      _AIRFLOW_WWW_USER_CREATE: 'true'
      _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
      _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}

  flower:
    <<: *airflow-common
    command: celery flower
    ports:
      - 5555:5555
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:5555/"]
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always

volumes:
  postgres-db-volume:



