saeed2402 opened a new issue #14523:
URL: https://github.com/apache/airflow/issues/14523


   Running Airflow on Docker Compose on Windows 10.
   Docker-compose file:
   
   ```yaml
   version: '3'
   x-airflow-common:
     &airflow-common
     image: ${AIRFLOW_IMAGE_NAME:-apache/airflow:1.10.12}
     environment:
       &airflow-common-env
       AIRFLOW__CORE__EXECUTOR: CeleryExecutor
       AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
       AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow
       AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0
       AIRFLOW__CORE__FERNET_KEY: ''
       AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true'
       AIRFLOW__CORE__LOAD_EXAMPLES: 'false'
       AIRFLOW__CORE__DAG_CONCURRENCY: '16'
     volumes:
       - ./dags:/opt/airflow/dags
       - ./logs:/opt/airflow/logs
       - ./plugins:/opt/airflow/plugins
     user: "${AIRFLOW_UID:-50000}:${AIRFLOW_GID:-50000}"
     depends_on:
       redis:
         condition: service_healthy
       postgres:
         condition: service_healthy
   
   services:
     postgres:
       image: postgres:13
       environment:
         POSTGRES_SERVER: db
         POSTGRES_USER: airflow
         POSTGRES_PASSWORD: airflow
         POSTGRES_DB: airflow
         POSTGRES_PORT: 5432
       volumes:
         - postgres-db-volume:/var/lib/postgresql/data
       healthcheck:
         test: ["CMD", "pg_isready", "-U", "airflow"]
         interval: 5s
         retries: 5
       restart: always
       ports:
         - 5432:5432
   
     pgadmin:
       image: dpage/pgadmin4:latest
       depends_on:
         - postgres
       environment:
         - PGADMIN_LISTEN_PORT=5050
         - [email protected]
         - PGADMIN_DEFAULT_PASSWORD=admin
       expose:
         - 5050
       ports: 
         - 5050:5050
   
     redis:
       image: redis:latest
       ports:
         - 6379:6379
       healthcheck:
         test: ["CMD", "redis-cli", "ping"]
         interval: 5s
         timeout: 30s
         retries: 50
       restart: always
   
     airflow-webserver:
       <<: *airflow-common
       command: webserver
       ports:
         - 8080:8080
       healthcheck:
         test: ["CMD", "curl", "--fail", "http://localhost:8080/health";]
         interval: 10s
         timeout: 10s
         retries: 5
       restart: always
   
     airflow-scheduler:
       <<: *airflow-common
       command: scheduler
       restart: always
   
     airflow-worker:
       <<: *airflow-common
       command: worker
       restart: always
   
     airflow-init:
       <<: *airflow-common
       command: version
       environment:
         <<: *airflow-common-env
         _AIRFLOW_DB_UPGRADE: 'true'
         _AIRFLOW_WWW_USER_CREATE: 'true'
         _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
         _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}
   
     flower:
       <<: *airflow-common
       command: flower
       ports:
         - 5555:5555
       healthcheck:
         test: ["CMD", "curl", "--fail", "http://localhost:5555/";]
         interval: 10s
         timeout: 10s
         retries: 5
       restart: always
       
   volumes:
     postgres-db-volume:
   ```
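
   For reference, a minimal sketch to confirm the metadata database is reachable with the connection string configured above (an assumption on my part: since port 5432 is published, the in-network host "postgres" becomes "localhost" from the host machine):

   ```python
   # Connectivity check for the metadata DB configured in the compose file.
   # Assumes the published 5432 port, so "postgres" is reachable as
   # "localhost" from the Windows host; credentials are from the compose file.
   from sqlalchemy import create_engine

   engine = create_engine("postgresql+psycopg2://airflow:airflow@localhost:5432/airflow")
   with engine.connect() as conn:
       print(conn.execute("SELECT version()").scalar())
   ```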
   
   First I ran "docker-compose up airflow-init". Output:
   
   ```
   Recreating airflow_test_2_redis_1  ... done
   Creating airflow_test_2_postgres_1 ... done
   Recreating airflow_init            ... done
   Attaching to airflow_test_2_airflow-init_1
   airflow-init_1       | DB_BACKEND=postgresql+psycopg2
   airflow-init_1       | DB_HOST=postgres
   airflow-init_1       | DB_PORT=5432
   airflow-init_1       |
   airflow-init_1       | 1.10.12
   airflow_test_2_airflow-init_1 exited with code 0
   ```
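
   (To see which Alembic revision the init step left the metadata database at, a quick sketch, again assuming the published 5432 port and the airflow/airflow credentials above:)

   ```python
   # Read the Alembic migration revision of the metadata DB after airflow-init.
   # Assumes the published 5432 port and the credentials from the compose file.
   import psycopg2

   conn = psycopg2.connect("postgresql://airflow:airflow@localhost:5432/airflow")
   with conn.cursor() as cur:
       cur.execute("SELECT version_num FROM alembic_version")
       print(cur.fetchone())
   conn.close()
   ```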
   
   
   Then I ran "docker-compose up" and was able to log in to the Airflow UI. I copied the new DAG into the dags folder. DAG code:
   
   ```python
   from datetime import timedelta
   
   # The DAG object; we'll need this to instantiate a DAG
   from airflow import DAG
   # Operators; we need this to operate!
   from airflow.operators.bash_operator import BashOperator
   from airflow.utils.dates import days_ago
   # These args will get passed on to each operator
   # You can override them on a per-task basis during operator initialization
   default_args = {
       'owner': 'airflow',
       'depends_on_past': False,
       'start_date': days_ago(2),
       'email': ['[email protected]'],
       'email_on_failure': False,
       'email_on_retry': False,
       'retries': 1,
       'retry_delay': timedelta(minutes=5),
       # 'queue': 'bash_queue',
       # 'pool': 'backfill',
       # 'priority_weight': 10,
       # 'end_date': datetime(2016, 1, 1),
       # 'wait_for_downstream': False,
       # 'dag': dag,
       # 'sla': timedelta(hours=2),
       # 'execution_timeout': timedelta(seconds=300),
       # 'on_failure_callback': some_function,
       # 'on_success_callback': some_other_function,
       # 'on_retry_callback': another_function,
       # 'sla_miss_callback': yet_another_function,
       # 'trigger_rule': 'all_success'
   }
   dag = DAG(
       'tutorial-001',
       default_args=default_args,
       description='A simple tutorial DAG',
       schedule_interval=timedelta(days=1),
   )
   
   # t1, t2 and t3 are examples of tasks created by instantiating operators
   t1 = BashOperator(
       task_id='print_date',
       bash_command='date',
       dag=dag,
   )
   
   t2 = BashOperator(
       task_id='sleep',
       depends_on_past=False,
       bash_command='sleep 5',
       retries=3,
       dag=dag,
   )
   dag.doc_md = __doc__
   
   t1.doc_md = """\
   #### Task Documentation
   You can document your task using the attributes `doc_md` (markdown),
   `doc` (plain text), `doc_rst`, `doc_json`, `doc_yaml` which gets
   rendered in the UI's Task Instance Details page.
   
![img](http://montcs.bloomu.edu/~bobmon/Semesters/2012-01/491/import%20soul.png)
   """
   templated_command = """
   {% for i in range(5) %}
       echo "{{ ds }}"
       echo "{{ macros.ds_add(ds, 7)}}"
       echo "{{ params.my_param }}"
   {% endfor %}
   """
   
   t3 = BashOperator(
       task_id='templated',
       depends_on_past=False,
       bash_command=templated_command,
       params={'my_param': 'Parameter I passed in'},
       dag=dag,
   )
   
   t1 >> [t2, t3]
   ```
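
   (A minimal sketch to check that this file parses, meant to be run inside one of the Airflow containers where /opt/airflow/dags is mounted; DagBag is the same loader the scheduler uses, and import_errors surfaces parse failures:)

   ```python
   # Parse check for the DAG folder, run inside an Airflow container
   # (the /opt/airflow/dags path matches the volume mount above).
   from airflow.models import DagBag

   bag = DagBag(dag_folder="/opt/airflow/dags", include_examples=False)
   print("dags found:", list(bag.dags))
   print("import errors:", bag.import_errors)
   ```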
   
   Error message:
   
   ```
   airflow-scheduler_1  | sqlalchemy.exc.IntegrityError: (psycopg2.errors.NotNullViolation) null value in column "concurrency" of relation "dag" violates not-null constraint
   airflow-scheduler_1  | DETAIL:  Failing row contains (yarra-demo-usecase-1, t, f, t, 2021-02-28 03:11:20.419498+00, null, null, null, null, /opt/airflow/dags/demo-01.py, airflow, null, null, {"type": "timedelta", "attrs": {"days": 0, "seconds": 600, "micr..., null, null, null, null, null).
   airflow-scheduler_1  |
   airflow-scheduler_1  | [SQL: INSERT INTO dag (dag_id, root_dag_id, is_paused, is_subdag, is_active, last_scheduler_run, last_pickled, last_expired, scheduler_lock, pickle_id, fileloc, owners, description, default_view, schedule_interval) VALUES (%(dag_id)s, %(root_dag_id)s, %(is_paused)s, %(is_subdag)s, %(is_active)s, %(last_scheduler_run)s, %(last_pickled)s, %(last_expired)s, %(scheduler_lock)s, %(pickle_id)s, %(fileloc)s, %(owners)s, %(description)s, %(default_view)s, %(schedule_interval)s)]
   airflow-scheduler_1  | [parameters: {'dag_id': 'yarra-demo-usecase-1', 'root_dag_id': None, 'is_paused': True, 'is_subdag': False, 'is_active': True, 'last_scheduler_run': datetime.datetime(2021, 2, 28, 3, 11, 20, 419498, tzinfo=<Timezone [UTC]>), 'last_pickled': None, 'last_expired': None, 'scheduler_lock': None, 'pickle_id': None, 'fileloc': '/opt/airflow/dags/yarra-demo-usecase-1.py', 'owners': 'airflow', 'description': None, 'default_view': None, 'schedule_interval': '{"type": "timedelta", "attrs": {"days": 0, "seconds": 600, "microseconds": 0}}'}]
   airflow-scheduler_1  | (Background on this error at: http://sqlalche.me/e/13/gkpj)
   ```
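
   Since the INSERT above never sets the "concurrency" column, one way to confirm what the schema actually expects is to inspect that column directly. A sketch, assuming the published 5432 port; a NOT NULL with no default here would suggest the database schema was created by a different Airflow version than the 1.10.12 scheduler performing the insert:

   ```python
   # Inspect the "dag" table's "concurrency" column in the metadata DB.
   # The failing INSERT omits it, so NOT NULL + no default would explain the error.
   # Assumes the published 5432 port and the airflow/airflow credentials.
   import psycopg2

   conn = psycopg2.connect("postgresql://airflow:airflow@localhost:5432/airflow")
   with conn.cursor() as cur:
       cur.execute(
           "SELECT column_name, is_nullable, column_default "
           "FROM information_schema.columns "
           "WHERE table_name = 'dag' AND column_name = 'concurrency'"
       )
       print(cur.fetchone())
   conn.close()
   ```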
   
   
   Any help is highly appreciated.

