schuups commented on PR #61972:
URL: https://github.com/apache/airflow/pull/61972#issuecomment-3938977143

   For context, I am just practicing with Airflow 3.0 for the first time within 
my LAN, primarily for learning about EdgeExecutor.
   
   I deploy it with docker compose. The central instance relies on a 
pre-existing shared postgres within my LAN. TLS termination is done by a shared 
traefik instance. The hosts are two raspberry pi: one with postgres, traefik 
and airflow "central", and the other with just the edge executor.
   
   
   airflow.cfg
   ```
   [core]
   executor = airflow.providers.edge3.executors.EdgeExecutor
   auth_manager = 
airflow.api_fastapi.auth.managers.simple.simple_auth_manager.SimpleAuthManager
   load_examples = False
   parallelism = 1
   max_active_tasks_per_dag = 1
   max_active_runs_per_dag = 1
   dagbag_import_timeout = 30
   dagbag_import_error_tracebacks = False
   
   [edge]
   api_enabled = True
   api_url = https://airflow.lan
   
   [api]
   host = 0.0.0.0
   port = 8080
   workers = 1
   worker_timeout = 120
   expose_config = False
   expose_stacktrace = False
   
   [scheduler]
   parsing_cleanup_interval = 300
   max_tis_per_query = 2
   task_queued_timeout = 60
   task_queued_timeout_check_interval = 60
   min_file_process_interval = 60
   
   [dag_processor]
   parsing_processes = 1
   refresh_interval = 60
   min_file_process_interval = 60
   
   [triggerer]
   capacity = 10
   ```
   
   docker-compose.yml
   ```
   x-airflow-common:
     &airflow-common
     build: .
     image: my_airflow:3.1.7
     env_file:
       - .env
     environment:
       &airflow-common-env
       AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: 
postgresql+psycopg2://admin:[email protected]:5432/airflow
       AIRFLOW_CONFIG: '/opt/airflow/config/airflow.cfg'
       AIRFLOW__CORE__SIMPLE_AUTH_MANAGER_ALL_ADMINS: 'true'
       _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME}
       _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD}
       AIRFLOW__API_AUTH__JWT_SECRET: ${AIRFLOW__API_AUTH__JWT_SECRET}
       AIRFLOW__API__BASE_URL: http://container_airflow_webserver:8080
       AIRFLOW__WEBSERVER__EXPOSE_CONFIG: 'True'
       AIRFLOW__CORE__EXECUTOR: "airflow.providers.edge3.executors.EdgeExecutor"
       AIRFLOW__EDGE__API_ENABLED: 'True'
        AIRFLOW__EDGE__API_URL: "https://airflow.lan"
       AIRFLOW__CORE__DAGS_FOLDER: /opt/airflow/dags
     volumes:
       - /var/lan/airflow/logs:/opt/airflow/logs
       - ./config:/opt/airflow/config
       - ./plugins:/opt/airflow/plugins
       - /opt/lan/dags:/opt/airflow/dags
     networks:
       - my_services_internal
     user: "1000:0"
   
   services:
     airflow-webserver:
       image: airflow:3.1.7
       <<: *airflow-common
       container_name: container_airflow_webserver
       command: airflow api-server --apps all
       ports:
         - "8080:8080"
       restart: on-failure:3
   
     airflow-scheduler:
       <<: *airflow-common
       container_name: container_airflow_scheduler
       command: scheduler
       restart: on-failure:3
   
     airflow-triggerer:
       <<: *airflow-common
       container_name: container_airflow_triggerer
       command: triggerer
       restart: on-failure:3
   
     airflow-dag-processor:
       <<: *airflow-common
       container_name: container_airflow_dag_processor
       command: dag-processor
       restart: on-failure:3
   
     airflow-init:
       <<: *airflow-common
       container_name: container_airflow_init
       entrypoint: /bin/bash
       restart: "no"
       command:
         - -c
         - |
           echo
           echo "Airflow version:"
           /entrypoint airflow version
       environment:
         <<: *airflow-common-env
         _AIRFLOW_DB_MIGRATE: 'true'
         _AIRFLOW_WWW_USER_CREATE: 'true'
       user: "1000:0"
   
   networks:
     my_services_internal:
       external: true
    ```
   
   Dockerfile
   ```
   FROM apache/airflow:3.1.7-python3.13
   
   RUN pip install apache-airflow-providers-edge3
   ```
   
   
   docker-compose.yml (on the worker node)
   ```
   services:
     airflow-worker:
       # Use the SAME image as your central instance to ensure version match
       build: .
       image: my_airflow:3.1.7
       container_name: container_airflow_worker
       env_file:
       - .env
       restart: always
       environment:
          AIRFLOW__API__BASE_URL: "https://airflow.lan/api/v2"
          AIRFLOW__EDGE__API_URL: "https://airflow.lan/edge_worker/v1/rpcapi"
         AIRFLOW__API_AUTH__JWT_SECRET: ${AIRFLOW__API_AUTH__JWT_SECRET}
   
         AIRFLOW__CORE__EXECUTOR: 
"airflow.providers.edge3.executors.EdgeExecutor"
         AIRFLOW__CORE__DAGS_FOLDER: /opt/airflow/dags
         AIRFLOW__EDGE__JOB_POLL_INTERVAL: 30
   
       volumes:
         - /opt/lan/dags:/opt/airflow/dags
         - /var/lan/airflow/logs:/opt/airflow/logs
   
       user: "1000:0"
   
       command: airflow edge worker --edge-hostname data --queues data
   ```
   
   Both .env files contain the same `AIRFLOW__API_AUTH__JWT_SECRET` and 
`AIRFLOW__CORE__FERNET_KEY`. The dags folder is also synched on both hosts.
   
   As described in earlier comments, 
`https://airflow.lan/edge_worker/v1/rpcapi` became available (no more 404) only 
after adding `--apps all` to the `airflow api-server` command.
   
   My issue is solved; this PR is just to spare others the same "pain" :)
   


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to