KimchaC commented on issue #8605:
URL: https://github.com/apache/airflow/issues/8605#issuecomment-626331675


   Here's my docker-compose config using LocalExecutor...
   
   ### docker-compose.airflow.yml:
   ```yml
   version: '2.1'
   services:
       airflow:
           # image: apache/airflow:1.10.10
           build:
               context: .
               args:
                   - DOCKER_UID=${DOCKER_UID-1000} 
               dockerfile: Dockerfile
           restart: always
           environment:
                - AIRFLOW__CORE__SQL_ALCHEMY_CONN=postgres://airflow:${POSTGRES_PW-airflow}@postgres:5432/airflow
                - AIRFLOW__CORE__FERNET_KEY=${AF_FERNET_KEY-GUYoGcG5xdn5K3ysGG3LQzOt3cc0UBOEibEPxugDwas=}
               - AIRFLOW__CORE__EXECUTOR=LocalExecutor
               - AIRFLOW__CORE__AIRFLOW_HOME=/opt/airflow/
               - AIRFLOW__CORE__LOAD_EXAMPLES=False
               - AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS=False
               - AIRFLOW__CORE__LOGGING_LEVEL=${AF_LOGGING_LEVEL-info}
           volumes:
               - ../airflow/dags:/opt/airflow/dags:z
               - ../airflow/plugins:/opt/airflow/plugins:z
               - ./volumes/airflow_data_dump:/opt/airflow/data_dump:z
               - ./volumes/airflow_logs:/opt/airflow/logs:z
           healthcheck:
               test: ["CMD-SHELL", "[ -f /opt/airflow/airflow-webserver.pid ]"]
               interval: 30s
               timeout: 30s
               retries: 3
   ```
   
   ### docker-compose.yml:
   ```yml
   version: '2.1'
   services:
       postgres:
           image: postgres:9.6
           container_name: af_postgres
           environment:
               - POSTGRES_USER=airflow
               - POSTGRES_PASSWORD=${POSTGRES_PW-airflow}
               - POSTGRES_DB=airflow
               - PGDATA=/var/lib/postgresql/data/pgdata
           volumes:
               - ./volumes/postgres_data:/var/lib/postgresql/data/pgdata:Z
           ports:
               -  127.0.0.1:5432:5432
   
       webserver:
           extends:
               file: docker-compose.airflow.yml
               service: airflow
           container_name: af_webserver
           command: webserver
           depends_on:
               - postgres
           ports:
               - ${DOCKER_PORTS-8080}
           networks:
               - proxy
               - default
           environment:
               # Web Server Config
               - AIRFLOW__WEBSERVER__DAG_DEFAULT_VIEW=graph
               - AIRFLOW__WEBSERVER__HIDE_PAUSED_DAGS_BY_DEFAULT=true
               - AIRFLOW__WEBSERVER__RBAC=true
   
               # Web Server Performance tweaks
               # 2 * NUM_CPU_CORES + 1
               - AIRFLOW__WEBSERVER__WORKERS=${AF_WORKERS-2}
               # Restart workers every 30min instead of 30seconds
               - AIRFLOW__WEBSERVER__WORKER_REFRESH_INTERVAL=1800
           labels:
               - "traefik.enable=true"
               - "traefik.http.routers.airflow.rule=Host(`af.example.com`)"
               - "traefik.http.routers.airflow.middlewares=admin-auth@file"
   
       scheduler:
           extends:
               file: docker-compose.airflow.yml
               service: airflow
           container_name: af_scheduler
           command: scheduler
           depends_on:
               - postgres
           environment:
               # Performance Tweaks
                # Reduce how often DAGs are reloaded to dramatically reduce CPU use
                - AIRFLOW__SCHEDULER__MIN_FILE_PROCESS_INTERVAL=${AF_MIN_FILE_PROCESS_INTERVAL-60}
               - AIRFLOW__SCHEDULER__MAX_THREADS=${AF_THREADS-1}
   
   networks:
       proxy:
           external: true
    ```
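   
   Both compose files read their secrets and tunables from environment variables with inline fallbacks (the `${VAR-default}` syntax), so they can be overridden from a `.env` file next to `docker-compose.yml`. A minimal sketch (the values below are placeholders, not the ones actually in use):
    ```sh
    # .env -- docker-compose picks this file up automatically from the project directory
    DOCKER_UID=1000                     # build arg; should match `id -u` on the host
    POSTGRES_PW=change-me               # shared by postgres and the SQL_ALCHEMY_CONN string
    AF_FERNET_KEY=GUYoGcG5xdn5K3ysGG3LQzOt3cc0UBOEibEPxugDwas=   # generate your own key for real use
    AF_LOGGING_LEVEL=info
    DOCKER_PORTS=8080:8080
    AF_WORKERS=2
    AF_MIN_FILE_PROCESS_INTERVAL=60
    AF_THREADS=1
    ```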
   
   ### Dockerfile:
   ```dockerfile
   # Custom Dockerfile
   FROM apache/airflow:1.10.10
   
   # Install mssql support & dag dependencies
   USER root
    RUN apt-get update -yqq \
        && apt-get install -y gcc freetds-dev \
        && apt-get install -y git procps \
        && apt-get install -y vim
    RUN pip install apache-airflow[mssql,ssh,s3,slack]
    RUN pip install azure-storage-blob sshtunnel google-api-python-client oauth2client \
        && pip install git+https://github.com/infusionsoft/Official-API-Python-Library.git \
        && pip install rocketchat_API
   
    # This fixes permission issues on Linux.
    # The airflow user should have the same UID as the user running docker on the host system.
    # `make build` adjusts this value automatically.
    ARG DOCKER_UID
    RUN \
        : "${DOCKER_UID:?Build argument DOCKER_UID needs to be set and non-empty. Use 'make build' to set it automatically.}" \
        && usermod -u ${DOCKER_UID} airflow \
        && find / -path /proc -prune -o -user 50000 -exec chown -h airflow {} \; \
        && echo "Set airflow's uid to ${DOCKER_UID}"
   
   USER airflow
   ```
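   
   The `build` target in the Makefile below just wraps `docker-compose build`, so it isn't obvious from this snippet how `DOCKER_UID` gets populated. One way to wire it up (a sketch, not necessarily how `make build` does it here) is to export the calling user's UID before building; compose then forwards it as the build arg:
    ```sh
    # Build the image with the host user's UID so the bind-mounted
    # dags/, plugins/ and logs/ directories stay writable from the host.
    DOCKER_UID=$(id -u) docker-compose build
    ```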
   
   ### Makefile
   And here's my Makefile to control the containers with commands like `make run`:
   ```makefile
   SERVICE = "scheduler"
   TITLE = "airflow containers"
   ACCESS = "http://af.example.com";
   
   .PHONY: run
   
   build:
        docker-compose build
   
   run:
        @echo "Starting $(TITLE)"
        docker-compose up -d
        @echo "$(TITLE) running on $(ACCESS)"
   
   runf:
        @echo "Starting $(TITLE)"
        docker-compose up
   
   stop:
        @echo "Stopping $(TITLE)"
        docker-compose down
   
   restart: stop print-newline run
   
   tty:
        docker-compose run --rm --entrypoint='' $(SERVICE) bash
   
   ttyr:
        docker-compose run --rm --entrypoint='' -u root $(SERVICE) bash
   
   attach:
        docker-compose exec $(SERVICE) bash
   
   attachr:
        docker-compose exec -u root $(SERVICE) bash
   
   logs:
        docker-compose logs --tail 50 --follow $(SERVICE)
   
   conf:
        docker-compose config
   
   initdb:
        docker-compose run --rm $(SERVICE) initdb
   
   upgradedb:
        docker-compose run --rm $(SERVICE) upgradedb
   
   print-newline:
        @echo ""
        @echo ""
   ```
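   
   First run is roughly: build the image, initialise the metadata database once, then bring everything up. A sketch using the targets above (`SERVICE` can be overridden per call):
    ```sh
    make build                       # builds the custom image from the Dockerfile
    make initdb                      # one-off: runs the image's initdb command against postgres
    make run                         # starts postgres, webserver and scheduler detached
    make logs SERVICE=webserver      # tail the webserver logs instead of the default scheduler
    ```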

