This is an automated email from the ASF dual-hosted git repository.

taragolis pushed a commit to branch revert-29408-docker-compose-change-example
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 9da37717449f5267b4ef8c4962cb89f6fa5f102c
Author: Andrey Anshin <[email protected]>
AuthorDate: Tue Feb 21 01:33:42 2023 +0400

    Revert "Improve health checks in example docker-compose and clarify usage 
(#29408)"
    
    This reverts commit 08bec93e5cb651c5180af13bad1c43d763c7583d.
---
 .../howto/docker-compose/docker-compose.yaml       | 40 ++++++----------------
 1 file changed, 11 insertions(+), 29 deletions(-)

diff --git a/docs/apache-airflow/howto/docker-compose/docker-compose.yaml 
b/docs/apache-airflow/howto/docker-compose/docker-compose.yaml
index 719fffe8ab..38e8e8b471 100644
--- a/docs/apache-airflow/howto/docker-compose/docker-compose.yaml
+++ b/docs/apache-airflow/howto/docker-compose/docker-compose.yaml
@@ -36,15 +36,11 @@
 # _AIRFLOW_WWW_USER_PASSWORD   - Password for the administrator account (if 
requested).
 #                                Default: airflow
 # _PIP_ADDITIONAL_REQUIREMENTS - Additional PIP requirements to add when 
starting all containers.
-#                                Use this option ONLY for quick checks. 
Installing requirements at container
-#                                startup is done EVERY TIME the service is 
started.
-#                                A better way is to build a custom image or 
extend the official image
-#                                as described in 
https://airflow.apache.org/docs/docker-stack/build.html.
 #                                Default: ''
 #
 # Feel free to modify this file to suit your needs.
 ---
-version: '3.8'
+version: '3'
 x-airflow-common:
   &airflow-common
   # In order to add custom dependencies or upgrade provider packages you can 
use your extended image.
@@ -64,13 +60,6 @@ x-airflow-common:
     AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true'
     AIRFLOW__CORE__LOAD_EXAMPLES: 'true'
     AIRFLOW__API__AUTH_BACKENDS: 
'airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session'
-    # yamllint disable rule:line-length
-    # Use simple http server on scheduler for health checks
-    # See 
https://airflow.apache.org/docs/apache-airflow/stable/administration-and-deployment/logging-monitoring/check-health.html#scheduler-health-check-server
-    # yamllint enable rule:line-length
-    AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK: 'true'
-    # WARNING: Use _PIP_ADDITIONAL_REQUIREMENTS option ONLY for a quick checks
-    # for other purpose (development, test and especially production usage) 
build/extend Airflow image.
     _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-}
   volumes:
     - ${AIRFLOW_PROJ_DIR:-.}/dags:/opt/airflow/dags
@@ -95,9 +84,8 @@ services:
       - postgres-db-volume:/var/lib/postgresql/data
     healthcheck:
       test: ["CMD", "pg_isready", "-U", "airflow"]
-      interval: 10s
+      interval: 5s
       retries: 5
-      start_period: 5s
     restart: always
 
   redis:
@@ -106,23 +94,21 @@ services:
       - 6379
     healthcheck:
       test: ["CMD", "redis-cli", "ping"]
-      interval: 10s
+      interval: 5s
       timeout: 30s
       retries: 50
-      start_period: 30s
     restart: always
 
   airflow-webserver:
     <<: *airflow-common
     command: webserver
     ports:
-      - "8080:8080"
+      - 8080:8080
     healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:8080/health"]
-      interval: 30s
+      interval: 10s
       timeout: 10s
       retries: 5
-      start_period: 30s
     restart: always
     depends_on:
       <<: *airflow-common-depends-on
@@ -133,11 +119,10 @@ services:
     <<: *airflow-common
     command: scheduler
     healthcheck:
-      test: ["CMD", "curl", "--fail", "http://localhost:8974/health"]
-      interval: 30s
+      test: ["CMD-SHELL", 'airflow jobs check --job-type SchedulerJob 
--hostname "$${HOSTNAME}"']
+      interval: 10s
       timeout: 10s
       retries: 5
-      start_period: 30s
     restart: always
     depends_on:
       <<: *airflow-common-depends-on
@@ -151,10 +136,9 @@ services:
       test:
         - "CMD-SHELL"
         - 'celery --app airflow.executors.celery_executor.app inspect ping -d 
"celery@$${HOSTNAME}"'
-      interval: 30s
+      interval: 10s
       timeout: 10s
       retries: 5
-      start_period: 30s
     environment:
       <<: *airflow-common-env
       # Required to handle warm shutdown of the celery workers properly
@@ -171,10 +155,9 @@ services:
     command: triggerer
     healthcheck:
       test: ["CMD-SHELL", 'airflow jobs check --job-type TriggererJob 
--hostname "$${HOSTNAME}"']
-      interval: 30s
+      interval: 10s
       timeout: 10s
       retries: 5
-      start_period: 30s
     restart: always
     depends_on:
       <<: *airflow-common-depends-on
@@ -281,13 +264,12 @@ services:
     profiles:
       - flower
     ports:
-      - "5555:5555"
+      - 5555:5555
     healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:5555/"]
-      interval: 30s
+      interval: 10s
       timeout: 10s
       retries: 5
-      start_period: 30s
     restart: always
     depends_on:
       <<: *airflow-common-depends-on

Reply via email to