This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new aa7ed8c0240 Run DB check only for core components in prod entrypoint (#63413)
aa7ed8c0240 is described below
commit aa7ed8c02405b2b7a7dd54d5f0aab860a09378a0
Author: Shae Alhusayni <[email protected]>
AuthorDate: Thu Apr 2 10:50:11 2026 -0700
Run DB check only for core components in prod entrypoint (#63413)
* Run DB check only for core commands in prod entrypoint
* Handle Helm bash -c airflow commands in DB check gating
---
Dockerfile | 11 ++++++++++-
scripts/docker/entrypoint_prod.sh | 14 +++++++++++---
2 files changed, 21 insertions(+), 4 deletions(-)
diff --git a/Dockerfile b/Dockerfile
index d86e6cd7e48..6c5f6ebb920 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1417,6 +1417,14 @@ EOF
COPY <<"EOF" /entrypoint_prod.sh
#!/usr/bin/env bash
AIRFLOW_COMMAND="${1:-}"
+AIRFLOW_COMMAND_TO_RUN="${AIRFLOW_COMMAND}"
+if [[ "${AIRFLOW_COMMAND}" == "airflow" ]]; then
+ AIRFLOW_COMMAND_TO_RUN="${2:-}"
+elif [[ "${AIRFLOW_COMMAND}" =~ ^(bash|sh)$ ]] \
+ && [[ "${2:-}" == "-c" ]] \
+ && [[ "${3:-}" =~ (^|[[:space:]])(exec[[:space:]]+)?airflow[[:space:]]+(scheduler|dag-processor|triggerer|api-server)([[:space:]]|$) ]]; then
+ AIRFLOW_COMMAND_TO_RUN="${BASH_REMATCH[3]}"
+fi
set -euo pipefail
@@ -1668,7 +1676,8 @@ readonly CONNECTION_CHECK_SLEEP_TIME
create_system_user_if_missing
set_pythonpath_for_root_user
-if [[ "${CONNECTION_CHECK_MAX_COUNT}" -gt "0" ]]; then
+if [[ "${CONNECTION_CHECK_MAX_COUNT}" -gt "0" ]] \
+ && [[ ${AIRFLOW_COMMAND_TO_RUN} =~ ^(scheduler|dag-processor|triggerer|api-server)$ ]]; then
wait_for_airflow_db
fi
diff --git a/scripts/docker/entrypoint_prod.sh b/scripts/docker/entrypoint_prod.sh
index 88135c145c1..3f431531749 100755
--- a/scripts/docker/entrypoint_prod.sh
+++ b/scripts/docker/entrypoint_prod.sh
@@ -17,6 +17,14 @@
# under the License.
# Might be empty
AIRFLOW_COMMAND="${1:-}"
+AIRFLOW_COMMAND_TO_RUN="${AIRFLOW_COMMAND}"
+if [[ "${AIRFLOW_COMMAND}" == "airflow" ]]; then
+ AIRFLOW_COMMAND_TO_RUN="${2:-}"
+elif [[ "${AIRFLOW_COMMAND}" =~ ^(bash|sh)$ ]] \
+ && [[ "${2:-}" == "-c" ]] \
+ && [[ "${3:-}" =~ (^|[[:space:]])(exec[[:space:]]+)?airflow[[:space:]]+(scheduler|dag-processor|triggerer|api-server)([[:space:]]|$) ]]; then
+ AIRFLOW_COMMAND_TO_RUN="${BASH_REMATCH[3]}"
+fi
set -euo pipefail
@@ -283,7 +291,8 @@ readonly CONNECTION_CHECK_SLEEP_TIME
create_system_user_if_missing
set_pythonpath_for_root_user
-if [[ "${CONNECTION_CHECK_MAX_COUNT}" -gt "0" ]]; then
+if [[ "${CONNECTION_CHECK_MAX_COUNT}" -gt "0" ]] \
+ && [[ ${AIRFLOW_COMMAND_TO_RUN} =~ ^(scheduler|dag-processor|triggerer|api-server)$ ]]; then
wait_for_airflow_db
fi
@@ -316,8 +325,7 @@ if [[ -n "${_PIP_ADDITIONAL_REQUIREMENTS=}" ]] ; then
fi
-# The `bash` and `python` commands should also verify the basic connections
-# So they are run after the DB check
+# Handle direct `bash` and `python` passthrough commands.
exec_to_bash_or_python_command_if_specified "${@}"
# Remove "airflow" if it is specified as airflow command