This is an automated email from the ASF dual-hosted git repository.
kamilbregula pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/master by this push:
new ffc68bc  Allow your own Docker production image to be verified by bash script (#14224)
ffc68bc is described below
commit ffc68bcf79a28f1d136db6e509f2743c26b1fecd
Author: Kamil Breguła <[email protected]>
AuthorDate: Sat Feb 27 11:54:17 2021 +0100
Allow your own Docker production image to be verified by bash script (#14224)
Co-authored-by: Kamil Bregula <[email protected]>
---
scripts/ci/images/ci_verify_ci_image.sh | 21 +--
scripts/ci/images/ci_verify_prod_image.sh | 180 +--------------------
scripts/ci/libraries/_all_libs.sh | 2 +
.../_verify_image.sh} | 136 +++++++++-------
scripts/ci/tools/verify_docker_image.sh | 57 +++++++
5 files changed, 139 insertions(+), 257 deletions(-)
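
This change extracts the image verification logic that previously lived in scripts/ci/images/ci_verify_prod_image.sh and ci_verify_ci_image.sh into a shared library, scripts/ci/libraries/_verify_image.sh, and adds a standalone entry point, scripts/ci/tools/verify_docker_image.sh, so a locally built image can be checked directly. Based on the usage() text of the new script, an invocation would look like the sketch below (the image names are only illustrative):

    # Verify a custom production image (IMAGE_TYPE is case-insensitive)
    ./scripts/ci/tools/verify_docker_image.sh PROD my-registry/airflow:2.0.1-custom

    # Verify a CI image
    ./scripts/ci/tools/verify_docker_image.sh CI apache/airflow:master-python3.8-ci

The script expects the image to already be present locally (it runs docker image inspect first) and exits with a non-zero status if any check fails.
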
diff --git a/scripts/ci/images/ci_verify_ci_image.sh b/scripts/ci/images/ci_verify_ci_image.sh
index ee62433..78a2cd0 100755
--- a/scripts/ci/images/ci_verify_ci_image.sh
+++ b/scripts/ci/images/ci_verify_ci_image.sh
@@ -18,25 +18,6 @@
# shellcheck source=scripts/ci/libraries/_script_init.sh
. "$(dirname "${BASH_SOURCE[0]}")/../libraries/_script_init.sh"
-function verify_ci_image_dependencies() {
- start_end::group_start "Checking if Airflow dependencies are non-conflicting in ${AIRFLOW_CI_IMAGE} image."
- set +e
- docker run --rm --entrypoint /bin/bash "${AIRFLOW_CI_IMAGE}" -c 'pip check'
- local res=$?
- if [[ ${res} != "0" ]]; then
- echo "${COLOR_RED}ERROR: ^^^ Some dependencies are conflicting. See
instructions below on how to deal with it. ${COLOR_RESET}"
- echo
- build_images::inform_about_pip_check ""
- else
- echo
- echo "${COLOR_GREEN}OK. The ${AIRFLOW_PROD_IMAGE} image dependencies
are consistent. ${COLOR_RESET}"
- echo
- fi
- set -e
- start_end::group_end
- exit ${res}
-}
-
function pull_ci_image() {
local image_name_with_tag="${GITHUB_REGISTRY_AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
start_end::group_start "Pulling ${image_name_with_tag} image"
@@ -51,4 +32,4 @@ build_images::prepare_ci_build
pull_ci_image
-verify_ci_image_dependencies
+verify_image::verify_ci_image "${AIRFLOW_CI_IMAGE}"
diff --git a/scripts/ci/images/ci_verify_prod_image.sh b/scripts/ci/images/ci_verify_prod_image.sh
index 89d27ee..c492804 100755
--- a/scripts/ci/images/ci_verify_prod_image.sh
+++ b/scripts/ci/images/ci_verify_prod_image.sh
@@ -18,180 +18,6 @@
# shellcheck source=scripts/ci/libraries/_script_init.sh
. "$(dirname "${BASH_SOURCE[0]}")/../libraries/_script_init.sh"
-function run_command_in_image() {
- docker run --rm \
- -e COLUMNS=180 \
- --entrypoint /bin/bash "${AIRFLOW_PROD_IMAGE}" \
- -c "${@}"
-}
-
-FEATURES_OK="true"
-
-function check_feature() {
- DESCRIPTION="${1}"
- COMMAND=${2}
- set +e
- echo -n "Feature: ${DESCRIPTION} "
- local output
- output=$(run_command_in_image "${COMMAND}" 2>&1)
- local res=$?
- if [[ ${res} == "0" ]]; then
- echo "${COLOR_GREEN}OK${COLOR_RESET}"
- else
- echo "${COLOR_RED}NOK${COLOR_RESET}"
- echo "${COLOR_BLUE}========================= OUTPUT start
============================${COLOR_RESET}"
- echo "${output}"
- echo "${COLOR_BLUE}========================= OUTPUT end
===========================${COLOR_RESET}"
- FEATURES_OK="false"
- fi
- set -e
-}
-
-function verify_prod_image_has_airflow_and_providers() {
- start_end::group_start "Verify prod image: ${AIRFLOW_PROD_IMAGE}"
- echo
- echo "Checking if Providers are installed"
- echo
-
- all_providers_installed_in_image=$(run_command_in_image "airflow providers list --output table")
-
- echo
- echo "Installed providers:"
- echo
- echo "${all_providers_installed_in_image}"
- echo
- local error="false"
- for provider in "${INSTALLED_PROVIDERS[@]}"; do
- echo -n "Verifying if provider ${provider} installed: "
- if [[ ${all_providers_installed_in_image} == *"apache-airflow-providers-${provider//./-}"* ]]; then
- echo "${COLOR_GREEN}OK${COLOR_RESET}"
- else
- echo "${COLOR_RED}NOK${COLOR_RESET}"
- error="true"
- fi
- done
- if [[ ${error} == "true" ]]; then
- echo
- echo "${COLOR_RED}ERROR: Some expected providers are not
installed!${COLOR_RESET}"
- echo
- exit 1
- else
- echo
- echo "${COLOR_GREEN}OK. All expected providers
installed!${COLOR_RESET}"
- echo
- fi
- start_end::group_end
-}
-
-function verify_prod_image_dependencies() {
- start_end::group_start "Checking if Airflow dependencies are non-conflicting in ${AIRFLOW_PROD_IMAGE} image."
-
- set +e
- run_command_in_image 'pip check'
- local res=$?
- if [[ ${res} != "0" ]]; then
- echo "${COLOR_RED}ERROR: ^^^ Some dependencies are conflicting. See
instructions below on how to deal with it. ${COLOR_RESET}"
- echo
- build_images::inform_about_pip_check "--production "
- exit ${res}
- else
- echo
- echo "${COLOR_GREEN}OK. The ${AIRFLOW_PROD_IMAGE} image dependencies
are consistent. ${COLOR_RESET}"
- echo
- fi
- set -e
- start_end::group_end
-}
-
-GOOGLE_IMPORTS=(
- 'OpenSSL'
- 'google.ads'
- 'googleapiclient'
- 'google.auth'
- 'google_auth_httplib2'
- 'google.cloud.automl'
- 'google.cloud.bigquery_datatransfer'
- 'google.cloud.bigtable'
- 'google.cloud.container'
- 'google.cloud.datacatalog'
- 'google.cloud.dataproc'
- 'google.cloud.dlp'
- 'google.cloud.kms'
- 'google.cloud.language'
- 'google.cloud.logging'
- 'google.cloud.memcache'
- 'google.cloud.monitoring'
- 'google.cloud.oslogin'
- 'google.cloud.pubsub'
- 'google.cloud.redis'
- 'google.cloud.secretmanager'
- 'google.cloud.spanner'
- 'google.cloud.speech'
- 'google.cloud.storage'
- 'google.cloud.tasks'
- 'google.cloud.texttospeech'
- 'google.cloud.translate'
- 'google.cloud.videointelligence'
- 'google.cloud.vision'
-)
-
-AZURE_IMPORTS=(
- 'azure.batch'
- 'azure.cosmos'
- 'azure.datalake.store'
- 'azure.identity'
- 'azure.keyvault'
- 'azure.kusto.data'
- 'azure.mgmt.containerinstance'
- 'azure.mgmt.datalake.store'
- 'azure.mgmt.resource'
- 'azure.storage'
-)
-
-function verify_production_image_features() {
- start_end::group_start "Verify prod image features: ${AIRFLOW_PROD_IMAGE}"
-
- check_feature "Import: async" "python -c 'import gevent, eventlet,
greenlet'"
- check_feature "Import: amazon" "python -c 'import boto3, botocore,
watchtower'"
- check_feature "Import: celery" "python -c 'import celery, flower, vine'"
- check_feature "Import: cncf.kubernetes" "python -c 'import kubernetes,
cryptography'"
- check_feature "Import: docker" "python -c 'import docker'"
- check_feature "Import: dask" "python -c 'import cloudpickle, distributed'"
- check_feature "Import: elasticsearch" "python -c 'import
elasticsearch,es.elastic, elasticsearch_dsl'"
- check_feature "Import: grpc" "python -c 'import grpc, google.auth,
google_auth_httplib2'"
- check_feature "Import: hashicorp" "python -c 'import hvac'"
- check_feature "Import: ldap" "python -c 'import ldap'"
- for google_import in "${GOOGLE_IMPORTS[@]}"
- do
- check_feature "Import google: ${google_import}" "python -c 'import
${google_import}'"
- done
- for azure_import in "${AZURE_IMPORTS[@]}"
- do
- check_feature "Import azure: ${azure_import}" "python -c 'import
${azure_import}'"
- done
- check_feature "Import: mysql" "python -c 'import mysql'"
- check_feature "Import: postgres" "python -c 'import psycopg2'"
- check_feature "Import: redis" "python -c 'import redis'"
- check_feature "Import: sendgrid" "python -c 'import sendgrid'"
- check_feature "Import: sftp/ssh" "python -c 'import paramiko, pysftp,
sshtunnel'"
- check_feature "Import: slack" "python -c 'import slack_sdk'"
- check_feature "Import: statsd" "python -c 'import statsd'"
- check_feature "Import: virtualenv" "python -c 'import virtualenv'"
-
- if [[ ${FEATURES_OK} == "true" ]]; then
- echo
- echo "${COLOR_GREEN}OK. The ${AIRFLOW_PROD_IMAGE} features are all OK.
${COLOR_RESET}"
- echo
- else
- echo
- echo "${COLOR_RED}ERROR: Some features were not ok!${COLOR_RESET}"
- echo
- exit 1
- fi
- start_end::group_end
-}
-
-
function pull_prod_image() {
local image_name_with_tag="${GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
start_end::group_start "Pulling the ${image_name_with_tag} image and tagging with ${AIRFLOW_PROD_IMAGE}"
@@ -204,8 +30,4 @@ build_images::prepare_prod_build
pull_prod_image
-verify_prod_image_has_airflow_and_providers
-
-verify_production_image_features
-
-verify_prod_image_dependencies
+verify_image::verify_prod_image "${AIRFLOW_PROD_IMAGE}"
diff --git a/scripts/ci/libraries/_all_libs.sh b/scripts/ci/libraries/_all_libs.sh
index 6024088..a8aedae 100755
--- a/scripts/ci/libraries/_all_libs.sh
+++ b/scripts/ci/libraries/_all_libs.sh
@@ -58,3 +58,5 @@ readonly SCRIPTS_CI_DIR
. "${LIBRARIES_DIR}"/_start_end.sh
# shellcheck source=scripts/ci/libraries/_verbosity.sh
. "${LIBRARIES_DIR}"/_verbosity.sh
+# shellcheck source=scripts/ci/libraries/_verify_image.sh
+. "${LIBRARIES_DIR}"/_verify_image.sh
diff --git a/scripts/ci/images/ci_verify_prod_image.sh b/scripts/ci/libraries/_verify_image.sh
old mode 100755
new mode 100644
similarity index 50%
copy from scripts/ci/images/ci_verify_prod_image.sh
copy to scripts/ci/libraries/_verify_image.sh
index 89d27ee..5ce9c0d
--- a/scripts/ci/images/ci_verify_prod_image.sh
+++ b/scripts/ci/libraries/_verify_image.sh
@@ -15,25 +15,22 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-# shellcheck source=scripts/ci/libraries/_script_init.sh
-. "$(dirname "${BASH_SOURCE[0]}")/../libraries/_script_init.sh"
-
-function run_command_in_image() {
+function verify_image::run_command_in_image() {
docker run --rm \
-e COLUMNS=180 \
- --entrypoint /bin/bash "${AIRFLOW_PROD_IMAGE}" \
+ --entrypoint /bin/bash "${DOCKER_IMAGE}" \
-c "${@}"
}
-FEATURES_OK="true"
+IMAGE_VALID="true"
-function check_feature() {
+function verify_image::check_command() {
DESCRIPTION="${1}"
COMMAND=${2}
set +e
echo -n "Feature: ${DESCRIPTION} "
local output
- output=$(run_command_in_image "${COMMAND}" 2>&1)
+ output=$(verify_image::run_command_in_image "${COMMAND}" 2>&1)
local res=$?
if [[ ${res} == "0" ]]; then
echo "${COLOR_GREEN}OK${COLOR_RESET}"
@@ -42,18 +39,18 @@ function check_feature() {
echo "${COLOR_BLUE}========================= OUTPUT start
============================${COLOR_RESET}"
echo "${output}"
echo "${COLOR_BLUE}========================= OUTPUT end
===========================${COLOR_RESET}"
- FEATURES_OK="false"
+ IMAGE_VALID="false"
fi
set -e
}
-function verify_prod_image_has_airflow_and_providers() {
- start_end::group_start "Verify prod image: ${AIRFLOW_PROD_IMAGE}"
+function verify_image::verify_prod_image_has_airflow_and_providers() {
+ start_end::group_start "Verify prod image: ${DOCKER_IMAGE}"
echo
echo "Checking if Providers are installed"
echo
- all_providers_installed_in_image=$(run_command_in_image "airflow providers list --output table")
+ all_providers_installed_in_image=$(verify_image::run_command_in_image "airflow providers list --output table")
echo
echo "Installed providers:"
@@ -74,7 +71,7 @@ function verify_prod_image_has_airflow_and_providers() {
echo
echo "${COLOR_RED}ERROR: Some expected providers are not
installed!${COLOR_RESET}"
echo
- exit 1
+ IMAGE_VALID="false"
else
echo
echo "${COLOR_GREEN}OK. All expected providers
installed!${COLOR_RESET}"
@@ -83,20 +80,39 @@ function verify_prod_image_has_airflow_and_providers() {
start_end::group_end
}
-function verify_prod_image_dependencies() {
- start_end::group_start "Checking if Airflow dependencies are non-conflicting in ${AIRFLOW_PROD_IMAGE} image."
+function verify_image::verify_ci_image_dependencies() {
+ start_end::group_start "Checking if Airflow dependencies are non-conflicting in ${DOCKER_IMAGE} image."
+ set +e
+ docker run --rm --entrypoint /bin/bash "${DOCKER_IMAGE}" -c 'pip check'
+ local res=$?
+ if [[ ${res} != "0" ]]; then
+ echo "${COLOR_RED}ERROR: ^^^ Some dependencies are conflicting. See
instructions below on how to deal with it. ${COLOR_RESET}"
+ echo
+ build_images::inform_about_pip_check ""
+ IMAGE_VALID="false"
+ else
+ echo
+ echo "${COLOR_GREEN}OK. The ${DOCKER_IMAGE} image dependencies are
consistent. ${COLOR_RESET}"
+ echo
+ fi
+ set -e
+ start_end::group_end
+}
+
+function verify_image::verify_prod_image_dependencies() {
+ start_end::group_start "Checking if Airflow dependencies are non-conflicting in ${DOCKER_IMAGE} image."
set +e
- run_command_in_image 'pip check'
+ verify_image::run_command_in_image 'pip check'
local res=$?
if [[ ${res} != "0" ]]; then
echo "${COLOR_RED}ERROR: ^^^ Some dependencies are conflicting. See
instructions below on how to deal with it. ${COLOR_RESET}"
echo
build_images::inform_about_pip_check "--production "
- exit ${res}
+ IMAGE_VALID="false"
else
echo
- echo "${COLOR_GREEN}OK. The ${AIRFLOW_PROD_IMAGE} image dependencies
are consistent. ${COLOR_RESET}"
+ echo "${COLOR_GREEN}OK. The ${DOCKER_IMAGE} image dependencies are
consistent. ${COLOR_RESET}"
echo
fi
set -e
@@ -148,39 +164,43 @@ AZURE_IMPORTS=(
'azure.storage'
)
-function verify_production_image_features() {
- start_end::group_start "Verify prod image features: ${AIRFLOW_PROD_IMAGE}"
-
- check_feature "Import: async" "python -c 'import gevent, eventlet,
greenlet'"
- check_feature "Import: amazon" "python -c 'import boto3, botocore,
watchtower'"
- check_feature "Import: celery" "python -c 'import celery, flower, vine'"
- check_feature "Import: cncf.kubernetes" "python -c 'import kubernetes,
cryptography'"
- check_feature "Import: docker" "python -c 'import docker'"
- check_feature "Import: dask" "python -c 'import cloudpickle, distributed'"
- check_feature "Import: elasticsearch" "python -c 'import
elasticsearch,es.elastic, elasticsearch_dsl'"
- check_feature "Import: grpc" "python -c 'import grpc, google.auth,
google_auth_httplib2'"
- check_feature "Import: hashicorp" "python -c 'import hvac'"
- check_feature "Import: ldap" "python -c 'import ldap'"
+function verify_image::verify_production_image_python_modules() {
+ start_end::group_start "Verify prod image features: ${DOCKER_IMAGE}"
+
+ verify_image::check_command "Import: async" "python -c 'import gevent,
eventlet, greenlet'"
+ verify_image::check_command "Import: amazon" "python -c 'import boto3,
botocore, watchtower'"
+ verify_image::check_command "Import: celery" "python -c 'import celery,
flower, vine'"
+ verify_image::check_command "Import: cncf.kubernetes" "python -c 'import
kubernetes, cryptography'"
+ verify_image::check_command "Import: docker" "python -c 'import docker'"
+ verify_image::check_command "Import: dask" "python -c 'import cloudpickle,
distributed'"
+ verify_image::check_command "Import: elasticsearch" "python -c 'import
elasticsearch,es.elastic, elasticsearch_dsl'"
+ verify_image::check_command "Import: grpc" "python -c 'import grpc,
google.auth, google_auth_httplib2'"
+ verify_image::check_command "Import: hashicorp" "python -c 'import hvac'"
+ verify_image::check_command "Import: ldap" "python -c 'import ldap'"
for google_import in "${GOOGLE_IMPORTS[@]}"
do
- check_feature "Import google: ${google_import}" "python -c 'import
${google_import}'"
+ verify_image::check_command "Import google: ${google_import}" "python
-c 'import ${google_import}'"
done
for azure_import in "${AZURE_IMPORTS[@]}"
do
- check_feature "Import azure: ${azure_import}" "python -c 'import
${azure_import}'"
+ verify_image::check_command "Import azure: ${azure_import}" "python -c
'import ${azure_import}'"
done
- check_feature "Import: mysql" "python -c 'import mysql'"
- check_feature "Import: postgres" "python -c 'import psycopg2'"
- check_feature "Import: redis" "python -c 'import redis'"
- check_feature "Import: sendgrid" "python -c 'import sendgrid'"
- check_feature "Import: sftp/ssh" "python -c 'import paramiko, pysftp,
sshtunnel'"
- check_feature "Import: slack" "python -c 'import slack_sdk'"
- check_feature "Import: statsd" "python -c 'import statsd'"
- check_feature "Import: virtualenv" "python -c 'import virtualenv'"
-
- if [[ ${FEATURES_OK} == "true" ]]; then
+ verify_image::check_command "Import: mysql" "python -c 'import mysql'"
+ verify_image::check_command "Import: postgres" "python -c 'import
psycopg2'"
+ verify_image::check_command "Import: redis" "python -c 'import redis'"
+ verify_image::check_command "Import: sendgrid" "python -c 'import
sendgrid'"
+ verify_image::check_command "Import: sftp/ssh" "python -c 'import
paramiko, pysftp, sshtunnel'"
+ verify_image::check_command "Import: slack" "python -c 'import slack_sdk'"
+ verify_image::check_command "Import: statsd" "python -c 'import statsd'"
+ verify_image::check_command "Import: virtualenv" "python -c 'import
virtualenv'"
+
+ start_end::group_end
+}
+
+function verify_image::display_result {
+ if [[ ${IMAGE_VALID} == "true" ]]; then
echo
- echo "${COLOR_GREEN}OK. The ${AIRFLOW_PROD_IMAGE} features are all OK.
${COLOR_RESET}"
+ echo "${COLOR_GREEN}OK. The ${DOCKER_IMAGE} features are all OK.
${COLOR_RESET}"
echo
else
echo
@@ -188,24 +208,24 @@ function verify_production_image_features() {
echo
exit 1
fi
- start_end::group_end
}
+function verify_image::verify_prod_image {
+ IMAGE_VALID="true"
+ DOCKER_IMAGE="${1}"
+ verify_image::verify_prod_image_has_airflow_and_providers
-function pull_prod_image() {
- local image_name_with_tag="${GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
- start_end::group_start "Pulling the ${image_name_with_tag} image and tagging with ${AIRFLOW_PROD_IMAGE}"
+ verify_image::verify_production_image_python_modules
- push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_PROD_IMAGE}" "${image_name_with_tag}"
- start_end::group_end
-}
-
-build_images::prepare_prod_build
+ verify_image::verify_prod_image_dependencies
-pull_prod_image
-
-verify_prod_image_has_airflow_and_providers
+ verify_image::display_result
+}
-verify_production_image_features
+function verify_image::verify_ci_image {
+ IMAGE_VALID="true"
+ DOCKER_IMAGE="${1}"
+ verify_image::verify_ci_image_dependencies
-verify_prod_image_dependencies
+ verify_image::display_result
+}
diff --git a/scripts/ci/tools/verify_docker_image.sh b/scripts/ci/tools/verify_docker_image.sh
new file mode 100755
index 0000000..3ef5e3e
--- /dev/null
+++ b/scripts/ci/tools/verify_docker_image.sh
@@ -0,0 +1,57 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# shellcheck source=scripts/ci/libraries/_script_init.sh
+. "$(dirname "${BASH_SOURCE[0]}")/../libraries/_script_init.sh"
+
+usage() {
+local cmdname
+cmdname="$(basename -- "$0")"
+
+cat << EOF
+Usage: ${cmdname} <IMAGE_TYPE> <DOCKER_IMAGE>
+
+Verify the user-specified docker image.
+
+Image Type can be one of the two values: CI or PROD
+
+EOF
+}
+
+
+if [[ "$#" -ne 2 ]]; then
+ >&2 echo "You must provide two argument - image type [PROD/CI] and image
name."
+ usage
+ exit 1
+fi
+
+IMAGE_TYPE="${1}"
+IMAGE_NAME="${2}"
+
+if ! docker image inspect "${IMAGE_NAME}" &>/dev/null; then
+ >&2 echo "Image '${IMAGE_NAME}' doesn't exists in local registry."
+ exit 1
+fi
+
+if [ "$(echo "${IMAGE_TYPE}" | tr '[:lower:]' '[:upper:]')" = "PROD" ]; then
+ verify_image::verify_prod_image "${IMAGE_NAME}"
+elif [ "$(echo "${IMAGE_TYPE}" | tr '[:lower:]' '[:upper:]')" = "CI" ]; then
+ verify_image::verify_ci_image "${IMAGE_NAME}"
+else
+ >&2 echo "Unsupported image type. Supported values: PROD, CI"
+ exit 1
+fi
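
For CI scripts that already source scripts/ci/libraries/_script_init.sh, the same checks are now available as library functions rather than via the wrapper above. A minimal sketch, mirroring what the updated ci_verify_prod_image.sh does (the image tag is illustrative):

    #!/usr/bin/env bash
    # _script_init.sh loads _all_libs.sh, which now also sources _verify_image.sh
    . "$(dirname "${BASH_SOURCE[0]}")/../libraries/_script_init.sh"

    # Runs the provider, python-module and 'pip check' verifications against the given image
    verify_image::verify_prod_image "apache/airflow:2.0.1"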