This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 76c430655b1c54b4f7d5854be4ce442dde35f0bd
Author: Kamil Breguła <[email protected]>
AuthorDate: Thu Jun 11 18:50:31 2020 +0200

    Add generic CLI tool wrapper (#9223)
    
    * Add generic  CLI tool wrapper
    
    * Pass working directory to container
    
    * Share namespaces between all containers
    
    * Fix permissions hack
    
    * Unify code style
    
    Co-authored-by: Felix Uellendall <[email protected]>
    
    * Detect standalone execution by checking symbolic link
    
    * User friendly error message when env var is missing
    
    * Display error to stderr
    
    * Display errors on stderr
    
    * Fix permission hack
    
    * Fix condition in if
    
    * Fix missing env-file
    
    * TEST: Install airflow without copying sources
    
    * Update scripts/ci/in_container/run_prepare_backport_readme.sh
    
    Co-authored-by: Felix Uellendall <[email protected]>
    (cherry picked from commit f17a02d33047ebbfd9f92d3d1d54d6d810f596c1)
---
 Dockerfile.ci                            |   2 +-
 scripts/ci/docker-compose/local-prod.yml |   2 +-
 scripts/ci/docker-compose/local.yml      |   2 +-
 scripts/ci/libraries/_local_mounts.sh    |   2 +-
 scripts/ci/prepare_tool_scripts.sh       |  64 ------------
 scripts/ci/run_cli_tool.sh               | 167 +++++++++++++++++++++++++++++++
 6 files changed, 171 insertions(+), 68 deletions(-)

diff --git a/Dockerfile.ci b/Dockerfile.ci
index 1549214..24ee87d 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -315,7 +315,7 @@ RUN if [[ -n "${ADDITIONAL_PYTHON_DEPS}" ]]; then \
         pip install ${ADDITIONAL_PYTHON_DEPS}; \
     fi
 
-RUN scripts/ci/prepare_tool_scripts.sh
+RUN source <(bash scripts/ci/run_cli_tool.sh)
 
 WORKDIR ${AIRFLOW_SOURCES}
 
diff --git a/scripts/ci/docker-compose/local-prod.yml 
b/scripts/ci/docker-compose/local-prod.yml
index a82b4f8..6342d33 100644
--- a/scripts/ci/docker-compose/local-prod.yml
+++ b/scripts/ci/docker-compose/local-prod.yml
@@ -35,7 +35,7 @@ services:
       - ../../../setup.cfg:/opt/airflow/setup.cfg:cached
       - ../../../setup.py:/opt/airflow/setup.py:cached
       - ../../../tests:/opt/airflow/tests:cached
-      - ../../../tmp:/opt/airflow/tmp:cached
+      - ../../../tmp:/tmp:cached
     environment:
       - HOST_USER_ID
       - HOST_GROUP_ID
diff --git a/scripts/ci/docker-compose/local.yml 
b/scripts/ci/docker-compose/local.yml
index 3c9e40b..ff88c6c 100644
--- a/scripts/ci/docker-compose/local.yml
+++ b/scripts/ci/docker-compose/local.yml
@@ -54,7 +54,7 @@ services:
       - ../../../setup.py:/opt/airflow/setup.py:cached
       - ../../../tests:/opt/airflow/tests:cached
       - ../../../kubernetes_tests:/opt/airflow/kubernetes_tests:cached
-      - ../../../tmp:/opt/airflow/tmp:cached
+      - ../../../tmp:/tmp:cached
       # END automatically generated volumes from LOCAL_MOUNTS in 
_local_mounts.sh
     environment:
       - HOST_USER_ID
diff --git a/scripts/ci/libraries/_local_mounts.sh 
b/scripts/ci/libraries/_local_mounts.sh
index 5750600..24a2eb3 100644
--- a/scripts/ci/libraries/_local_mounts.sh
+++ b/scripts/ci/libraries/_local_mounts.sh
@@ -52,7 +52,7 @@ function generate_local_mounts_list {
         "$prefix"setup.py:/opt/airflow/setup.py:cached
         "$prefix"tests:/opt/airflow/tests:cached
         "$prefix"kubernetes_tests:/opt/airflow/kubernetes_tests:cached
-        "$prefix"tmp:/opt/airflow/tmp:cached
+        "$prefix"tmp:/tmp:cached
     )
 }
 
diff --git a/scripts/ci/prepare_tool_scripts.sh 
b/scripts/ci/prepare_tool_scripts.sh
deleted file mode 100755
index 7a98c50..0000000
--- a/scripts/ci/prepare_tool_scripts.sh
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-set -euo pipefail
-
-function prepare_tool_script() {
-    IMAGE="${1}"
-    VOLUME="${2}"
-    TOOL="${3}"
-    COMMAND="${4:-}"
-
-    TARGET_TOOL_PATH="/usr/bin/${TOOL}"
-    TARGET_TOOL_UPDATE_PATH="/usr/bin/${TOOL}-update"
-
-    cat >"${TARGET_TOOL_PATH}" <<EOF
-#!/usr/bin/env bash
-docker run --rm -it \
-    -v "\${HOST_AIRFLOW_SOURCES}/tmp:/tmp" \
-    -v "\${HOST_AIRFLOW_SOURCES}/files:/files" \
-    -v "\${HOST_AIRFLOW_SOURCES}:/opt/airflow" \
-    -v "\${HOST_HOME}/${VOLUME}:/root/${VOLUME}" \
-    "${IMAGE}" ${COMMAND} "\$@"
-RES=\$?
-if [[ \${HOST_OS} == "Linux" ]]; then
-    docker run --rm \
-        -v "\${HOST_AIRFLOW_SOURCES}/tmp:/tmp" \
-        -v "\${HOST_AIRFLOW_SOURCES}/files:/files" \
-        -v "\${HOST_HOME}/${VOLUME}:/root/${VOLUME}" \
-        "\${AIRFLOW_CI_IMAGE}" bash -c \
-        "find '/tmp/' '/files/' '/root/${VOLUME}' -user root -print0 | xargs 
--null chown '\${HOST_USER_ID}.\${HOST_GROUP_ID}' --no-dereference" >/dev/null 
2>&1
-fi
-exit \${RES}
-EOF
-
-    cat >"${TARGET_TOOL_UPDATE_PATH}" <<EOF
-#!/usr/bin/env bash
-docker pull "${IMAGE}"
-EOF
-
-    chmod a+x "${TARGET_TOOL_PATH}" "${TARGET_TOOL_UPDATE_PATH}"
-}
-
-GCLOUD_IMAGE="gcr.io/google.com/cloudsdktool/cloud-sdk:latest"
-
-prepare_tool_script "amazon/aws-cli:latest" ".aws" aws
-prepare_tool_script "mcr.microsoft.com/azure-cli:latest" ".azure" az az
-prepare_tool_script "${GCLOUD_IMAGE}" ".config/gcloud" bq bq
-prepare_tool_script "${GCLOUD_IMAGE}" ".config/gcloud" gcloud gcloud
-prepare_tool_script "${GCLOUD_IMAGE}" ".config/gcloud" gsutil gsutil
diff --git a/scripts/ci/run_cli_tool.sh b/scripts/ci/run_cli_tool.sh
new file mode 100755
index 0000000..cf840bc
--- /dev/null
+++ b/scripts/ci/run_cli_tool.sh
@@ -0,0 +1,167 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+set -euo pipefail
+
+if [ -z "${AIRFLOW_CI_IMAGE}" ]; then
+    >&2 echo "Missing environment variable AIRFLOW_CI_IMAGE"
+    exit 1
+fi
+if [ -z "${HOST_AIRFLOW_SOURCES}" ]; then
+    >&2 echo "Missing environment variable HOST_AIRFLOW_SOURCES"
+    exit 1
+fi
+if [ -z "${HOST_USER_ID}" ]; then
+    >&2 echo "Missing environment variable HOST_USER_ID"
+    exit 1
+fi
+if [ -z "${HOST_GROUP_ID}" ]; then
+    >&2 echo "Missing environment variable HOST_GROUP_ID"
+    exit 1
+fi
+
+SCRIPT_NAME="$( basename "${BASH_SOURCE[0]}")"
+# Drop "-update" suffix, if exists
+TOOL_NAME="$(echo "${SCRIPT_NAME}" | cut -d "-" -f 1)"
+
+SUPPORTED_TOOL_NAMES=("aws" "az" "gcloud" "bq" "gsutil" "terraform" "java")
+
+if [ ! -L "${BASH_SOURCE[0]}" ]
+then
+    # Direct execution - return installation script
+    >&2 echo "# CLI tool wrappers"
+    >&2 echo "#"
+    >&2 echo "# To install, run the following command:"
+    >&2 echo "#     source <(bash ${SCRIPT_PATH@Q})"
+    >&2 echo "#"
+    >&2 echo ""
+    # Print installation script
+    for NAME in "${SUPPORTED_TOOL_NAMES[@]}"
+    do
+        echo "ln -s ${SCRIPT_PATH@Q} /usr/bin/${NAME}"
+        echo "ln -s ${SCRIPT_PATH@Q} /usr/bin/${NAME}-update"
+        echo "chmod +x /usr/bin/${NAME} /usr/bin/${NAME}-update"
+    done
+    exit 0
+fi
+ENV_TMP_FILE=$(mktemp)
+env > "${ENV_TMP_FILE}"
+cleanup() {
+    rm "${ENV_TMP_FILE}"
+}
+trap cleanup EXIT
+
+CONTAINER_ID="$(head -n 1 < /proc/self/cgroup | cut -d ":" -f 3 | cut -d "/" 
-f 3)"
+
+COMMON_DOCKER_ARGS=(
+    # Share namespaces between all containers.
+    # This way we are even closer to run those tools like if they were 
installed.
+    # More information: 
https://docs.docker.com/get-started/overview/#namespaces
+    --ipc "container:${CONTAINER_ID}"
+    --pid "container:${CONTAINER_ID}"
+    --network "container:${CONTAINER_ID}"
+    -v "${HOST_AIRFLOW_SOURCES}/tmp:/tmp"
+    -v "${HOST_AIRFLOW_SOURCES}/files:/files"
+    -v "${HOST_AIRFLOW_SOURCES}:/opt/airflow"
+    --env-file "${ENV_TMP_FILE}"
+    -w "${PWD}"
+)
+
+AWS_CREDENTIALS_DOCKER_ARGS=(-v "${HOST_HOME}/.aws:/root/.aws")
+AZURE_CREDENTIALS_DOCKER_ARGS=(-v "${HOST_HOME}/.azure:/root/.azure")
+GOOGLE_CREDENTIALS_DOCKER_ARGS=(-v 
"${HOST_HOME}/.config/gcloud:/root/.config/gcloud")
+
+DIRECTORIES_TO_FIX=('/tmp/' '/files/')
+
+COMMAND=("${@}")
+
+# Configure selected tool
+case "${TOOL_NAME}" in
+    aws )
+        COMMON_DOCKER_ARGS+=("${AWS_CREDENTIALS_DOCKER_ARGS[@]}")
+        DIRECTORIES_TO_FIX+=("/root/.aws")
+        IMAGE_NAME="amazon/aws-cli:latest"
+        ;;
+    az )
+        COMMON_DOCKER_ARGS+=("${AZURE_CREDENTIALS_DOCKER_ARGS[@]}")
+        DIRECTORIES_TO_FIX+=("/root/.azure")
+        IMAGE_NAME="mcr.microsoft.com/azure-cli:latest"
+        ;;
+    gcloud | bq | gsutil )
+        COMMON_DOCKER_ARGS+=("${GOOGLE_CREDENTIALS_DOCKER_ARGS[@]}")
+        DIRECTORIES_TO_FIX+=("/root/.config/gcloud")
+        IMAGE_NAME="gcr.io/google.com/cloudsdktool/cloud-sdk:latest"
+        COMMAND=("$TOOL_NAME" "${@}")
+        ;;
+    terraform )
+        COMMON_DOCKER_ARGS+=(
+            "${GOOGLE_CREDENTIALS_DOCKER_ARGS[@]}"
+            "${AZURE_CREDENTIALS_DOCKER_ARGS[@]}"
+            "${AWS_CREDENTIALS_DOCKER_ARGS[@]}"
+        )
+        DIRECTORIES_TO_FIX+=(
+            "/root/.config/gcloud"
+            "/root/.aws"
+            "/root/.azure"
+        )
+        IMAGE_NAME="hashicorp/terraform:latest"
+        ;;
+    java )
+        # TODO: Should we add other credentials?
+        COMMON_DOCKER_ARGS+=("${GOOGLE_CREDENTIALS_DOCKER_ARGS[@]}")
+        DIRECTORIES_TO_FIX+=("/root/.config/gcloud")
+        IMAGE_NAME="openjdk:8-jre-slim"
+        COMMAND=("/usr/local/openjdk-8/bin/java" "${@}")
+        ;;
+    * )
+        >&2 echo "Unsupported tool name: ${TOOL_NAME}"
+        exit 1
+        ;;
+esac
+
+# Run update, if requested
+if [[ "${SCRIPT_NAME}" == *-update ]]; then
+    docker pull "${IMAGE_NAME}"
+    exit $?
+fi
+
+# Otherwise, run tool
+TOOL_DOCKER_ARGS=(--rm --interactive)
+TOOL_DOCKER_ARGS+=("${COMMON_DOCKER_ARGS[@]}")
+
+if [ -t 0 ] ; then
+    TOOL_DOCKER_ARGS+=(
+        --tty
+    )
+fi
+
+docker run "${TOOL_DOCKER_ARGS[@]}" "${IMAGE_NAME}" "${COMMAND[@]}"
+
+RES=$?
+
+# Set file permissions to the host user
+if [[ "${HOST_OS}" == "Linux" ]]; then
+    FIX_DOCKER_ARGS=(--rm)
+    FIX_DOCKER_ARGS+=("${COMMON_DOCKER_ARGS[@]}")
+    FIX_COMMAND=(bash -c
+        "find ${DIRECTORIES_TO_FIX[@]@Q} -user root -print0 | xargs --null 
chown '${HOST_USER_ID}.${HOST_GROUP_ID}' --no-dereference")
+
+    docker run "${FIX_DOCKER_ARGS[@]}" "${AIRFLOW_CI_IMAGE}" 
"${FIX_COMMAND[@]}" >/dev/null 2>&1
+fi
+
+exit ${RES}

Reply via email to