This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-0-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit b4faca25f9335e178bcee65c9ba1ba15fd7dc4a0
Author: Jarek Potiuk <[email protected]>
AuthorDate: Tue Mar 23 04:13:17 2021 +0100

    Much easier to use and better documented Docker image (#14911)
    
    Previously you had to specify AIRFLOW_VERSION_REFERENCE and
    AIRFLOW_CONSTRAINTS_REFERENCE to point to the right version
    of Airflow. Now those values are auto-detected if not specified
    (but you can still override them)
    
    This change allowed us to simplify and restructure the Dockerfile
    documentation - following the recent change in separating out
    the docker-stack, production image building documentation has
    been improved to reflect those simplifications. It should be
    much easier to grasp by the novice users now - very clear
    distinction and separation is made between the two types of
    building your own images - customizing or extending - and it
    is now much easier to follow examples and find out how to
    build your own image. The criteria on which approach to
    choose were put first and forefront.
    
    Examples have been reviewed, fixed and put in a logical
    sequence. From the most basic ones to the most advanced,
    with clear indication where the basic approach ends and where
    the "power-user" one starts. The examples were also separated
    out to separate files and included from there - also the
    example Docker images and build commands are executable
    and tested automatically in CI, so they are guaranteed
    to work.
    
    Finally, the build arguments were split into sections - from most
    basic to most advanced and each section links to appropriate
    example section, showing how to use those parameters.
    
    Fixes: #14848
    Fixes: #14255
---
 .github/workflows/ci.yml                           |  24 ++
 Dockerfile                                         |  64 ++---
 Dockerfile.ci                                      |  15 +-
 IMAGES.rst                                         |   5 +-
 breeze                                             |   5 -
 docs/docker-stack/build-arg-ref.rst                | 267 ++++++++++++---------
 .../customizing/add-build-essential-custom.sh      |  33 +++
 .../docker-examples/customizing/custom-sources.sh  |  48 ++++
 .../customizing/github-different-repository.sh     |  31 +++
 .../docker-examples/customizing/github-master.sh   |  31 +++
 .../customizing/github-v2-0-test.sh                |  31 +++
 .../customizing/pypi-dev-runtime-deps.sh           |  34 +++
 .../customizing/pypi-extras-and-deps.sh            |  32 +++
 .../customizing/pypi-selected-version.sh           |  30 +++
 .../docker-examples/customizing/stable-airflow.sh  |  28 +++
 .../extending/add-apt-packages/Dockerfile          |  27 +++
 .../add-build-essential-extend/Dockerfile          |  28 +++
 .../extending/add-pypi-packages/Dockerfile         |  20 ++
 .../extending/embedding-dags/Dockerfile            |  22 ++
 .../extending/embedding-dags/test_dag.py           |  39 +++
 .../restricted/restricted_environments.sh          |  44 ++++
 scripts/ci/images/ci_run_prod_image_test.sh        |  50 ++++
 .../ci_test_examples_of_prod_image_building.sh     |  91 +++++++
 scripts/ci/libraries/_build_images.sh              |   1 +
 scripts/ci/libraries/_docker_engine_resources.sh   |   9 +-
 scripts/ci/libraries/_initialization.sh            |   1 +
 scripts/ci/libraries/_parallel.sh                  |  70 +++++-
 scripts/ci/testing/ci_run_airflow_testing.sh       |  59 +----
 scripts/docker/common.sh                           |  63 +++++
 scripts/docker/compile_www_assets.sh               |   5 +-
 scripts/docker/install_airflow.sh                  |  18 +-
 scripts/docker/install_airflow_from_branch_tip.sh  |  13 +-
 .../docker/install_from_docker_context_files.sh    |  26 +-
 33 files changed, 993 insertions(+), 271 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b47022d..79eb7fb 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -216,6 +216,7 @@ jobs:
           fi
 
   test-openapi-client-generation:
+    timeout-minutes: 10
     name: "Test OpenAPI client generation"
     runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs: [build-info]
@@ -229,6 +230,29 @@ jobs:
       - name: "Generate client codegen diff"
         run: ./scripts/ci/openapi/client_codegen_diff.sh
 
+  test-examples-of-prod-image-building:
+    timeout-minutes: 60
+    name: "Test examples of production image building"
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
+    needs: [build-info]
+    if: needs.build-info.outputs.image-build == 'true'
+    steps:
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+        uses: actions/checkout@v2
+        with:
+          fetch-depth: 2
+          persist-credentials: false
+      - name: "Free space"
+        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+        if: |
+          needs.build-info.outputs.waitForImage == 'true'
+      - name: "Setup python"
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{needs.build-info.outputs.defaultPythonVersion}}
+      - name: "Test examples of PROD image building"
+        run: ./scripts/ci/images/ci_test_examples_of_prod_image_building.sh
+
   ci-images:
     timeout-minutes: 120
     name: "Wait for CI images"
diff --git a/Dockerfile b/Dockerfile
index 4b1b807..a98b729 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -33,7 +33,7 @@
 #                        all the build essentials. This makes the image
 #                        much smaller.
 #
-ARG AIRFLOW_VERSION="2.0.0.dev0"
+ARG AIRFLOW_VERSION="2.0.1"
 ARG 
AIRFLOW_EXTRAS="async,amazon,celery,cncf.kubernetes,docker,dask,elasticsearch,ftp,grpc,hashicorp,http,ldap,google,microsoft.azure,mysql,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv"
 ARG ADDITIONAL_AIRFLOW_EXTRAS=""
 ARG ADDITIONAL_PYTHON_DEPS=""
@@ -45,7 +45,6 @@ ARG AIRFLOW_GID="50000"
 ARG CASS_DRIVER_BUILD_CONCURRENCY="8"
 
 ARG PYTHON_BASE_IMAGE="python:3.6-slim-buster"
-ARG PYTHON_MAJOR_MINOR_VERSION="3.6"
 
 ARG AIRFLOW_PIP_VERSION=20.2.4
 
@@ -61,9 +60,6 @@ SHELL ["/bin/bash", "-o", "pipefail", "-e", "-u", "-x", "-c"]
 ARG PYTHON_BASE_IMAGE
 ENV PYTHON_BASE_IMAGE=${PYTHON_BASE_IMAGE}
 
-ARG PYTHON_MAJOR_MINOR_VERSION
-ENV PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION}
-
 # Make sure noninteractive debian install is used and language variables set
 ENV DEBIAN_FRONTEND=noninteractive LANGUAGE=C.UTF-8 LANG=C.UTF-8 
LC_ALL=C.UTF-8 \
     LC_CTYPE=C.UTF-8 LC_MESSAGES=C.UTF-8
@@ -165,12 +161,16 @@ ENV 
AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS}${ADDITIONAL_AIRFLOW_EXTRAS:+,}${ADDITIONAL_
 ARG CONSTRAINTS_GITHUB_REPOSITORY="apache/airflow"
 ENV CONSTRAINTS_GITHUB_REPOSITORY=${CONSTRAINTS_GITHUB_REPOSITORY}
 
-ARG AIRFLOW_CONSTRAINTS_REFERENCE="constraints-2-0"
-ARG AIRFLOW_CONSTRAINTS="constraints"
-ARG AIRFLOW_CONSTRAINTS="constraints"
-ARG 
AIRFLOW_CONSTRAINTS_LOCATION="https://raw.githubusercontent.com/${CONSTRAINTS_GITHUB_REPOSITORY}/${AIRFLOW_CONSTRAINTS_REFERENCE}/${AIRFLOW_CONSTRAINTS}-${PYTHON_MAJOR_MINOR_VERSION}.txt";
+ARG AIRFLOW_CONSTRAINTS="constraints-2.0"
+ENV AIRFLOW_CONSTRAINTS=${AIRFLOW_CONSTRAINTS}
+ARG AIRFLOW_CONSTRAINTS_REFERENCE=""
+ENV AIRFLOW_CONSTRAINTS_REFERENCE=${AIRFLOW_CONSTRAINTS_REFERENCE}
+ARG AIRFLOW_CONSTRAINTS_LOCATION=""
 ENV AIRFLOW_CONSTRAINTS_LOCATION=${AIRFLOW_CONSTRAINTS_LOCATION}
 
+ARG DEFAULT_CONSTRAINTS_BRANCH="constraints-master"
+ENV DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH}
+
 ENV PATH=${PATH}:/root/.local/bin
 RUN mkdir -p /root/.local/bin
 
@@ -204,6 +204,26 @@ ENV 
AIRFLOW_PRE_CACHED_PIP_PACKAGES=${AIRFLOW_PRE_CACHED_PIP_PACKAGES}
 ARG INSTALL_PROVIDERS_FROM_SOURCES="false"
 ENV INSTALL_PROVIDERS_FROM_SOURCES=${INSTALL_PROVIDERS_FROM_SOURCES}
 
+# This is airflow version that is put in the label of the image build
+ARG AIRFLOW_VERSION
+ENV AIRFLOW_VERSION=${AIRFLOW_VERSION}
+
+# Determines the way airflow is installed. By default we install airflow from 
PyPI `apache-airflow` package
+# But it also can be `.` from local installation or GitHub URL pointing to 
specific branch or tag
+# Of Airflow. Note That for local source installation you need to have local 
sources of
+# Airflow checked out together with the Dockerfile and AIRFLOW_SOURCES_FROM 
and AIRFLOW_SOURCES_TO
+# set to "." and "/opt/airflow" respectively.
+ARG AIRFLOW_INSTALLATION_METHOD="apache-airflow"
+ENV AIRFLOW_INSTALLATION_METHOD=${AIRFLOW_INSTALLATION_METHOD}
+
+# By default latest released version of airflow is installed (when empty) but 
this value can be overridden
+# and we can install version according to specification (For example ==2.0.2 
or <3.0.0).
+ARG AIRFLOW_VERSION_SPECIFICATION=""
+ENV AIRFLOW_VERSION_SPECIFICATION=${AIRFLOW_VERSION_SPECIFICATION}
+
+# Only copy common.sh to not invalidate cache on other script changes
+COPY scripts/docker/common.sh /scripts/docker/common.sh
+
 # Only copy install_airflow_from_branch_tip.sh to not invalidate cache on 
other script changes
 COPY scripts/docker/install_airflow_from_branch_tip.sh 
/scripts/docker/install_airflow_from_branch_tip.sh
 
@@ -236,27 +256,10 @@ COPY ${AIRFLOW_SOURCES_FROM} ${AIRFLOW_SOURCES_TO}
 ARG CASS_DRIVER_BUILD_CONCURRENCY
 ENV CASS_DRIVER_BUILD_CONCURRENCY=${CASS_DRIVER_BUILD_CONCURRENCY}
 
-# This is airflow version that is put in the label of the image build
-ARG AIRFLOW_VERSION
-ENV AIRFLOW_VERSION=${AIRFLOW_VERSION}
-
 # Add extra python dependencies
 ARG ADDITIONAL_PYTHON_DEPS=""
 ENV ADDITIONAL_PYTHON_DEPS=${ADDITIONAL_PYTHON_DEPS}
 
-# Determines the way airflow is installed. By default we install airflow from 
PyPI `apache-airflow` package
-# But it also can be `.` from local installation or GitHub URL pointing to 
specific branch or tag
-# Of Airflow. Note That for local source installation you need to have local 
sources of
-# Airflow checked out together with the Dockerfile and AIRFLOW_SOURCES_FROM 
and AIRFLOW_SOURCES_TO
-# set to "." and "/opt/airflow" respectively.
-ARG AIRFLOW_INSTALLATION_METHOD="apache-airflow"
-ENV AIRFLOW_INSTALLATION_METHOD=${AIRFLOW_INSTALLATION_METHOD}
-
-# By default latest released version of airflow is installed (when empty) but 
this value can be overridden
-# and we can install version according to specification (For example ==2.0.2 
or <3.0.0).
-ARG AIRFLOW_VERSION_SPECIFICATION=""
-ENV AIRFLOW_VERSION_SPECIFICATION=${AIRFLOW_VERSION_SPECIFICATION}
-
 # We can set this value to true in case we want to install .whl .tar.gz 
packages placed in the
 # docker-context-files folder. This can be done for both - additional packages 
you want to install
 # and for airflow as well (you have to set INSTALL_FROM_PYPI to false in this 
case)
@@ -274,7 +277,7 @@ ENV INSTALL_FROM_PYPI=${INSTALL_FROM_PYPI}
 # * urllib3 - required to keep boto3 happy
 # * pytz<2021.0: required by snowflake provider
 # * pyjwt<2.0.0: flask-jwt-extended requires it
-ARG EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS="chardet<4 urllib3<1.26 pytz<2021.0 
pyjwt<2.0.0"
+ARG EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS="chardet<4 urllib3<1.26 pyjwt<2.0.0"
 
 WORKDIR /opt/airflow
 
@@ -284,11 +287,10 @@ ARG CONTINUE_ON_PIP_CHECK_FAILURE="false"
 COPY scripts/docker/install*.sh /scripts/docker/
 
 # hadolint ignore=SC2086, SC2010
-RUN if [[ ${INSTALL_FROM_PYPI} == "true" ]]; then \
-        bash /scripts/docker/install_airflow.sh; \
-    fi; \
-    if [[ ${INSTALL_FROM_DOCKER_CONTEXT_FILES} == "true" ]]; then \
+RUN if [[ ${INSTALL_FROM_DOCKER_CONTEXT_FILES} == "true" ]]; then \
         bash /scripts/docker/install_from_docker_context_files.sh; \
+    elif [[ ${INSTALL_FROM_PYPI} == "true" ]]; then \
+        bash /scripts/docker/install_airflow.sh; \
     fi; \
     if [[ -n "${ADDITIONAL_PYTHON_DEPS}" ]]; then \
         bash /scripts/docker/install_additional_dependencies.sh; \
diff --git a/Dockerfile.ci b/Dockerfile.ci
index 9629621..49c31d6 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -26,9 +26,6 @@ ENV PYTHON_BASE_IMAGE=${PYTHON_BASE_IMAGE}
 ARG AIRFLOW_VERSION="2.0.0.dev0"
 ENV AIRFLOW_VERSION=$AIRFLOW_VERSION
 
-ARG PYTHON_MAJOR_MINOR_VERSION="3.6"
-ENV PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION}
-
 # Print versions
 RUN echo "Base image: ${PYTHON_BASE_IMAGE}"
 RUN echo "Airflow version: ${AIRFLOW_VERSION}"
@@ -241,11 +238,16 @@ RUN echo "Installing with extras: ${AIRFLOW_EXTRAS}."
 ARG CONSTRAINTS_GITHUB_REPOSITORY="apache/airflow"
 ENV CONSTRAINTS_GITHUB_REPOSITORY=${CONSTRAINTS_GITHUB_REPOSITORY}
 
-ARG AIRFLOW_CONSTRAINTS_REFERENCE="constraints-${AIRFLOW_BRANCH}"
 ARG AIRFLOW_CONSTRAINTS="constraints"
-ARG 
AIRFLOW_CONSTRAINTS_LOCATION="https://raw.githubusercontent.com/${CONSTRAINTS_GITHUB_REPOSITORY}/${AIRFLOW_CONSTRAINTS_REFERENCE}/${AIRFLOW_CONSTRAINTS}-${PYTHON_MAJOR_MINOR_VERSION}.txt";
+ENV AIRFLOW_CONSTRAINTS=${AIRFLOW_CONSTRAINTS}
+ARG AIRFLOW_CONSTRAINTS_REFERENCE=""
+ENV AIRFLOW_CONSTRAINTS_REFERENCE=${AIRFLOW_CONSTRAINTS_REFERENCE}
+ARG AIRFLOW_CONSTRAINTS_LOCATION=""
 ENV AIRFLOW_CONSTRAINTS_LOCATION=${AIRFLOW_CONSTRAINTS_LOCATION}
 
+ARG DEFAULT_CONSTRAINTS_BRANCH="constraints-master"
+ENV DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH}
+
 # By changing the CI build epoch we can force reinstalling Airflow and pip all 
dependencies
 # It can also be overwritten manually by setting the AIRFLOW_CI_BUILD_EPOCH 
environment variable.
 ARG AIRFLOW_CI_BUILD_EPOCH="3"
@@ -292,6 +294,9 @@ ENV PIP_PROGRESS_BAR=${PIP_PROGRESS_BAR}
 
 RUN pip install --no-cache-dir --upgrade "pip==${AIRFLOW_PIP_VERSION}"
 
+# Only copy common.sh to not invalidate further layers
+COPY scripts/docker/common.sh /scripts/docker/common.sh
+
 # Only copy install_airflow_from_branch_tip.sh to not invalidate cache on 
other script changes
 COPY scripts/docker/install_airflow_from_branch_tip.sh 
/scripts/docker/install_airflow_from_branch_tip.sh
 
diff --git a/IMAGES.rst b/IMAGES.rst
index 0304bd7..3e00f41 100644
--- a/IMAGES.rst
+++ b/IMAGES.rst
@@ -454,7 +454,6 @@ additional apt dev and runtime dependencies.
     --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
     --build-arg AIRFLOW_VERSION="2.0.0" \
     --build-arg AIRFLOW_VERSION_SPECIFICATION="==2.0.0" \
-    --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-2-0" \
     --build-arg AIRFLOW_SOURCES_FROM="empty" \
     --build-arg AIRFLOW_SOURCES_TO="/empty" \
     --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc"
@@ -489,7 +488,6 @@ based on example in `this comment 
<https://github.com/apache/airflow/issues/8605
     --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
     --build-arg AIRFLOW_VERSION="2.0.0" \
     --build-arg AIRFLOW_VERSION_SPECIFICATION="==2.0.0" \
-    --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-2-0" \
     --build-arg AIRFLOW_SOURCES_FROM="empty" \
     --build-arg AIRFLOW_SOURCES_TO="/empty" \
     --build-arg ADDITIONAL_AIRFLOW_EXTRAS="slack" \
@@ -567,7 +565,7 @@ The following build arguments (``--build-arg`` in docker 
build command) can be u
 |                                          |                                   
       | set to true. Default location from       |
 |                                          |                                   
       | GitHub is used in this case.             |
 
+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_CONSTRAINTS_REFERENCE``        | ``constraints-master``            
       | reference (branch or tag) from GitHub    |
+| ``AIRFLOW_CONSTRAINTS_REFERENCE``        |                                   
       | reference (branch or tag) from GitHub    |
 |                                          |                                   
       | repository from which constraints are    |
 |                                          |                                   
       | used. By default it is set to            |
 |                                          |                                   
       | ``constraints-master`` but can be        |
@@ -575,6 +573,7 @@ The following build arguments (``--build-arg`` in docker 
build command) can be u
 |                                          |                                   
       | ``constraints-1-10`` for 1.10.* versions |
 |                                          |                                   
       | or it could point to specific version    |
 |                                          |                                   
       | for example ``constraints-2.0.0``        |
+|                                          |                                   
       | is empty, it is auto-detected            |
 
+------------------------------------------+------------------------------------------+------------------------------------------+
 | ``INSTALL_PROVIDERS_FROM_SOURCES``       | ``true``                          
       | If set to false and image is built from  |
 |                                          |                                   
       | sources, all provider packages are not   |
diff --git a/breeze b/breeze
index 94a8b86..4df4937 100755
--- a/breeze
+++ b/breeze
@@ -881,11 +881,6 @@ function breeze::parse_arguments() {
             INSTALL_AIRFLOW_VERSION="${2}"
             # Reference is mutually exclusive with version
             INSTALL_AIRFLOW_REFERENCE=""
-            # Skip mounting local sources when airflow is installed from remote
-            if [[ ${INSTALL_AIRFLOW_VERSION} =~ ^[0-9\.]*$ ]]; then
-                echo "Install providers from PyPI"
-                INSTALL_PROVIDERS_FROM_SOURCES="false"
-            fi
             echo "Installs version of Airflow: ${INSTALL_AIRFLOW_VERSION}"
             echo
             shift 2
diff --git a/docs/docker-stack/build-arg-ref.rst 
b/docs/docker-stack/build-arg-ref.rst
index 57d4da5..2ec04c8 100644
--- a/docs/docker-stack/build-arg-ref.rst
+++ b/docs/docker-stack/build-arg-ref.rst
@@ -18,99 +18,77 @@
 Image build arguments reference
 -------------------------------
 
-The following build arguments (``--build-arg`` in docker build command) can be 
used for production images:
+The following build arguments (``--build-arg`` in docker build command) can be 
used for production images.
+Those arguments are used when you want to customize the image. You can see 
some examples of it in
+:ref:`Building from PyPI packages<image-build-pypi>`.
+
+Basic arguments
+...............
+
+Those are the most common arguments that you use when you want to build a 
custom image.
 
 
+------------------------------------------+------------------------------------------+------------------------------------------+
 | Build argument                           | Default value                     
       | Description                              |
 
+==========================================+==========================================+==========================================+
 | ``PYTHON_BASE_IMAGE``                    | ``python:3.6-slim-buster``        
       | Base python image.                       |
 
+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``PYTHON_MAJOR_MINOR_VERSION``           | ``3.6``                           
       | major/minor version of Python (should    |
-|                                          |                                   
       | match base image).                       |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_VERSION``                      | ``2.0.1.dev0``                    
       | version of Airflow.                      |
+| ``AIRFLOW_VERSION``                      | ``2.0.1``                         
       | version of Airflow.                      |
 
+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_REPO``                         | ``apache/airflow``                
       | the repository from which PIP            |
-|                                          |                                   
       | dependencies are pre-installed.          |
+| ``AIRFLOW_EXTRAS``                       | (see Dockerfile)                  
       | Default extras with which airflow is     |
+|                                          |                                   
       | installed.                               |
 
+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_BRANCH``                       | ``master``                        
       | the branch from which PIP dependencies   |
-|                                          |                                   
       | are pre-installed initially.             |
+| ``ADDITIONAL_AIRFLOW_EXTRAS``            |                                   
       | Optional additional extras with which    |
+|                                          |                                   
       | airflow is installed.                    |
 
+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_CONSTRAINTS_LOCATION``         |                                   
       | If not empty, it will override the       |
-|                                          |                                   
       | source of the constraints with the       |
-|                                          |                                   
       | specified URL or file. Note that the     |
-|                                          |                                   
       | file has to be in docker context so      |
-|                                          |                                   
       | it's best to place such file in          |
-|                                          |                                   
       | one of the folders included in           |
-|                                          |                                   
       | ``.dockerignore`` file.                  |
+| ``AIRFLOW_HOME``                         | ``/opt/airflow``                  
       | Airflow’s HOME (that’s where logs and    |
+|                                          |                                   
       | SQLite databases are stored).            |
 
+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_CONSTRAINTS_REFERENCE``        | ``constraints-master``            
       | Reference (branch or tag) from GitHub    |
-|                                          |                                   
       | where constraints file is taken from     |
-|                                          |                                   
       | It can be ``constraints-master`` but     |
-|                                          |                                   
       | also can be ``constraints-1-10`` for     |
-|                                          |                                   
       | 1.10.* installation. In case of building |
-|                                          |                                   
       | specific version you want to point it    |
-|                                          |                                   
       | to specific tag, for example             |
-|                                          |                                   
       | ``constraints-1.10.15``.                 |
+| ``AIRFLOW_USER_HOME_DIR``                | ``/home/airflow``                 
       | Home directory of the Airflow user.      |
 
+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``INSTALL_PROVIDERS_FROM_SOURCES``       | ``false``                         
       | If set to ``true`` and image is built    |
-|                                          |                                   
       | from sources, all provider packages are  |
-|                                          |                                   
       | installed from sources rather than from  |
-|                                          |                                   
       | packages. It has no effect when          |
-|                                          |                                   
       | installing from PyPI or GitHub repo.     |
+| ``AIRFLOW_PIP_VERSION``                  | ``20.2.4``                        
       | PIP version used.                        |
 
+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_EXTRAS``                       | (see Dockerfile)                  
       | Default extras with which airflow is     |
-|                                          |                                   
       | installed.                               |
+| ``PIP_PROGRESS_BAR``                     | ``on``                            
       | Progress bar for PIP installation        |
 
+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``INSTALL_FROM_PYPI``                    | ``true``                          
       | If set to true, Airflow is installed     |
-|                                          |                                   
       | from PyPI. if you want to install        |
-|                                          |                                   
       | Airflow from self-build package          |
-|                                          |                                   
       | you can set it to false, put package in  |
-|                                          |                                   
       | ``docker-context-files`` and set         |
-|                                          |                                   
       | ``INSTALL_FROM_DOCKER_CONTEXT_FILES`` to |
-|                                          |                                   
       | ``true``. For this you have to also keep |
-|                                          |                                   
       | ``AIRFLOW_PRE_CACHED_PIP_PACKAGES`` flag |
-|                                          |                                   
       | set to ``false``.                        |
+| ``AIRFLOW_UID``                          | ``50000``                         
       | Airflow user UID.                        |
 
+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_PRE_CACHED_PIP_PACKAGES``      | ``false``                         
       | Allows to pre-cache airflow PIP packages |
-|                                          |                                   
       | from the GitHub of Apache Airflow        |
-|                                          |                                   
       | This allows to optimize iterations for   |
-|                                          |                                   
       | Image builds and speeds up CI builds.    |
+| ``AIRFLOW_GID``                          | ``50000``                         
       | Airflow group GID. Note that most files  |
+|                                          |                                   
       | created on behalf of airflow user belong |
+|                                          |                                   
       | to the ``root`` group (0) to keep        |
+|                                          |                                   
       | OpenShift Guidelines compatibility.      |
 
+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``INSTALL_FROM_DOCKER_CONTEXT_FILES``    | ``false``                         
       | If set to true, Airflow, providers and   |
-|                                          |                                   
       | all dependencies are installed from      |
-|                                          |                                   
       | from locally built/downloaded            |
-|                                          |                                   
       | .whl and .tar.gz files placed in the     |
-|                                          |                                   
       | ``docker-context-files``. In certain     |
-|                                          |                                   
       | corporate environments, this is required |
-|                                          |                                   
       | to install airflow from such pre-vetted  |
-|                                          |                                   
       | packages rather than from PyPI. For this |
-|                                          |                                   
       | to work, also set ``INSTALL_FROM_PYPI``. |
-|                                          |                                   
       | Note that packages starting with         |
-|                                          |                                   
       | ``apache?airflow`` glob are treated      |
-|                                          |                                   
       | differently than other packages. All     |
-|                                          |                                   
       | ``apache?airflow`` packages are          |
-|                                          |                                   
       | installed with dependencies limited by   |
-|                                          |                                   
       | airflow constraints. All other packages  |
-|                                          |                                   
       | are installed without dependencies       |
-|                                          |                                   
       | 'as-is'. If you wish to install airflow  |
-|                                          |                                   
       | via 'pip download' with all dependencies |
-|                                          |                                   
       | downloaded, you have to rename the       |
-|                                          |                                   
       | apache airflow and provider packages to  |
-|                                          |                                   
       | not start with ``apache?airflow`` glob.  |
+| ``AIRFLOW_CONSTRAINTS_REFERENCE``        |                                   
       | Reference (branch or tag) from GitHub    |
+|                                          |                                   
       | where constraints file is taken from     |
+|                                          |                                   
       | It can be ``constraints-master`` but     |
+|                                          |                                   
       | can be ``constraints-1-10`` for 1.10.*   |
+|                                          |                                   
       | versions of ``constraints-2-0`` for      |
+|                                          |                                   
       | 2.0.* installation. In case of building  |
+|                                          |                                   
       | specific version you want to point it    |
+|                                          |                                   
       | to specific tag, for example             |
+|                                          |                                   
       | ``constraints-2.0.1``.                   |
+|                                          |                                   
       | Auto-detected if empty.                  |
 
+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``UPGRADE_TO_NEWER_DEPENDENCIES``        | ``false``                         
       | If set to true, the dependencies are     |
-|                                          |                                   
       | upgraded to newer versions matching      |
-|                                          |                                   
       | setup.py before installation.            |
+
+Image optimization options
+..........................
+
+The main advantage of Customization method of building Airflow image, is that 
it allows to build highly optimized image because
+the final image (RUNTIME) might not contain all the dependencies that are 
needed to build and install all other dependencies
+(DEV). Those arguments allow to control what is installed in the DEV image and 
what is installed in RUNTIME one, thus
+allowing to produce much more optimized images. See :ref:`Building optimized 
images<image-build-optimized>`.
+for examples of using those arguments.
+
 
+------------------------------------------+------------------------------------------+------------------------------------------+
+| Build argument                           | Default value                     
       | Description                              |
++==========================================+==========================================+==========================================+
 | ``CONTINUE_ON_PIP_CHECK_FAILURE``        | ``false``                         
       | By default the image build fails if pip  |
 |                                          |                                   
       | check fails for it. This is good for     |
 |                                          |                                   
       | interactive building but on CI the       |
 |                                          |                                   
       | image should be built regardless - we    |
 |                                          |                                   
       | have a separate step to verify image.    |
 
+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``ADDITIONAL_AIRFLOW_EXTRAS``            |                                   
       | Optional additional extras with which    |
-|                                          |                                   
       | airflow is installed.                    |
+| ``UPGRADE_TO_NEWER_DEPENDENCIES``        | ``false``                         
       | If set to true, the dependencies are     |
+|                                          |                                   
       | upgraded to newer versions matching      |
+|                                          |                                   
       | setup.py before installation.            |
 
+------------------------------------------+------------------------------------------+------------------------------------------+
 | ``ADDITIONAL_PYTHON_DEPS``               |                                   
       | Optional python packages to extend       |
 |                                          |                                   
       | the image with some extra dependencies.  |
@@ -149,18 +127,6 @@ The following build arguments (``--build-arg`` in docker 
build command) can be u
 | ``ADDITIONAL_RUNTIME_APT_ENV``           |                                   
       | Additional env variables defined         |
 |                                          |                                   
       | when installing runtime deps.            |
 
+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_HOME``                         | ``/opt/airflow``                  
       | Airflow’s HOME (that’s where logs and    |
-|                                          |                                   
       | SQLite databases are stored).            |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_UID``                          | ``50000``                         
       | Airflow user UID.                        |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_GID``                          | ``50000``                         
       | Airflow group GID. Note that most files  |
-|                                          |                                   
       | created on behalf of airflow user belong |
-|                                          |                                   
       | to the ``root`` group (0) to keep        |
-|                                          |                                   
       | OpenShift Guidelines compatibility.      |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_USER_HOME_DIR``                | ``/home/airflow``                 
       | Home directory of the Airflow user.      |
-+------------------------------------------+------------------------------------------+------------------------------------------+
 | ``CASS_DRIVER_BUILD_CONCURRENCY``        | ``8``                             
       | Number of processors to use for          |
 |                                          |                                   
       | cassandra PIP install (speeds up         |
 |                                          |                                   
       | installing in case cassandra extra is    |
@@ -170,43 +136,106 @@ The following build arguments (``--build-arg`` in docker 
build command) can be u
 |                                          |                                   
       | The mysql extra is removed from extras   |
 |                                          |                                   
       | if the client is not installed.          |
 
+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_PIP_VERSION``                  | ``20.2.4``                        
       | PIP version used.                        |
+
+Installing Airflow using different methods
+..........................................
+
+Those parameters are useful only if you want to install Airflow using a
different installation method than the default
+(installing from PyPI packages).
+
+This is usually only useful if you have your own fork of Airflow and want to 
build the images locally from
+those sources - either locally or directly from GitHub sources. This way you 
do not need to release your
+Airflow and Providers via PyPI - they can be installed directly from sources 
or from GitHub repository.
+Another installation option is to build Airflow from previously prepared 
binary Python packages which might
+be useful if you need to build Airflow in environments that require high 
levels of security.
+
+You can see some examples of those in:
+  * :ref:`Building from GitHub<image-build-github>`,
+  * :ref:`Using custom installation sources<image-build-custom>`,
+  * :ref:`Build images in security restricted 
environments<image-build-secure-environments>`
+
 
+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``PIP_PROGRESS_BAR``                     | ``on``                            
       | Progress bar for PIP installation        |
+| Build argument                           | Default value                     
       | Description                              |
++==========================================+==========================================+==========================================+
+| ``AIRFLOW_INSTALLATION_METHOD``          | ``apache-airflow``                
       | Installation method of Apache Airflow.   |
+|                                          |                                   
       | ``apache-airflow`` for installation from |
+|                                          |                                   
       | PyPI. It can be GitHub repository URL    |
+|                                          |                                   
       | including branch or tag to install from  |
+|                                          |                                   
       | that repository or "." to install from   |
+|                                          |                                   
       | local sources. Installing from sources   |
+|                                          |                                   
       | requires appropriate values of the       |
+|                                          |                                   
       | ``AIRFLOW_SOURCES_FROM`` and             |
+|                                          |                                   
       | ``AIRFLOW_SOURCES_TO`` variables (see    |
+|                                          |                                   
       | below)                                   |
++------------------------------------------+------------------------------------------+------------------------------------------+
+| ``AIRFLOW_SOURCES_FROM``                 | ``empty``                         
       | Sources of Airflow. Set it to "." when   |
+|                                          |                                   
       | you install Airflow from local sources   |
++------------------------------------------+------------------------------------------+------------------------------------------+
+| ``AIRFLOW_SOURCES_TO``                   | ``/empty``                        
       | Target for Airflow sources. Set to       |
+|                                          |                                   
       | "/opt/airflow" when you install Airflow  |
+|                                          |                                   
       | from local sources.                      |
++------------------------------------------+------------------------------------------+------------------------------------------+
+| ``AIRFLOW_VERSION_SPECIFICATION``        |                                   
       | Optional - might be used to set a limit  |
+|                                          |                                   
       | for Airflow version installation - for   |
+|                                          |                                   
       | example ``<2.0.2`` for automated builds. |
++------------------------------------------+------------------------------------------+------------------------------------------+
+| ``INSTALL_PROVIDERS_FROM_SOURCES``       | ``false``                         
       | If set to ``true`` and image is built    |
+|                                          |                                   
       | from sources, all provider packages are  |
+|                                          |                                   
       | installed from sources rather than from  |
+|                                          |                                   
       | packages. It has no effect when          |
+|                                          |                                   
       | installing from PyPI or GitHub repo.     |
++------------------------------------------+------------------------------------------+------------------------------------------+
+| ``AIRFLOW_CONSTRAINTS_LOCATION``         |                                   
       | If not empty, it will override the       |
+|                                          |                                   
       | source of the constraints with the       |
+|                                          |                                   
       | specified URL or file. Note that the     |
+|                                          |                                   
       | file has to be in docker context so      |
+|                                          |                                   
       | it's best to place such file in          |
+|                                          |                                   
       | one of the folders included in           |
+|                                          |                                   
       | ``.dockerignore`` file.                  |
 
+------------------------------------------+------------------------------------------+------------------------------------------+
+| ``INSTALL_FROM_DOCKER_CONTEXT_FILES``    | ``false``                         
       | If set to true, Airflow, providers and   |
+|                                          |                                   
       | all dependencies are installed           |
+|                                          |                                   
       | from locally built/downloaded            |
+|                                          |                                   
       | .whl and .tar.gz files placed in the     |
+|                                          |                                   
       | ``docker-context-files``. In certain     |
+|                                          |                                   
       | corporate environments, this is required |
+|                                          |                                   
       | to install airflow from such pre-vetted  |
+|                                          |                                   
       | packages rather than from PyPI. For this |
+|                                          |                                   
       | to work, also set ``INSTALL_FROM_PYPI``. |
+|                                          |                                   
       | Note that packages starting with         |
+|                                          |                                   
       | ``apache?airflow`` glob are treated      |
+|                                          |                                   
       | differently than other packages. All     |
+|                                          |                                   
       | ``apache?airflow`` packages are          |
+|                                          |                                   
       | installed with dependencies limited by   |
+|                                          |                                   
       | airflow constraints. All other packages  |
+|                                          |                                   
       | are installed without dependencies       |
+|                                          |                                   
       | 'as-is'. If you wish to install airflow  |
+|                                          |                                   
       | via 'pip download' with all dependencies |
+|                                          |                                   
       | downloaded, you have to rename the       |
+|                                          |                                   
       | apache airflow and provider packages to  |
+|                                          |                                   
       | not start with ``apache?airflow`` glob.  |
++------------------------------------------+------------------------------------------+------------------------------------------+
+
+Pre-caching PIP dependencies
+............................
+
+When the image is built from PyPI, by default pre-caching of PIP dependencies is 
used. This is in order to speed up incremental
+builds during development. When pre-cached PIP dependencies are used and 
``setup.py`` or ``setup.cfg`` changes, the
+PIP dependencies are already pre-installed, thus resulting in much faster 
image rebuild. This is purely an optimization
+of time needed to build the images and should be disabled if you want to 
install Airflow from
+docker context files.
 
-There are build arguments that determine the installation mechanism of Apache 
Airflow for the
-production image. There are three types of build:
-
-* From local sources (by default for example when you use ``docker build .``)
-* You can build the image from released PyPI airflow package (used to build 
the official Docker image)
-* You can build the image from any version in GitHub repository(this is used 
mostly for system testing).
-
-+-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
-| Build argument                    | Default                | What to specify 
                                                                  |
-+===================================+========================+===================================================================================+
-| ``AIRFLOW_INSTALLATION_METHOD``   | ``apache-airflow``     | Should point to 
the installation method of Apache Airflow. It can be              |
-|                                   |                        | 
``apache-airflow`` for installation from packages and URL to installation from  
  |
-|                                   |                        | GitHub 
repository tag or branch or "." to install from sources.                   |
-|                                   |                        | Note that 
installing from local sources requires appropriate values of the        |
-|                                   |                        | 
``AIRFLOW_SOURCES_FROM`` and ``AIRFLOW_SOURCES_TO`` variables as described 
below. |
-|                                   |                        | Only used when 
``INSTALL_FROM_PYPI`` is set to ``true``.                          |
-+-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
-| ``AIRFLOW_VERSION_SPECIFICATION`` |                        | Optional - 
might be used for package installation of different Airflow version    |
-|                                   |                        | for 
example"==2.0.1". For consistency, you should also set``AIRFLOW_VERSION``     |
-|                                   |                        | to the same 
value AIRFLOW_VERSION is resolved as label in the image created.      |
-+-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
-| ``AIRFLOW_CONSTRAINTS_REFERENCE`` | ``constraints-master`` | Reference 
(branch or tag) from GitHub where constraints file is taken from.       |
-|                                   |                        | It can be 
``constraints-master`` but also can be``constraints-1-10`` for          |
-|                                   |                        | 1.10.*  
installations. In case of building specific version                       |
-|                                   |                        | you want to 
point it to specific tag, for example ``constraints-2.0.1``           |
-+-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
-| ``AIRFLOW_WWW``                   | ``www``                | In case of 
Airflow 2.0 it should be "www", in case of Airflow 1.10                |
-|                                   |                        | series it 
should be "www_rbac".                                                   |
-+-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
-| ``AIRFLOW_SOURCES_FROM``          | ``empty``              | Sources of 
Airflow. Set it to "." when you install airflow from                   |
-|                                   |                        | local sources.  
                                                                  |
-+-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
-| ``AIRFLOW_SOURCES_TO``            | ``/empty``             | Target for 
Airflow sources. Set to "/opt/airflow" when                            |
-|                                   |                        | you want to 
install airflow from local sources.                                   |
-+-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
++------------------------------------------+------------------------------------------+------------------------------------------+
+| Build argument                           | Default value                     
       | Description                              |
++==========================================+==========================================+==========================================+
+| ``AIRFLOW_BRANCH``                       | ``master``                        
       | the branch from which PIP dependencies   |
+|                                          |                                   
       | are pre-installed initially.             |
++------------------------------------------+------------------------------------------+------------------------------------------+
+| ``AIRFLOW_REPO``                         | ``apache/airflow``                
       | the repository from which PIP            |
+|                                          |                                   
       | dependencies are pre-installed.          |
++------------------------------------------+------------------------------------------+------------------------------------------+
+| ``AIRFLOW_PRE_CACHED_PIP_PACKAGES``      | ``false``                         
       | Enables pre-caching airflow PIP packages |
+|                                          |                                   
       | from the GitHub of Apache Airflow.       |
+|                                          |                                   
       | This allows to optimize iterations for   |
+|                                          |                                   
       | Image builds and speeds up CI builds.    |
++------------------------------------------+------------------------------------------+------------------------------------------+
diff --git 
a/docs/docker-stack/docker-examples/customizing/add-build-essential-custom.sh 
b/docs/docker-stack/docker-examples/customizing/add-build-essential-custom.sh
new file mode 100755
index 0000000..7164470
--- /dev/null
+++ 
b/docs/docker-stack/docker-examples/customizing/add-build-essential-custom.sh
@@ -0,0 +1,33 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# This is an example docker build script. It is not intended for PRODUCTION use
+set -euo pipefail
+AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../../" && pwd)"
+cd "${AIRFLOW_SOURCES}"
+
+# [START build]
+docker build . \
+    --build-arg PYTHON_BASE_IMAGE="python:3.6-slim-buster" \
+    --build-arg AIRFLOW_VERSION="2.0.1" \
+    --build-arg ADDITIONAL_PYTHON_DEPS="mpi4py" \
+    --build-arg ADDITIONAL_DEV_APT_DEPS="libopenmpi-dev" \
+    --build-arg ADDITIONAL_RUNTIME_APT_DEPS="openmpi-common" \
+    --tag "$(basename "$0")"
+# [END build]
+docker rmi --force "$(basename "$0")"
diff --git a/docs/docker-stack/docker-examples/customizing/custom-sources.sh 
b/docs/docker-stack/docker-examples/customizing/custom-sources.sh
new file mode 100755
index 0000000..242fc2e
--- /dev/null
+++ b/docs/docker-stack/docker-examples/customizing/custom-sources.sh
@@ -0,0 +1,48 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# This is an example docker build script. It is not intended for PRODUCTION use
+set -euo pipefail
+AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../../" && pwd)"
+cd "${AIRFLOW_SOURCES}"
+
+# [START build]
+docker build . -f Dockerfile \
+    --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
+    --build-arg AIRFLOW_VERSION="2.0.1" \
+    --build-arg ADDITIONAL_AIRFLOW_EXTRAS="slack,odbc" \
+    --build-arg ADDITIONAL_PYTHON_DEPS=" \
+        azure-storage-blob \
+        oauth2client \
+        beautifulsoup4 \
+        dateparser \
+        rocketchat_API \
+        typeform" \
+    --build-arg ADDITIONAL_DEV_APT_COMMAND="curl 
https://packages.microsoft.com/keys/microsoft.asc | \
+    apt-key add --no-tty - && \
+    curl https://packages.microsoft.com/config/debian/10/prod.list > 
/etc/apt/sources.list.d/mssql-release.list" \
+    --build-arg ADDITIONAL_DEV_APT_ENV="ACCEPT_EULA=Y" \
+    --build-arg ADDITIONAL_DEV_APT_DEPS="msodbcsql17 unixodbc-dev g++" \
+    --build-arg ADDITIONAL_RUNTIME_APT_COMMAND="curl 
https://packages.microsoft.com/keys/microsoft.asc | \
+    apt-key add --no-tty - && \
+    curl https://packages.microsoft.com/config/debian/10/prod.list > 
/etc/apt/sources.list.d/mssql-release.list" \
+    --build-arg ADDITIONAL_RUNTIME_APT_ENV="ACCEPT_EULA=Y" \
+    --build-arg ADDITIONAL_RUNTIME_APT_DEPS="msodbcsql17 unixodbc git procps 
vim" \
+    --tag "$(basename "$0")"
+# [END build]
+docker rmi --force "$(basename "$0")"
diff --git 
a/docs/docker-stack/docker-examples/customizing/github-different-repository.sh 
b/docs/docker-stack/docker-examples/customizing/github-different-repository.sh
new file mode 100755
index 0000000..b980b5b
--- /dev/null
+++ 
b/docs/docker-stack/docker-examples/customizing/github-different-repository.sh
@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# This is an example docker build script. It is not intended for PRODUCTION use
+set -euo pipefail
+AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../../" && pwd)"
+cd "${AIRFLOW_SOURCES}"
+# [START build]
+docker build . \
+    --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-buster" \
+    --build-arg 
AIRFLOW_INSTALLATION_METHOD="https://github.com/potiuk/airflow/archive/master.tar.gz#egg=apache-airflow";
 \
+    --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-master" \
+    --build-arg CONSTRAINTS_GITHUB_REPOSITORY="potiuk/airflow" \
+    --tag "$(basename "$0")"
+# [END build]
+docker rmi --force "$(basename "$0")"
diff --git a/docs/docker-stack/docker-examples/customizing/github-master.sh 
b/docs/docker-stack/docker-examples/customizing/github-master.sh
new file mode 100755
index 0000000..4237e91
--- /dev/null
+++ b/docs/docker-stack/docker-examples/customizing/github-master.sh
@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# This is an example docker build script. It is not intended for PRODUCTION use
+set -euo pipefail
+AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../../" && pwd)"
+cd "${AIRFLOW_SOURCES}"
+
+# [START build]
+docker build . \
+    --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
+    --build-arg 
AIRFLOW_INSTALLATION_METHOD="https://github.com/apache/airflow/archive/master.tar.gz#egg=apache-airflow";
 \
+    --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-master" \
+    --tag "$(basename "$0")"
+# [END build]
+docker rmi --force "$(basename "$0")"
diff --git a/docs/docker-stack/docker-examples/customizing/github-v2-0-test.sh 
b/docs/docker-stack/docker-examples/customizing/github-v2-0-test.sh
new file mode 100755
index 0000000..b893618
--- /dev/null
+++ b/docs/docker-stack/docker-examples/customizing/github-v2-0-test.sh
@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# This is an example docker build script. It is not intended for PRODUCTION use
+set -euo pipefail
+AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../../" && pwd)"
+cd "${AIRFLOW_SOURCES}"
+
+# [START build]
+docker build . \
+    --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-buster" \
+    --build-arg 
AIRFLOW_INSTALLATION_METHOD="https://github.com/apache/airflow/archive/v2-0-test.tar.gz#egg=apache-airflow";
 \
+    --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-2-0" \
+    --tag "$(basename "$0")"
+# [END build]
+docker rmi --force "$(basename "$0")"
diff --git 
a/docs/docker-stack/docker-examples/customizing/pypi-dev-runtime-deps.sh 
b/docs/docker-stack/docker-examples/customizing/pypi-dev-runtime-deps.sh
new file mode 100755
index 0000000..43a8092
--- /dev/null
+++ b/docs/docker-stack/docker-examples/customizing/pypi-dev-runtime-deps.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# This is an example docker build script. It is not intended for PRODUCTION use
+set -euo pipefail
+AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../../" && pwd)"
+cd "${AIRFLOW_SOURCES}"
+
+# [START build]
+docker build . \
+    --build-arg PYTHON_BASE_IMAGE="python:3.6-slim-buster" \
+    --build-arg AIRFLOW_VERSION="2.0.1" \
+    --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc" \
+    --build-arg ADDITIONAL_PYTHON_DEPS="pandas" \
+    --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++" \
+    --build-arg ADDITIONAL_RUNTIME_APT_DEPS="default-jre-headless" \
+    --tag "$(basename "$0")"
+# [END build]
+docker rmi --force "$(basename "$0")"
diff --git 
a/docs/docker-stack/docker-examples/customizing/pypi-extras-and-deps.sh 
b/docs/docker-stack/docker-examples/customizing/pypi-extras-and-deps.sh
new file mode 100755
index 0000000..7d150bc
--- /dev/null
+++ b/docs/docker-stack/docker-examples/customizing/pypi-extras-and-deps.sh
@@ -0,0 +1,32 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# This is an example docker build script. It is not intended for PRODUCTION use
+set -euo pipefail
+AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../../" && pwd)"
+cd "${AIRFLOW_SOURCES}"
+
+# [START build]
+docker build . \
+    --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-buster" \
+    --build-arg AIRFLOW_VERSION="2.0.1" \
+    --build-arg ADDITIONAL_AIRFLOW_EXTRAS="mssql,hdfs" \
+    --build-arg ADDITIONAL_PYTHON_DEPS="oauth2client" \
+    --tag "$(basename "$0")"
+# [END build]
+docker rmi --force "$(basename "$0")"
diff --git 
a/docs/docker-stack/docker-examples/customizing/pypi-selected-version.sh 
b/docs/docker-stack/docker-examples/customizing/pypi-selected-version.sh
new file mode 100755
index 0000000..98e06a1
--- /dev/null
+++ b/docs/docker-stack/docker-examples/customizing/pypi-selected-version.sh
@@ -0,0 +1,30 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# This is an example docker build script. It is not intended for PRODUCTION use
+set -euo pipefail
+AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../../" && pwd)"
+cd "${AIRFLOW_SOURCES}"
+
+# [START build]
+docker build . \
+    --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
+    --build-arg AIRFLOW_VERSION="2.0.1" \
+    --tag "$(basename "$0")"
+# [END build]
+docker rmi --force "$(basename "$0")"
diff --git a/docs/docker-stack/docker-examples/customizing/stable-airflow.sh 
b/docs/docker-stack/docker-examples/customizing/stable-airflow.sh
new file mode 100755
index 0000000..d3471ac
--- /dev/null
+++ b/docs/docker-stack/docker-examples/customizing/stable-airflow.sh
@@ -0,0 +1,28 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# This is an example docker build script. It is not intended for PRODUCTION use
+set -euo pipefail
+AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../../" && pwd)"
+cd "${AIRFLOW_SOURCES}"
+
+# [START build]
+docker build . \
+    --tag "$(basename "$0")"
+# [END build]
+docker rmi --force "$(basename "$0")"
diff --git 
a/docs/docker-stack/docker-examples/extending/add-apt-packages/Dockerfile 
b/docs/docker-stack/docker-examples/extending/add-apt-packages/Dockerfile
new file mode 100644
index 0000000..8fb128e
--- /dev/null
+++ b/docs/docker-stack/docker-examples/extending/add-apt-packages/Dockerfile
@@ -0,0 +1,27 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This is an example Dockerfile. It is not intended for PRODUCTION use
+# [START Dockerfile]
+FROM apache/airflow:2.0.1
+USER root
+RUN apt-get update \
+  && apt-get install -y --no-install-recommends \
+         vim \
+  && apt-get autoremove -yqq --purge \
+  && apt-get clean \
+  && rm -rf /var/lib/apt/lists/*
+USER airflow
+# [END Dockerfile]
diff --git 
a/docs/docker-stack/docker-examples/extending/add-build-essential-extend/Dockerfile
 
b/docs/docker-stack/docker-examples/extending/add-build-essential-extend/Dockerfile
new file mode 100644
index 0000000..f0dc0d1
--- /dev/null
+++ 
b/docs/docker-stack/docker-examples/extending/add-build-essential-extend/Dockerfile
@@ -0,0 +1,28 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This is an example Dockerfile. It is not intended for PRODUCTION use
+# [START Dockerfile]
+FROM apache/airflow:2.0.1
+USER root
+RUN apt-get update \
+  && apt-get install -y --no-install-recommends \
+         build-essential libopenmpi-dev \
+  && apt-get autoremove -yqq --purge \
+  && apt-get clean \
+  && rm -rf /var/lib/apt/lists/*
+USER airflow
+RUN pip install --no-cache-dir mpi4py
+# [END Dockerfile]
diff --git 
a/docs/docker-stack/docker-examples/extending/add-pypi-packages/Dockerfile 
b/docs/docker-stack/docker-examples/extending/add-pypi-packages/Dockerfile
new file mode 100644
index 0000000..401e493
--- /dev/null
+++ b/docs/docker-stack/docker-examples/extending/add-pypi-packages/Dockerfile
@@ -0,0 +1,20 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This is an example Dockerfile. It is not intended for PRODUCTION use
+# [START Dockerfile]
+FROM apache/airflow:2.0.1
+RUN pip install --no-cache-dir lxml
+# [END Dockerfile]
diff --git 
a/docs/docker-stack/docker-examples/extending/embedding-dags/Dockerfile 
b/docs/docker-stack/docker-examples/extending/embedding-dags/Dockerfile
new file mode 100644
index 0000000..9213729
--- /dev/null
+++ b/docs/docker-stack/docker-examples/extending/embedding-dags/Dockerfile
@@ -0,0 +1,22 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This is an example Dockerfile. It is not intended for PRODUCTION use
+# [START Dockerfile]
+FROM apache/airflow:2.0.1
+
+COPY --chown=airflow:root test_dag.py /opt/airflow/dags
+
+# [END Dockerfile]
diff --git 
a/docs/docker-stack/docker-examples/extending/embedding-dags/test_dag.py 
b/docs/docker-stack/docker-examples/extending/embedding-dags/test_dag.py
new file mode 100644
index 0000000..467c8c3
--- /dev/null
+++ b/docs/docker-stack/docker-examples/extending/embedding-dags/test_dag.py
@@ -0,0 +1,39 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# [START dag]
+"""This dag only runs some simple tasks to test Airflow's task execution."""
+from datetime import datetime, timedelta
+
+from airflow.models.dag import DAG
+from airflow.operators.dummy import DummyOperator
+from airflow.utils.dates import days_ago
+
+now = datetime.now()
+now_to_the_hour = (now - timedelta(0, 0, 0, 0, 0, 3)).replace(minute=0, 
second=0, microsecond=0)
+START_DATE = now_to_the_hour
+DAG_NAME = 'test_dag_v1'
+
+default_args = {'owner': 'airflow', 'depends_on_past': True, 'start_date': 
days_ago(2)}
+dag = DAG(DAG_NAME, schedule_interval='*/10 * * * *', 
default_args=default_args)
+
+run_this_1 = DummyOperator(task_id='run_this_1', dag=dag)
+run_this_2 = DummyOperator(task_id='run_this_2', dag=dag)
+run_this_2.set_upstream(run_this_1)
+run_this_3 = DummyOperator(task_id='run_this_3', dag=dag)
+run_this_3.set_upstream(run_this_2)
+# [END dag]
diff --git 
a/docs/docker-stack/docker-examples/restricted/restricted_environments.sh 
b/docs/docker-stack/docker-examples/restricted/restricted_environments.sh
new file mode 100755
index 0000000..e7a3699
--- /dev/null
+++ b/docs/docker-stack/docker-examples/restricted/restricted_environments.sh
@@ -0,0 +1,44 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# This is an example docker build script. It is not intended for PRODUCTION use
+set -euo pipefail
+AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../../" && pwd)"
+cd "${AIRFLOW_SOURCES}"
+
+# [START download]
+rm docker-context-files/*.whl docker-context-files/*.tar.gz 
docker-context-files/*.txt || true
+
+curl -Lo "docker-context-files/constraints-3.7.txt" \
+    
https://raw.githubusercontent.com/apache/airflow/constraints-2.0.1/constraints-3.7.txt
+
+pip download --dest docker-context-files \
+    --constraint docker-context-files/constraints-3.7.txt  \
+    
"apache-airflow[async,aws,azure,celery,dask,elasticsearch,gcp,kubernetes,postgres,redis,slack,ssh,statsd,virtualenv]==2.0.1"
+# [END download]
+
+# [START build]
+docker build . \
+    --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
+    --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
+    --build-arg AIRFLOW_VERSION="2.0.1" \
+    --build-arg INSTALL_MYSQL_CLIENT="false" \
+    --build-arg AIRFLOW_PRE_CACHED_PIP_PACKAGES="false" \
+    --build-arg INSTALL_FROM_DOCKER_CONTEXT_FILES="true" \
+    --build-arg 
AIRFLOW_CONSTRAINTS_LOCATION="/docker-context-files/constraints-3.7.txt"
+# [END build]
diff --git a/scripts/ci/images/ci_run_prod_image_test.sh 
b/scripts/ci/images/ci_run_prod_image_test.sh
new file mode 100755
index 0000000..3039eca
--- /dev/null
+++ b/scripts/ci/images/ci_run_prod_image_test.sh
@@ -0,0 +1,50 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# shellcheck source=scripts/ci/libraries/_initialization.sh
+. "$(dirname "${BASH_SOURCE[0]}")/../libraries/_initialization.sh"
+
+initialization::set_output_color_variables
+
+job_name=$1
+file=$2
+
+set +e
+
+if [[ ${file} == *".sh" ]]; then
+    "${file}"
+    res=$?
+elif [[ ${file} == *"Dockerfile" ]]; then
+    cd "$(dirname "${file}")" || exit 1
+    docker build . --tag "${job_name}"
+    res=$?
+    docker rmi --force "${job_name}"
+else
+    echo "Bad file ${file}. Should be either a Dockerfile or script"
+    exit 1
+fi
+# Print status to status file
+echo "${res}" >"${PARALLEL_JOB_STATUS}"
+
+echo
+# print status to log
+if [[ ${res} == "0" ]]; then
+    echo "${COLOR_GREEN}Extend PROD image test ${job_name} 
succeeded${COLOR_RESET}"
+else
+    echo "${COLOR_RED}Extend PROD image test ${job_name} failed${COLOR_RESET}"
+fi
+echo
diff --git a/scripts/ci/images/ci_test_examples_of_prod_image_building.sh 
b/scripts/ci/images/ci_test_examples_of_prod_image_building.sh
new file mode 100755
index 0000000..7e04535
--- /dev/null
+++ b/scripts/ci/images/ci_test_examples_of_prod_image_building.sh
@@ -0,0 +1,91 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# shellcheck source=scripts/ci/libraries/_script_init.sh
+. "$(dirname "${BASH_SOURCE[0]}")/../libraries/_script_init.sh"
+
+SEMAPHORE_NAME="image_tests"
+export SEMAPHORE_NAME
+
+DOCKER_EXAMPLES_DIR=${AIRFLOW_SOURCES}/docs/docker-stack/docker-examples/
+export DOCKER_EXAMPLES_DIR
+
+# Launches parallel building of images. Redirects output to log files in the right directories
+# $1 - bash file to execute in parallel
+# $2 - name of the job
+function run_image_test_job() {
+    local file=$1
+
+    local job_name=$2
+    mkdir -p "${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${job_name}"
+    export 
JOB_LOG="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${job_name}/stdout"
+    export 
PARALLEL_JOB_STATUS="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${job_name}/status"
+    parallel --ungroup --bg --semaphore --semaphorename "${SEMAPHORE_NAME}" \
+        --jobs "${MAX_PARALLEL_IMAGE_JOBS}" \
+            "$(dirname "${BASH_SOURCE[0]}")/ci_run_prod_image_test.sh" 
"${job_name}" "${file}" >"${JOB_LOG}" 2>&1
+}
+
+
+function test_images() {
+    if [[ ${CI=} == "true" ]]; then
+        echo
+        echo "Skipping the script builds on CI! "
+        echo "They take very long time to build."
+        echo
+    else
+        local scripts_to_test
+        scripts_to_test=$(find "${DOCKER_EXAMPLES_DIR}" -type f -name '*.sh' )
+        for file in ${scripts_to_test}
+        do
+            local job_name
+            job_name=$(basename "${file}")
+            run_image_test_job "${file}" "${job_name}"
+        done
+    fi
+    local dockerfiles_to_test
+    dockerfiles_to_test=$(find "${DOCKER_EXAMPLES_DIR}" -type f -name 
'Dockerfile' )
+    for file in ${dockerfiles_to_test}
+    do
+        local job_name
+        job_name="$(basename "$(dirname "${file}")")"
+        run_image_test_job "${file}" "${job_name}"
+    done
+
+}
+
+cd "${AIRFLOW_SOURCES}" || exit 1
+
+docker_engine_resources::get_available_cpus_in_docker
+
+# Limiting the number of images built in parallel helps to conserve docker image space
+MAX_PARALLEL_IMAGE_JOBS=4
+export MAX_PARALLEL_IMAGE_JOBS
+
+parallel::make_sure_gnu_parallel_is_installed
+parallel::kill_stale_semaphore_locks
+parallel::initialize_monitoring
+
+start_end::group_start "Testing image building"
+
+parallel::monitor_progress
+
+test_images
+
+parallel --semaphore --semaphorename "${SEMAPHORE_NAME}" --wait
+start_end::group_end
+
+parallel::print_job_summary_and_return_status_code
diff --git a/scripts/ci/libraries/_build_images.sh 
b/scripts/ci/libraries/_build_images.sh
index fa11128..55801e2 100644
--- a/scripts/ci/libraries/_build_images.sh
+++ b/scripts/ci/libraries/_build_images.sh
@@ -820,6 +820,7 @@ function build_images::prepare_prod_build() {
             "--build-arg" "AIRFLOW_VERSION=${INSTALL_AIRFLOW_VERSION}"
         )
         export AIRFLOW_VERSION="${INSTALL_AIRFLOW_VERSION}"
+        export INSTALL_PROVIDERS_FROM_SOURCES="false"
         build_images::add_build_args_for_remote_install
     else
         # When no airflow version/reference is specified, production image is 
built either from the
diff --git a/scripts/ci/libraries/_docker_engine_resources.sh 
b/scripts/ci/libraries/_docker_engine_resources.sh
index 18b223d..b5283b3 100644
--- a/scripts/ci/libraries/_docker_engine_resources.sh
+++ b/scripts/ci/libraries/_docker_engine_resources.sh
@@ -28,24 +28,21 @@ function docker_engine_resources::print_overall_stats() {
 
 
 function docker_engine_resources::get_available_memory_in_docker() {
-    MEMORY_AVAILABLE_FOR_DOCKER=$(docker run --rm --entrypoint /bin/bash \
-        "${AIRFLOW_CI_IMAGE}" -c \
+    MEMORY_AVAILABLE_FOR_DOCKER=$(docker run --rm --entrypoint /bin/bash 
debian:buster-slim -c \
         'echo $(($(getconf _PHYS_PAGES) * $(getconf PAGE_SIZE) / (1024 * 
1024)))')
     echo "${COLOR_BLUE}Memory available for Docker${COLOR_RESET}: $(numfmt 
--to iec $((MEMORY_AVAILABLE_FOR_DOCKER * 1024 * 1024)))"
     export MEMORY_AVAILABLE_FOR_DOCKER
 }
 
 function docker_engine_resources::get_available_cpus_in_docker() {
-    CPUS_AVAILABLE_FOR_DOCKER=$(docker run --rm --entrypoint /bin/bash \
-        "${AIRFLOW_CI_IMAGE}" -c \
+    CPUS_AVAILABLE_FOR_DOCKER=$(docker run --rm --entrypoint /bin/bash 
debian:buster-slim -c \
         'grep -cE "cpu[0-9]+" </proc/stat')
     echo "${COLOR_BLUE}CPUS available for Docker${COLOR_RESET}: 
${CPUS_AVAILABLE_FOR_DOCKER}"
     export CPUS_AVAILABLE_FOR_DOCKER
 }
 
 function docker_engine_resources::get_available_disk_space_in_docker() {
-    DISK_SPACE_AVAILABLE_FOR_DOCKER=$(docker run --rm --entrypoint /bin/bash \
-        "${AIRFLOW_CI_IMAGE}" -c \
+    DISK_SPACE_AVAILABLE_FOR_DOCKER=$(docker run --rm --entrypoint /bin/bash 
debian:buster-slim -c \
         'df  / | tail -1 | awk '\''{print $4}'\')
     echo "${COLOR_BLUE}Disk space available for Docker${COLOR_RESET}: $(numfmt 
--to iec $((DISK_SPACE_AVAILABLE_FOR_DOCKER * 1024)))"
     export DISK_SPACE_AVAILABLE_FOR_DOCKER
diff --git a/scripts/ci/libraries/_initialization.sh 
b/scripts/ci/libraries/_initialization.sh
index 5e38f1e..a0723b9 100644
--- a/scripts/ci/libraries/_initialization.sh
+++ b/scripts/ci/libraries/_initialization.sh
@@ -193,6 +193,7 @@ function 
initialization::initialize_files_for_rebuild_check() {
         "Dockerfile.ci"
         ".dockerignore"
         "scripts/docker/compile_www_assets.sh"
+        "scripts/docker/common.sh"
         "scripts/docker/install_additional_dependencies.sh"
         "scripts/docker/install_airflow.sh"
         "scripts/docker/install_airflow_from_branch_tip.sh"
diff --git a/scripts/ci/libraries/_parallel.sh 
b/scripts/ci/libraries/_parallel.sh
index 09c3121..dfe1c4d 100644
--- a/scripts/ci/libraries/_parallel.sh
+++ b/scripts/ci/libraries/_parallel.sh
@@ -16,12 +16,12 @@
 # specific language governing permissions and limitations
 # under the License.
 
+
+# Requires SEMAPHORE_NAME to be set
+
 function parallel::initialize_monitoring() {
     PARALLEL_MONITORED_DIR="$(mktemp -d)"
     export PARALLEL_MONITORED_DIR
-
-    PARALLEL_JOBLOG="$(mktemp)"
-    export PARALLEL_JOBLOG
 }
 
 function parallel::make_sure_gnu_parallel_is_installed() {
@@ -53,6 +53,7 @@ function parallel::kill_stale_semaphore_locks() {
 
 # Periodical loop to print summary of all the processes run by parallel
 function parallel::monitor_loop() {
+    trap 'exit 0' TERM
     echo
     echo "Start monitoring of parallel execution in ${PARALLEL_MONITORED_DIR} 
directory."
     echo
@@ -79,16 +80,13 @@ function parallel::monitor_loop() {
             echo
         done
         echo
-        echo "${COLOR_YELLOW}########### Monitoring progress end: 
${progress_report_number} #################${COLOR_RESET}}"
+        echo "${COLOR_YELLOW}########### Monitoring progress end: 
${progress_report_number} #################${COLOR_RESET}"
         echo
         end_time=${SECONDS}
         echo "${COLOR_YELLOW}############## $((end_time - start_time)) seconds 
passed since start ####################### ${COLOR_RESET}"
         sleep 10
         progress_report_number=$((progress_report_number + 1))
     done
-    echo "${COLOR_BLUE}########### STATISTICS #################"
-    docker_engine_resources::print_overall_stats
-    echo "########### STATISTICS #################${COLOR_RESET}"
 }
 
 # Monitors progress of parallel execution and periodically summarizes stdout 
entries created by
@@ -96,8 +94,6 @@ function parallel::monitor_loop() {
 # parameter to GNU parallel execution.
 function parallel::monitor_progress() {
     echo "Parallel results are stored in: ${PARALLEL_MONITORED_DIR}"
-    echo "Parallel joblog is stored in: ${PARALLEL_JOBLOG}"
-
     parallel::monitor_loop 2>/dev/null &
 
     # shellcheck disable=SC2034
@@ -108,5 +104,59 @@ function parallel::monitor_progress() {
 
 
 function parallel::kill_monitor() {
-    kill -9 ${PARALLEL_MONITORING_PID} >/dev/null 2>&1 || true
+    kill ${PARALLEL_MONITORING_PID} >/dev/null 2>&1 || true
+}
+
+# Outputs logs for successful test type
+# $1 test type
+function parallel::output_log_for_successful_job(){
+    local job=$1
+    local log_dir="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${job}"
+    start_end::group_start "${COLOR_GREEN}Output for successful 
${job}${COLOR_RESET}"
+    echo "${COLOR_GREEN}##### The ${job} succeeded ##### ${COLOR_RESET}"
+    echo
+    cat "${log_dir}"/stdout
+    echo
+    echo "${COLOR_GREEN}##### The ${job} succeeded ##### ${COLOR_RESET}"
+    echo
+    start_end::group_end
+}
+
+# Outputs logs for failed test type
+# $1 test type
+function parallel::output_log_for_failed_job(){
+    local job=$1
+    local log_dir="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${job}"
+    start_end::group_start "${COLOR_RED}Output: for failed 
${job}${COLOR_RESET}"
+    echo "${COLOR_RED}##### The ${job} failed ##### ${COLOR_RESET}"
+    echo
+    cat "${log_dir}"/stdout
+    echo
+    echo
+    echo "${COLOR_RED}##### The ${job} failed ##### ${COLOR_RESET}"
+    echo
+    start_end::group_end
+}
+
+# Prints summary of jobs and returns status:
+# 0 - all jobs succeeded (SKIPPED_FAILED_JOB is not counted)
+# >0 - number of failed jobs (except Quarantine)
+function parallel::print_job_summary_and_return_status_code() {
+    local return_code="0"
+    local job
+    for job_path in "${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/"*
+    do
+        job="$(basename "${job_path}")"
+        status=$(cat 
"${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${job}/status")
+        if [[ ${status} == "0" ]]; then
+            parallel::output_log_for_successful_job "${job}"
+        else
+            parallel::output_log_for_failed_job "${job}"
+            # SKIPPED_FAILED_JOB failure does not trigger whole test failure
+            if [[ ${SKIPPED_FAILED_JOB=} != "${job}" ]]; then
+                return_code=$((return_code + 1))
+            fi
+        fi
+    done
+    return "${return_code}"
 }
diff --git a/scripts/ci/testing/ci_run_airflow_testing.sh 
b/scripts/ci/testing/ci_run_airflow_testing.sh
index 1cd1c36..8286874 100755
--- a/scripts/ci/testing/ci_run_airflow_testing.sh
+++ b/scripts/ci/testing/ci_run_airflow_testing.sh
@@ -20,6 +20,9 @@
 RUN_TESTS="true"
 export RUN_TESTS
 
+SKIPPED_FAILED_JOB="Quarantined"
+export SKIPPED_FAILED_JOB
+
 # shellcheck source=scripts/ci/libraries/_script_init.sh
 . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
 
@@ -167,58 +170,6 @@ function run_test_types_in_parallel() {
     start_end::group_end
 }
 
-# Outputs logs for successful test type
-# $1 test type
-function output_log_for_successful_test_type(){
-    local test_type=$1
-    local log_dir="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${test_type}"
-    start_end::group_start "${COLOR_GREEN}Output for successful 
${test_type}${COLOR_RESET}"
-    echo "${COLOR_GREEN}##### Test type ${test_type} succeeded ##### 
${COLOR_RESET}"
-    echo
-    cat "${log_dir}"/stdout
-    echo
-    echo "${COLOR_GREEN}##### Test type ${test_type} succeeded ##### 
${COLOR_RESET}"
-    echo
-    start_end::group_end
-}
-
-# Outputs logs for failed test type
-# $1 test type
-function output_log_for_failed_test_type(){
-    local test_type=$1
-    local log_dir="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${test_type}"
-    start_end::group_start "${COLOR_RED}Output: for failed 
${test_type}${COLOR_RESET}"
-    echo "${COLOR_RED}##### Test type ${test_type} failed ##### ${COLOR_RESET}"
-    echo
-    cat "${log_dir}"/stdout
-    echo
-    echo
-    echo "${COLOR_RED}##### Test type ${test_type} failed ##### ${COLOR_RESET}"
-    echo
-    start_end::group_end
-}
-
-# Prints summary of tests and returns status:
-# 0 - all test types succeeded (Quarantine is not counted)
-# >0 - number of failed test types (except Quarantine)
-function print_test_summary_and_return_test_status_code() {
-    local return_code="0"
-    local test_type
-    for test_type in ${TEST_TYPES}
-    do
-        status=$(cat 
"${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${test_type}/status")
-        if [[ ${status} == "0" ]]; then
-            output_log_for_successful_test_type "${test_type}"
-        else
-            output_log_for_failed_test_type "${test_type}"
-            # Quarantined tests failure does not trigger whole test failure
-            if [[ ${TEST_TYPE} != "Quarantined" ]]; then
-                return_code=$((return_code + 1))
-            fi
-        fi
-    done
-    return "${return_code}"
-}
 
 export MEMORY_REQUIRED_FOR_INTEGRATION_TEST_PARALLEL_RUN=33000
 
@@ -236,8 +187,6 @@ export 
MEMORY_REQUIRED_FOR_INTEGRATION_TEST_PARALLEL_RUN=33000
 #   * MEMORY_AVAILABLE_FOR_DOCKER - memory that is available in docker (set by 
cleanup_runners)
 #
 function run_all_test_types_in_parallel() {
-    local test_type
-
     cleanup_runner
 
     start_end::group_start "Determine how to run the tests"
@@ -278,7 +227,7 @@ function run_all_test_types_in_parallel() {
     fi
     set -e
     # this will exit with error code in case some of the non-Quarantined tests 
failed
-    print_test_summary_and_return_test_status_code
+    parallel::print_job_summary_and_return_status_code
 }
 
 build_images::prepare_ci_build
diff --git a/scripts/docker/common.sh b/scripts/docker/common.sh
new file mode 100755
index 0000000..28307e3
--- /dev/null
+++ b/scripts/docker/common.sh
@@ -0,0 +1,63 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+set -euo pipefail
+
+test -v INSTALL_MYSQL_CLIENT
+test -v AIRFLOW_INSTALL_USER_FLAG
+test -v AIRFLOW_REPO
+test -v AIRFLOW_BRANCH
+test -v AIRFLOW_PIP_VERSION
+
+set -x
+
+function common::get_airflow_version_specification() {
+    if [[ -z ${AIRFLOW_VERSION_SPECIFICATION}
+        && -n ${AIRFLOW_VERSION}
+        && ${AIRFLOW_INSTALLATION_METHOD} != "." ]]; then
+        AIRFLOW_VERSION_SPECIFICATION="==${AIRFLOW_VERSION}"
+    fi
+}
+
+function common::get_constraints_location() {
+    # auto-detect Airflow-constraint reference and location
+    if [[ -z "${AIRFLOW_CONSTRAINTS_REFERENCE}" ]]; then
+        if [[ ${AIRFLOW_VERSION} =~ [^0-9]*1[^0-9]*10[^0-9]([0-9]*) ]]; then
+            # All types of references/versions match this regexp for 1.10 
series
+            # for example v1_10_test, 1.10.10, 1.10.9 etc. ${BASH_REMATCH[1]} 
matches last
+            # minor digit of version and its length is 0 for v1_10_test, 1 for 1.10.9 and 2 for 1.10.10+
+            AIRFLOW_MINOR_VERSION_NUMBER=${BASH_REMATCH[1]}
+            if [[ ${#AIRFLOW_MINOR_VERSION_NUMBER} == "0" ]]; then
+                # For v1_10_* branches use constraints-1-10 branch
+                AIRFLOW_CONSTRAINTS_REFERENCE=constraints-1-10
+            else
+                AIRFLOW_CONSTRAINTS_REFERENCE=constraints-${AIRFLOW_VERSION}
+            fi
+        elif  [[ ${AIRFLOW_VERSION} =~ v?2.* ]]; then
+            AIRFLOW_CONSTRAINTS_REFERENCE=constraints-${AIRFLOW_VERSION}
+        else
+            AIRFLOW_CONSTRAINTS_REFERENCE=${DEFAULT_CONSTRAINTS_BRANCH}
+        fi
+    fi
+
+    if [[ -z ${AIRFLOW_CONSTRAINTS_LOCATION} ]]; then
+        local 
constraints_base="https://raw.githubusercontent.com/${CONSTRAINTS_GITHUB_REPOSITORY}/${AIRFLOW_CONSTRAINTS_REFERENCE}";
+        local python_version
+        python_version="$(python --version 2>/dev/stdout | cut -d " " -f 2 | 
cut -d "." -f 1-2)"
+        
AIRFLOW_CONSTRAINTS_LOCATION="${constraints_base}/${AIRFLOW_CONSTRAINTS}-${python_version}.txt"
+    fi
+}
diff --git a/scripts/docker/compile_www_assets.sh 
b/scripts/docker/compile_www_assets.sh
index 04157b6..e303f51 100755
--- a/scripts/docker/compile_www_assets.sh
+++ b/scripts/docker/compile_www_assets.sh
@@ -17,9 +17,6 @@
 # under the License.
 # shellcheck disable=SC2086
 set -euo pipefail
-
-test -v PYTHON_MAJOR_MINOR_VERSION
-
 set -x
 
 # Installs additional dependencies passed as Argument to the Docker build 
command
@@ -31,7 +28,7 @@ function compile_www_assets() {
     md5sum_file="static/dist/sum.md5"
     readonly md5sum_file
     local airflow_site_package
-    
airflow_site_package="/root/.local/lib/python${PYTHON_MAJOR_MINOR_VERSION}/site-packages/airflow"
+    airflow_site_package="$(python -m site --user-site)"
     local www_dir=""
     if [[ -f "${airflow_site_package}/www_rbac/package.json" ]]; then
         www_dir="${airflow_site_package}/www_rbac"
diff --git a/scripts/docker/install_airflow.sh 
b/scripts/docker/install_airflow.sh
index 5f1e9d9..bfcc7e9 100755
--- a/scripts/docker/install_airflow.sh
+++ b/scripts/docker/install_airflow.sh
@@ -26,17 +26,8 @@
 #                                 dependencies (with 
EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS added)
 #
 # shellcheck disable=SC2086
-set -euo pipefail
-
-test -v AIRFLOW_INSTALLATION_METHOD
-test -v AIRFLOW_INSTALL_EDITABLE_FLAG
-test -v AIRFLOW_INSTALL_USER_FLAG
-test -v INSTALL_MYSQL_CLIENT
-test -v UPGRADE_TO_NEWER_DEPENDENCIES
-test -v CONTINUE_ON_PIP_CHECK_FAILURE
-test -v AIRFLOW_CONSTRAINTS_LOCATION
-
-set -x
+# shellcheck source=scripts/docker/common.sh
+. "$( dirname "${BASH_SOURCE[0]}" )/common.sh"
 
 function install_airflow() {
     # Sanity check for editable installation mode.
@@ -87,6 +78,11 @@ function install_airflow() {
         pip install ${AIRFLOW_INSTALL_USER_FLAG} --upgrade 
"pip==${AIRFLOW_PIP_VERSION}"
         pip check || ${CONTINUE_ON_PIP_CHECK_FAILURE}
     fi
+
 }
 
+common::get_airflow_version_specification
+
+common::get_constraints_location
+
 install_airflow
diff --git a/scripts/docker/install_airflow_from_branch_tip.sh 
b/scripts/docker/install_airflow_from_branch_tip.sh
index 3741055..6e34d05 100755
--- a/scripts/docker/install_airflow_from_branch_tip.sh
+++ b/scripts/docker/install_airflow_from_branch_tip.sh
@@ -26,16 +26,9 @@
 #
 # If INSTALL_MYSQL_CLIENT is set to false, mysql extra is removed
 #
-set -euo pipefail
+# shellcheck source=scripts/docker/common.sh
+. "$( dirname "${BASH_SOURCE[0]}" )/common.sh"
 
-test -v INSTALL_MYSQL_CLIENT
-test -v AIRFLOW_INSTALL_USER_FLAG
-test -v AIRFLOW_REPO
-test -v AIRFLOW_BRANCH
-test -v AIRFLOW_CONSTRAINTS_LOCATION
-test -v AIRFLOW_PIP_VERSION
-
-set -x
 
 function install_airflow_from_branch_tip() {
     echo
@@ -57,4 +50,6 @@ function install_airflow_from_branch_tip() {
     pip uninstall --yes apache-airflow
 }
 
+common::get_constraints_location
+
 install_airflow_from_branch_tip
diff --git a/scripts/docker/install_from_docker_context_files.sh 
b/scripts/docker/install_from_docker_context_files.sh
index 48aa933..d1982cf 100755
--- a/scripts/docker/install_from_docker_context_files.sh
+++ b/scripts/docker/install_from_docker_context_files.sh
@@ -22,19 +22,13 @@
 # The packages are prepared from current sources and placed in the 
'docker-context-files' folder
 # Then both airflow and provider packages are installed using those packages 
rather than
 # PyPI
-set -euo pipefail
-
-test -v AIRFLOW_EXTRAS
-test -v AIRFLOW_INSTALL_USER_FLAG
-test -v AIRFLOW_CONSTRAINTS_LOCATION
-test -v AIRFLOW_PIP_VERSION
-test -v CONTINUE_ON_PIP_CHECK_FAILURE
-test -v EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS
-test -v UPGRADE_TO_NEWER_DEPENDENCIES
-
-set -x
+# shellcheck source=scripts/docker/common.sh
+. "$( dirname "${BASH_SOURCE[0]}" )/common.sh"
 
 function install_airflow_and_providers_from_docker_context_files(){
+    if [[ ${INSTALL_MYSQL_CLIENT} != "true" ]]; then
+        AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS/mysql,}
+    fi
     # Find Apache Airflow packages in docker-context files
     local reinstalling_apache_airflow_package
     reinstalling_apache_airflow_package=$(ls \
@@ -68,8 +62,12 @@ function 
install_airflow_and_providers_from_docker_context_files(){
         echo
         echo Force re-installing airflow and providers from local files with 
constraints and upgrade if needed
         echo
-        # Remove provider packages from constraint files because they are 
locally prepared
-        curl -L "${AIRFLOW_CONSTRAINTS_LOCATION}" | grep -ve '^apache-airflow' 
> /tmp/constraints.txt
+        if [[ ${AIRFLOW_CONSTRAINTS_LOCATION} == "/"* ]]; then
+            grep -ve '^apache-airflow' <"${AIRFLOW_CONSTRAINTS_LOCATION}" > 
/tmp/constraints.txt
+        else
+            # Remove provider packages from constraint files because they are 
locally prepared
+            curl -L "${AIRFLOW_CONSTRAINTS_LOCATION}" | grep -ve 
'^apache-airflow' > /tmp/constraints.txt
+        fi
         # force reinstall airflow + provider package local files with 
constraints + upgrade if needed
         pip install ${AIRFLOW_INSTALL_USER_FLAG} --force-reinstall \
             ${reinstalling_apache_airflow_package} 
${reinstalling_apache_airflow_providers_packages} \
@@ -106,5 +104,7 @@ install_all_other_packages_from_docker_context_files() {
     fi
 }
 
+common::get_constraints_location
+
 install_airflow_and_providers_from_docker_context_files
 install_all_other_packages_from_docker_context_files

Reply via email to