This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-2-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 37857e2c954436f77f4c6cd0c050f48b712224dc
Author: Jarek Potiuk <[email protected]>
AuthorDate: Tue Jan 18 22:59:30 2022 +0100

    Switch to 'buildkit' to build Airflow images (#20664)
    
    The "buildkit" is much more modern docker build mechanism and supports
    multiarchitecture builds which makes it suitable for our future ARM
    support, it also has nicer UI and much more sophisticated caching
    mechanisms as well as supports better multi-segment builds.
    
    BuildKit has been an official part of docker for quite a while and it
    is rather stable now. We can also install the docker buildx plugin,
    which adds the capability of building and managing cache using
    dedicated builders (previously BuildKit cache was managed using rather
    complex external tools).
    
    This gives us an opportunity to vastly simplify our build scripts,
    because BuildKit has a much more robust caching mechanism than the old
    docker build (which forced us to pull images before using them as
    cache).
    
    We had a lot of complexity involved in efficient caching, but with
    BuildKit all of that can be vastly simplified and we can get rid of:

      * keeping base python images in our registry
      * keeping build segments for the prod image in our registry
      * keeping manifest images in our registry
      * deciding when to pull or pull&build the image (not needed now; we
        can always build the image with --cache-from and buildkit will
        pull cached layers as needed - see the illustrative command after
        this list)
      * building the image when running pre-commit (instead we simply
        encourage users to rebuild the image via the breeze command)
      * pulling the images before building
      * the separate 'build' cache kept in our registry (not needed any
        more, as buildkit can keep the cache for all segments of a
        multi-segment build in a single cache)
      * the manual spinner (the nice animated tty UI of buildkit
        eliminates the need for it)
      * and a number of other complexities.
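
    As an illustration only (a sketch, not the exact command: the real
    invocations live in the breeze scripts, and the branch, python version
    and tag below are just example values), such a cache-aware build boils
    down to roughly:

      # buildkit pulls only the cached layers it needs from the registry
      DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \
        --pull \
        --cache-from=ghcr.io/apache/airflow/main/ci/python3.7:cache \
        --tag my-ci-image:latest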
    
    Depends on #20238
    
    (cherry picked from commit ad28f69f74f4ba5defd6ad71c3d8b67d220c7fc2)
---
 .github/workflows/build-images.yml                 |   5 -
 .github/workflows/ci.yml                           |  24 +-
 BREEZE.rst                                         | 180 ++++-----
 CI.rst                                             |  16 -
 Dockerfile                                         |  32 +-
 Dockerfile.ci                                      |   5 -
 IMAGES.rst                                         | 167 +++-----
 airflow/www/ask_for_recompile_assets_if_needed.sh  |   2 +-
 breeze                                             | 136 +++----
 breeze-complete                                    |   4 +-
 dev/REFRESHING_CI_CACHE.md                         |   5 +
 dev/refresh_images.sh                              |   7 +-
 docs/docker-stack/build.rst                        |   4 +-
 .../customizing/add-build-essential-custom.sh      |   1 +
 .../docker-examples/customizing/custom-sources.sh  |   1 +
 .../customizing/github-different-repository.sh     |   1 +
 .../docker-examples/customizing/github-main.sh     |   1 +
 .../customizing/github-v2-2-test.sh                |   1 +
 .../customizing/pypi-dev-runtime-deps.sh           |   1 +
 .../customizing/pypi-extras-and-deps.sh            |   1 +
 .../customizing/pypi-selected-version.sh           |   1 +
 .../restricted/restricted_environments.sh          |   1 +
 docs/docker-stack/entrypoint.rst                   |   2 +-
 docs/docker-stack/recipes.rst                      |   2 +
 docs/helm-chart/manage-dags-files.rst              |   4 +-
 docs/helm-chart/quick-start.rst                    |   6 +-
 scripts/ci/images/ci_prepare_ci_image_on_ci.sh     |   5 -
 scripts/ci/images/ci_prepare_prod_image_on_ci.sh   |   4 -
 scripts/ci/images/ci_push_ci_images.sh             |  10 +-
 scripts/ci/images/ci_push_production_images.sh     |  11 +-
 scripts/ci/images/ci_run_prod_image_test.sh        |   2 +-
 scripts/ci/libraries/_all_libs.sh                  |   2 -
 scripts/ci/libraries/_build_images.sh              | 436 ++++++---------------
 scripts/ci/libraries/_initialization.sh            |  20 +-
 scripts/ci/libraries/_md5sum.sh                    |  19 -
 scripts/ci/libraries/_push_pull_remove_images.sh   | 203 +---------
 scripts/ci/libraries/_spinner.sh                   |  55 ---
 scripts/ci/libraries/_verbosity.sh                 |   2 +-
 scripts/ci/pre_commit/pre_commit_ci_build.sh       |  30 +-
 scripts/ci/tools/build_dockerhub.sh                |   1 -
 scripts/docker/common.sh                           |   2 +-
 scripts/in_container/entrypoint_ci.sh              |   1 -
 42 files changed, 397 insertions(+), 1016 deletions(-)

diff --git a/.github/workflows/build-images.yml 
b/.github/workflows/build-images.yml
index bb70715..8252b27 100644
--- a/.github/workflows/build-images.yml
+++ b/.github/workflows/build-images.yml
@@ -29,7 +29,6 @@ permissions:
 env:
   MOUNT_SELECTED_LOCAL_SOURCES: "false"
   FORCE_ANSWER_TO_QUESTIONS: "yes"
-  FORCE_PULL_IMAGES: "false"
   CHECK_IMAGE_FOR_REBUILD: "true"
   SKIP_CHECK_REMOTE_IMAGE: "true"
   DB_RESET: "true"
@@ -179,8 +178,6 @@ jobs:
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       UPGRADE_TO_NEWER_DEPENDENCIES: ${{ 
needs.build-info.outputs.upgradeToNewerDependencies }}
       DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }}
-      CHECK_IF_BASE_PYTHON_IMAGE_UPDATED: >
-        ${{ github.event_name == 'pull_request_target' && 'false' || 'true' }}
       outputs: ${{toJSON(needs.build-info.outputs) }}
     steps:
       - uses: actions/checkout@v2
@@ -250,8 +247,6 @@ jobs:
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       UPGRADE_TO_NEWER_DEPENDENCIES: ${{ 
needs.build-info.outputs.upgradeToNewerDependencies }}
       DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }}
-      CHECK_IF_BASE_PYTHON_IMAGE_UPDATED: >
-        ${{ github.event_name == 'pull_request_target' && 'false' || 'true' }}
       VERSION_SUFFIX_FOR_PYPI: ".dev0"
       INSTALL_PROVIDERS_FROM_SOURCES: >
         ${{ needs.build-info.outputs.defaultBranch == 'main' && 'true' || 
'false' }}
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index bb07b25..1a038bd 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -30,7 +30,6 @@ permissions:
 env:
   MOUNT_SELECTED_LOCAL_SOURCES: "false"
   FORCE_ANSWER_TO_QUESTIONS: "yes"
-  FORCE_PULL_IMAGES: "false"
   CHECK_IMAGE_FOR_REBUILD: "true"
   SKIP_CHECK_REMOTE_IMAGE: "true"
   DB_RESET: "true"
@@ -1287,12 +1286,11 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
           branch: ${{ steps.constraints-branch.outputs.branch }}
           directory: "repo"
 
-  # Push images to GitHub Registry in Apache repository, if all tests are 
successful and build
-  # is executed as result of direct push to "main" or one of the "test" 
branches
-  # It actually rebuilds all images using just-pushed constraints if they 
changed
-  # It will also check if a new python image was released and will pull the 
latest one if needed
-  # Same as build-images.yaml
-  push-images-to-github-registry:
+  # Push BuildX cache to GitHub Registry in Apache repository, if all tests 
are successful and build
+  # is executed as result of direct push to "main" or one of the "vX-Y-test" 
branches
+  # It rebuilds all images using just-pushed constraints using buildx and 
pushes them to registry
+  # It will automatically check if a new python image was released and will 
pull the latest one if needed
+  push-buildx-cache-to-github-registry:
     permissions:
       packages: write
     timeout-minutes: 10
@@ -1303,7 +1301,9 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       - constraints
       - docs
     # Only run it for direct pushes and scheduled builds
-    if: github.event_name == 'push' || github.event_name == 'schedule'
+    if: >
+      (github.event_name == 'push' || github.event_name == 'schedule')
+      && github.repository == 'apache/airflow'
     strategy:
       matrix:
         python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) 
}}
@@ -1317,11 +1317,9 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       # a new python image, we will rebuild it from scratch (same as during 
the "build-images.ci")
       GITHUB_REGISTRY_PULL_IMAGE_TAG: "latest"
       GITHUB_REGISTRY_PUSH_IMAGE_TAG: "latest"
-      PUSH_PYTHON_BASE_IMAGE: "true"
-      FORCE_PULL_IMAGES: "true"
-      CHECK_IF_BASE_PYTHON_IMAGE_UPDATED: "true"
       GITHUB_REGISTRY_WAIT_FOR_IMAGE: "false"
       UPGRADE_TO_NEWER_DEPENDENCIES: "false"
+      PREPARE_BUILDX_CACHE: "true"
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
@@ -1339,7 +1337,3 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         run: ./scripts/ci/images/ci_prepare_prod_image_on_ci.sh
         env:
           VERSION_SUFFIX_FOR_PYPI: ".dev0"
-      - name: "Push CI image ${{ env.PYTHON_MAJOR_MINOR_VERSION }}:latest"
-        run: ./scripts/ci/images/ci_push_ci_images.sh
-      - name: "Push PROD images ${{ env.PYTHON_MAJOR_MINOR_VERSION }}:latest"
-        run: ./scripts/ci/images/ci_push_production_images.sh
diff --git a/BREEZE.rst b/BREEZE.rst
index 31babf2..e922ee8 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -1146,10 +1146,10 @@ This is the current syntax for  `./breeze <./breeze>`_:
     shell                                    [Default] Enters interactive 
shell in the container
     build-docs                               Builds documentation in the 
container
     build-image                              Builds CI or Production docker 
image
+    prepare-build-cache                      Prepares CI or Production build 
cache
     cleanup-image                            Cleans up the container image 
created
     exec                                     Execs into running breeze 
container in new terminal
     generate-constraints                     Generates pinned constraint files
-    push-image                               Pushes images to registry
     initialize-local-virtualenv              Initializes local virtualenv
     prepare-airflow-packages                 Prepares airflow packages
     setup-autocomplete                       Sets up autocomplete for breeze
@@ -1254,10 +1254,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
            '--build-cache-local' or '-build-cache-pulled', or 
'--build-cache-none'
 
         Choosing whether to force pull images or force build the image:
-            '--force-build-image', '--force-pull-image'
-
-        Checking if the base python image has been updated:
-            '--check-if-base-python-image-updated'
+            '--force-build-image'
 
         You can also pass '--production-image' flag to build production image 
rather than CI image.
 
@@ -1280,7 +1277,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
   -t, --install-airflow-reference INSTALL_AIRFLOW_REFERENCE
           Installs Airflow directly from reference in GitHub when building 
PROD image.
-          This can be a GitHub branch like main or v2-2-test, or a tag like 
2.2.0rc1.
+          This can be a GitHub branch like main or v2-1-test, or a tag like 
2.1.0a1.
 
   --installation-method INSTALLATION_METHOD
           Method of installing Airflow in PROD image - either from the sources 
('.')
@@ -1300,17 +1297,6 @@ This is the current syntax for  `./breeze <./breeze>`_:
           automatically for the first time or when changes are detected in
           package-related files, but you can force it using this flag.
 
-  -P, --force-pull-images
-          Forces pulling of images from GitHub Container Registry before 
building to populate cache.
-          The images are pulled by default only for the first time you run the
-          environment, later the locally build images are used as cache.
-
-  --check-if-base-python-image-updated
-          Checks if Python base image from DockerHub has been updated vs the 
current python base
-          image we store in GitHub Container Registry. Python images are 
updated regularly with
-          security fixes, this switch will check if a new one has been 
released and will pull and
-          prepare a new base python based on the latest one.
-
   --cleanup-docker-context-files
           Removes whl and tar.gz files created in docker-context-files before 
running the command.
           In case there are some files there it unnecessarily increases the 
context size and
@@ -1458,6 +1444,74 @@ This is the current syntax for  `./breeze <./breeze>`_:
   
####################################################################################################
 
 
+  Detailed usage for command: prepare-build-cache
+
+
+  breeze prepare-build-cache [FLAGS]
+
+        Prepares build cache (CI or production) without entering the 
container. You can pass
+        additional options to this command, such as:
+
+        Choosing python version:
+          '--python'
+
+        You can also pass '--production-image' flag to build production image 
rather than CI image.
+
+        For GitHub repository, the '--github-repository' can be used to choose 
repository
+        to pull/push images. Cleanup docker context files and pull cache are 
forced. This command
+        requires buildx to be installed.
+
+  Flags:
+
+  -p, --python PYTHON_MAJOR_MINOR_VERSION
+          Python version used for the image. This is always major/minor 
version.
+
+          One of:
+
+                 3.7 3.8 3.9 3.6
+
+  -a, --install-airflow-version INSTALL_AIRFLOW_VERSION
+          Uses different version of Airflow when building PROD image.
+
+                 2.0.2 2.0.1 2.0.0 wheel sdist
+
+  -t, --install-airflow-reference INSTALL_AIRFLOW_REFERENCE
+          Installs Airflow directly from reference in GitHub when building 
PROD image.
+          This can be a GitHub branch like main or v2-1-test, or a tag like 
2.1.0a1.
+
+  --installation-method INSTALLATION_METHOD
+          Method of installing Airflow in PROD image - either from the sources 
('.')
+          or from package 'apache-airflow' to install from PyPI.
+          Default in Breeze is to install from sources. One of:
+
+                 . apache-airflow
+
+  --upgrade-to-newer-dependencies
+          Upgrades PIP packages to latest versions available without looking 
at the constraints.
+
+  -I, --production-image
+          Use production image for entering the environment and builds (not 
for tests).
+
+  -g, --github-repository GITHUB_REPOSITORY
+          GitHub repository used to pull, push images.
+          Default: apache/airflow.
+
+  -v, --verbose
+          Show verbose information about executed docker, kind, kubectl, helm 
commands. Useful for
+          debugging - when you run breeze with --verbose flags you will be 
able to see the commands
+          executed under the hood and copy&paste them to your terminal to 
debug them more easily.
+
+          Note that you can further increase verbosity and see all the 
commands executed by breeze
+          by running 'export VERBOSE_COMMANDS="true"' before running breeze.
+
+  --dry-run-docker
+          Only show docker commands to execute instead of actually executing 
them. The docker
+          commands are printed in yellow color.
+
+
+  
####################################################################################################
+
+
   Detailed usage for command: cleanup-image
 
 
@@ -1559,61 +1613,6 @@ This is the current syntax for  `./breeze <./breeze>`_:
   
####################################################################################################
 
 
-  Detailed usage for command: push-image
-
-
-  breeze push_image [FLAGS]
-
-        Pushes images to GitHub registry.
-
-        You can add --github-repository to push to a different 
repository/organisation.
-        You can add --github-image-id <COMMIT_SHA> in case you want to push 
image with specific
-        SHA tag.
-        You can also add --production-image flag to switch to production image 
(default is CI one)
-
-        Examples:
-
-        'breeze push-image' or
-        'breeze push-image --production-image' - to push production image or
-        'breeze push-image \
-              --github-repository user/airflow' - to push to your user's fork
-        'breeze push-image \
-              --github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e' - to 
push with COMMIT_SHA
-
-  Flags:
-
-  -g, --github-repository GITHUB_REPOSITORY
-          GitHub repository used to pull, push images.
-          Default: apache/airflow.
-
-
-
-
-  -s, --github-image-id COMMIT_SHA
-          <COMMIT_SHA> of the image. Images in GitHub registry are stored with 
those
-          to be able to easily find the image for particular CI runs. Once you 
know the
-          <COMMIT_SHA>, you can specify it in github-image-id flag and Breeze 
will
-          automatically pull and use that image so that you can easily 
reproduce a problem
-          that occurred in CI.
-
-          Default: latest.
-
-  -v, --verbose
-          Show verbose information about executed docker, kind, kubectl, helm 
commands. Useful for
-          debugging - when you run breeze with --verbose flags you will be 
able to see the commands
-          executed under the hood and copy&paste them to your terminal to 
debug them more easily.
-
-          Note that you can further increase verbosity and see all the 
commands executed by breeze
-          by running 'export VERBOSE_COMMANDS="true"' before running breeze.
-
-  --dry-run-docker
-          Only show docker commands to execute instead of actually executing 
them. The docker
-          commands are printed in yellow color.
-
-
-  
####################################################################################################
-
-
   Detailed usage for command: initialize-local-virtualenv
 
 
@@ -1903,17 +1902,6 @@ This is the current syntax for  `./breeze <./breeze>`_:
           automatically for the first time or when changes are detected in
           package-related files, but you can force it using this flag.
 
-  -P, --force-pull-images
-          Forces pulling of images from GitHub Container Registry before 
building to populate cache.
-          The images are pulled by default only for the first time you run the
-          environment, later the locally build images are used as cache.
-
-  --check-if-base-python-image-updated
-          Checks if Python base image from DockerHub has been updated vs the 
current python base
-          image we store in GitHub Container Registry. Python images are 
updated regularly with
-          security fixes, this switch will check if a new one has been 
released and will pull and
-          prepare a new base python based on the latest one.
-
   --cleanup-docker-context-files
           Removes whl and tar.gz files created in docker-context-files before 
running the command.
           In case there are some files there it unnecessarily increases the 
context size and
@@ -2194,11 +2182,11 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
                  all airflow-config-yaml airflow-providers-available 
airflow-provider-yaml-files-ok
                  autoflake base-operator bats-tests bats-in-container-tests 
black blacken-docs
-                 boring-cyborg build build-providers-dependencies 
check-apache-license check-builtin-literals
-                 check-executables-have-shebangs check-extras-order 
check-hooks-apply
-                 check-integrations check-merge-conflict check-xml 
daysago-import-check
-                 debug-statements detect-private-key doctoc 
dont-use-safe-filter end-of-file-fixer
-                 fix-encoding-pragma flake8 flynt forbid-tabs helm-lint 
identity
+                 boring-cyborg build build-providers-dependencies 
check-apache-license
+                 check-builtin-literals check-executables-have-shebangs 
check-extras-order
+                 check-hooks-apply check-integrations check-merge-conflict 
check-xml
+                 daysago-import-check debug-statements detect-private-key 
doctoc dont-use-safe-filter
+                 end-of-file-fixer fix-encoding-pragma flake8 flynt 
forbid-tabs helm-lint identity
                  incorrect-use-of-LoggingMixin insert-license isort 
json-schema language-matters
                  lint-dockerfile lint-openapi markdownlint mermaid 
mixed-line-ending mypy mypy-helm
                  no-providers-in-core-examples no-relative-imports 
persist-credentials-disabled
@@ -2208,7 +2196,8 @@ This is the current syntax for  `./breeze <./breeze>`_:
                  pyupgrade restrict-start_date rst-backticks setup-order 
setup-extra-packages
                  shellcheck sort-in-the-wild sort-spelling-wordlist stylelint 
trailing-whitespace
                  ui-lint update-breeze-file update-extras 
update-local-yml-file update-setup-cfg-file
-                 update-supported-versions update-versions 
verify-db-migrations-documented version-sync www-lint yamllint yesqa
+                 update-supported-versions update-versions 
verify-db-migrations-documented
+                 version-sync www-lint yamllint yesqa
 
         You can pass extra arguments including options to the pre-commit 
framework as
         <EXTRA_ARGS> passed after --. For example:
@@ -2453,7 +2442,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
   -t, --install-airflow-reference INSTALL_AIRFLOW_REFERENCE
           Installs Airflow directly from reference in GitHub when building 
PROD image.
-          This can be a GitHub branch like main or v2-2-test, or a tag like 
2.2.0rc1.
+          This can be a GitHub branch like main or v2-1-test, or a tag like 
2.1.0a1.
 
   --installation-method INSTALLATION_METHOD
           Method of installing Airflow in PROD image - either from the sources 
('.')
@@ -2496,17 +2485,6 @@ This is the current syntax for  `./breeze <./breeze>`_:
           automatically for the first time or when changes are detected in
           package-related files, but you can force it using this flag.
 
-  -P, --force-pull-images
-          Forces pulling of images from GitHub Container Registry before 
building to populate cache.
-          The images are pulled by default only for the first time you run the
-          environment, later the locally build images are used as cache.
-
-  --check-if-base-python-image-updated
-          Checks if Python base image from DockerHub has been updated vs the 
current python base
-          image we store in GitHub Container Registry. Python images are 
updated regularly with
-          security fixes, this switch will check if a new one has been 
released and will pull and
-          prepare a new base python based on the latest one.
-
   --cleanup-docker-context-files
           Removes whl and tar.gz files created in docker-context-files before 
running the command.
           In case there are some files there it unnecessarily increases the 
context size and
diff --git a/CI.rst b/CI.rst
index 0ea9d6c..766d1bc 100644
--- a/CI.rst
+++ b/CI.rst
@@ -149,22 +149,6 @@ You can use those variables when you try to reproduce the 
build locally.
 
+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+
 |                                                           Force variables    
                                                       |
 
+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+
-| ``FORCE_PULL_IMAGES``                   |    true     |    true      |    
true    | Determines if images are force-pulled,          |
-|                                         |             |              |       
     | no matter if they are already present           |
-|                                         |             |              |       
     | locally. This includes not only the             |
-|                                         |             |              |       
     | CI/PROD images but also the Python base         |
-|                                         |             |              |       
     | images. Note that if Python base images         |
-|                                         |             |              |       
     | change, also the CI and PROD images             |
-|                                         |             |              |       
     | need to be fully rebuild unless they were       |
-|                                         |             |              |       
     | already built with that base Python             |
-|                                         |             |              |       
     | image. This is false for local development      |
-|                                         |             |              |       
     | to avoid often pulling and rebuilding           |
-|                                         |             |              |       
     | the image. It is true for CI workflow in        |
-|                                         |             |              |       
     | case waiting from images is enabled             |
-|                                         |             |              |       
     | as the images needs to be force-pulled from     |
-|                                         |             |              |       
     | GitHub Registry, but it is set to               |
-|                                         |             |              |       
     | false when waiting for images is disabled.      |
-+-----------------------------------------+-------------+--------------+------------+-------------------------------------------------+
 | ``FORCE_BUILD_IMAGES``                  |    false    |    false     |    
false   | Forces building images. This is generally not   |
 |                                         |             |              |       
     | very useful in CI as in CI environment image    |
 |                                         |             |              |       
     | is built or pulled only once, so there is no    |
diff --git a/Dockerfile b/Dockerfile
index 76b2d36..778b21b 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -33,7 +33,9 @@
 #                        all the build essentials. This makes the image
 #                        much smaller.
 #
-ARG AIRFLOW_VERSION="2.2.0.dev0"
+# Use the same builder frontend version for everyone
+# syntax=docker/dockerfile:1.3
+ARG AIRFLOW_VERSION="2.2.4.dev0"
 ARG 
AIRFLOW_EXTRAS="amazon,async,celery,cncf.kubernetes,dask,docker,elasticsearch,ftp,google,google_auth,grpc,hashicorp,http,ldap,microsoft.azure,mysql,odbc,pandas,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv"
 ARG ADDITIONAL_AIRFLOW_EXTRAS=""
 ARG ADDITIONAL_PYTHON_DEPS=""
@@ -327,34 +329,6 @@ RUN if [[ -f /docker-context-files/requirements.txt ]]; 
then \
         pip install --no-cache-dir --user -r 
/docker-context-files/requirements.txt; \
     fi
 
-ARG BUILD_ID
-ARG COMMIT_SHA
-ARG AIRFLOW_IMAGE_REPOSITORY
-ARG AIRFLOW_IMAGE_DATE_CREATED
-
-ENV BUILD_ID=${BUILD_ID} COMMIT_SHA=${COMMIT_SHA}
-
-LABEL org.apache.airflow.distro="debian" \
-  org.apache.airflow.distro.version="buster" \
-  org.apache.airflow.module="airflow" \
-  org.apache.airflow.component="airflow" \
-  org.apache.airflow.image="airflow-build-image" \
-  org.apache.airflow.version="${AIRFLOW_VERSION}" \
-  org.apache.airflow.build-image.build-id=${BUILD_ID} \
-  org.apache.airflow.build-image.commit-sha=${COMMIT_SHA} \
-  org.opencontainers.image.source=${AIRFLOW_IMAGE_REPOSITORY} \
-  org.opencontainers.image.created=${AIRFLOW_IMAGE_DATE_CREATED} \
-  org.opencontainers.image.authors="[email protected]" \
-  org.opencontainers.image.url="https://airflow.apache.org"; \
-  
org.opencontainers.image.documentation="https://airflow.apache.org/docs/docker-stack/index.html";
 \
-  org.opencontainers.image.version="${AIRFLOW_VERSION}" \
-  org.opencontainers.image.revision="${COMMIT_SHA}" \
-  org.opencontainers.image.vendor="Apache Software Foundation" \
-  org.opencontainers.image.licenses="Apache-2.0" \
-  org.opencontainers.image.ref.name="airflow-build-image" \
-  org.opencontainers.image.title="Build Image Segment for Production Airflow 
Image" \
-  org.opencontainers.image.description="Reference build-time dependencies 
image for production-ready Apache Airflow image"
-
 
##############################################################################################
 # This is the actual Airflow image - much smaller than the build one. We copy
 # installed Airflow and all it's dependencies from the build image to make it 
smaller.
diff --git a/Dockerfile.ci b/Dockerfile.ci
index d9e4477..fd6e3a8 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -291,11 +291,6 @@ RUN echo -e "\n\e[32mThe 'Running pip as the root user' 
warnings below are not v
         /scripts/docker/install_airflow_dependencies_from_branch_tip.sh; \
     fi
 
-# Generate random hex dump file so that we can determine whether it's faster 
to rebuild the image
-# using current cache (when our dump is the same as the remote onb) or better 
to pull
-# the new image (when it is different)
-RUN head -c 30 /dev/urandom | xxd -ps >/build-cache-hash
-
 # Copy package.json and yarn.lock to install node modules
 # this way even if other static check files change, node modules will not need 
to be installed
 # we want to keep node_modules so we can do this step separately from 
compiling assets
diff --git a/IMAGES.rst b/IMAGES.rst
index 12f0b91..48108ca 100644
--- a/IMAGES.rst
+++ b/IMAGES.rst
@@ -153,7 +153,6 @@ This will build the image using command similar to:
    them to appropriate format and workflow that your tool requires.
 
 
-
 You can also build production images from specific Git version via providing 
``--install-airflow-reference``
 parameter to Breeze (this time constraints are taken from the 
``constraints-main`` branch which is the
 HEAD of development for constraints):
@@ -173,8 +172,8 @@ You can also skip installing airflow and install it from 
locally provided files
 
 In this case you airflow and all packages (.whl files) should be placed in 
``docker-context-files`` folder.
 
-Using cache during builds
-=========================
+Using docker cache during builds
+================================
 
 Default mechanism used in Breeze for building CI images uses images pulled from
 GitHub Container Registry. This is done to speed up local builds and building 
images for CI runs - instead of
@@ -230,45 +229,41 @@ or
 Naming conventions
 ==================
 
-By default images are pulled and pushed from and to Github Container registry 
when you use Breeze's push-image
-or build commands.
-
-We are using GitHub Container Registry as build cache.The images are all in 
organization wide "apache/"
-namespace. We are adding "airflow-" as prefix for image names of all Airflow 
images.
-The images are linked to the repository via 
``org.opencontainers.image.source`` label in the image.
+By default we are using the GitHub Container registry as cache for images. We are using GitHub
+Container Registry as development image cache and CI registry for build images.
+The images are all in organization wide "apache/" namespace. We are adding 
"airflow-" as prefix for
+the image names of all Airflow images. The images are linked to the repository
+via ``org.opencontainers.image.source`` label in the image.
 
 See 
https://docs.github.com/en/packages/learn-github-packages/connecting-a-repository-to-a-package
 
 Naming convention for the GitHub packages.
 
-Images with a commit SHA (built for pull requests and pushes)
+Images with a commit SHA (built for pull requests and pushes). Those are images that are a snapshot of
+the currently running build. They are built once per build and pulled by each test job.
 
 .. code-block:: bash
 
   ghcr.io/apache/airflow/<BRANCH>/ci/python<X.Y>:<COMMIT_SHA>         - for CI 
images
   ghcr.io/apache/airflow/<BRANCH>/prod/python<X.Y>:<COMMIT_SHA>       - for 
production images
 
-We do not push Base Python images and prod-build images when we prepare COMMIT 
builds, because those
-images are never rebuilt locally, so there is no need to store base images 
specific for those builds.
 
-Latest images (pushed when main merge succeeds):
+The cache images (pushed when main merge succeeds) are kept with ``cache`` tag:
 
 .. code-block:: bash
 
-  ghcr.io/apache/airflow/<BRANCH>/python:<X.Y>-slim-buster        - for base 
Python images
-  ghcr.io/apache/airflow/<BRANCH>/ci/python<X.Y>:latest           - for CI 
images
-  ghcr.io/apache/airflow/<BRANCH>/ci-manifest/python<X.Y>:latest  - for CI 
Manifest images
-  ghcr.io/apache/airflow/<BRANCH>/prod/python<X.Y>:latest         - for 
production images
-  ghcr.io/apache/airflow/<BRANCH>/prod-build/python<X.Y>:latest   - for 
production build stage
+  ghcr.io/apache/airflow/<BRANCH>/ci/python<X.Y>:cache           - for CI 
images
+  ghcr.io/apache/airflow/<BRANCH>/prod/python<X.Y>:cache         - for 
production images
 
 You can see all the current GitHub images at 
`<https://github.com/apache/airflow/packages>`_
 
 You can read more about the CI configuration and how CI jobs are using GitHub 
images
 in `<CI.rst>`_.
 
-Note that you need to be committer and have the right to push to GitHub and 
you need to
-be logged in to the registry. Only committers can push images directly. You 
need to login with your
-Personal Access Token with "packages" write scope to be able to push to those 
repositories or pull from them
+Note that you need to be a committer and have the right to refresh the images in the GitHub Registry with
+the latest sources from main via ``./dev/refresh_images.sh``.
+Only committers can push images directly. You need to log in with your Personal Access Token with
+"packages" write scope to be able to push to those repositories or pull from them
 in case of GitHub Packages.
 
 GitHub Container Registry
@@ -284,35 +279,20 @@ the images periodically and update them whenever new 
version of base Python is r
 However, occasionally, you might need to rebuild images locally and push them 
directly to the registries
 to refresh them.
 
-This can be done with ``Breeze`` command line which has easy-to-use tool to 
manage those images. For
-example:
-
-Force building Python 3.6 CI image using local cache and pushing it container 
registry:
-
-.. code-block:: bash
-
-  ./breeze build-image --python 3.6 --force-build-images 
--check-if-base-python-image-updated --build-cache-local
-  ./breeze push-image --python 3.6
-
-Building Python 3.8 CI image using cache pulled from GitHub Container Registry 
and pushing it back:
-
-.. code-block:: bash
 
-  ./breeze build-image --python 3.8
-  ./breeze push-image --python 3.8
 
-You can also pull and run images being result of a specific CI run in GitHub 
Actions. This is a powerful
-tool that allows to reproduce CI failures locally, enter the images and fix 
them much faster. It is enough
-to pass ``--github-image-id`` and the registry and Breeze will download and 
execute commands using
-the same image that was used during the CI tests.
+Every developer can also pull and run images that are the result of a specific CI run in GitHub Actions.
+This is a powerful tool that allows you to reproduce CI failures locally, enter the images and fix them
+much faster. It is enough to pass ``--github-image-id`` and the registry and Breeze will download and
+execute commands using the same image that was used during the CI tests.
 
 For example this command will run the same Python 3.8 image as was used in 
build identified with
-9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e commit SHA  with enabled Kerberos 
integration.
+9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e commit SHA  with enabled rabbitmq 
integration.
 
 .. code-block:: bash
 
   ./breeze --github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e \
-    --python 3.8 --integration kerberos
+    --python 3.8 --integration rabbitmq
 
 You can see more details and examples in `Breeze <BREEZE.rst>`_
 
@@ -331,9 +311,13 @@ Here just a few examples are presented which should give 
you general understandi
 This builds the production image in version 3.7 with additional airflow extras 
from 2.0.0 PyPI package and
 additional apt dev and runtime dependencies.
 
+It is recommended to build images with the ``DOCKER_BUILDKIT=1`` variable
+(Breeze sets the ``DOCKER_BUILDKIT=1`` variable automatically).
+
 .. code-block:: bash
 
-  docker build . -f Dockerfile.ci \
+  DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \
+    --pull \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc"
     --build-arg ADDITIONAL_PYTHON_DEPS="pandas"
@@ -358,7 +342,8 @@ based on example in `this comment 
<https://github.com/apache/airflow/issues/8605
 
 .. code-block:: bash
 
-  docker build . -f Dockerfile.ci \
+  DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \
+    --pull \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
     --build-arg ADDITIONAL_AIRFLOW_EXTRAS="slack" \
@@ -502,14 +487,18 @@ This builds the CI image in version 3.7 with default 
extras ("all").
 
 .. code-block:: bash
 
-  docker build . -f Dockerfile.ci --build-arg 
PYTHON_BASE_IMAGE="python:3.7-slim-buster" --tag my-image:0.0.1
+  DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \
+     --pull \
+     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" --tag 
my-image:0.0.1
 
 
 This builds the CI image in version 3.6 with "gcp" extra only.
 
 .. code-block:: bash
 
-  docker build . -f Dockerfile.ci --build-arg 
PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
+  DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \
+    --pull \
+    --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg AIRFLOW_EXTRAS=gcp --tag my-image:0.0.1
 
 
@@ -517,101 +506,39 @@ This builds the CI image in version 3.6 with 
"apache-beam" extra added.
 
 .. code-block:: bash
 
-  docker build . -f Dockerfile.ci --build-arg 
PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
+  DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \
+    --pull \
+    --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg ADDITIONAL_AIRFLOW_EXTRAS="apache-beam" --tag my-image:0.0.1
 
 This builds the CI image in version 3.6 with "mssql" additional package added.
 
 .. code-block:: bash
 
-  docker build . -f Dockerfile.ci --build-arg 
PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
+  DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \
+    --pull \
+    --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg ADDITIONAL_PYTHON_DEPS="mssql" --tag my-image:0.0.1
 
 This builds the CI image in version 3.6 with "gcc" and "g++" additional apt 
dev dependencies added.
 
 .. code-block::
 
-  docker build . -f Dockerfile.ci --build-arg 
PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
+  DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \
+    --pull \
+    --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++" --tag my-image:0.0.1
 
 This builds the CI image in version 3.6 with "jdbc" extra and 
"default-jre-headless" additional apt runtime dependencies added.
 
 .. code-block::
 
-  docker build . -f Dockerfile.ci --build-arg 
PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
+  DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \
+    --pull \
+    --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg AIRFLOW_EXTRAS=jdbc --build-arg 
ADDITIONAL_RUNTIME_DEPS="default-jre-headless" \
     --tag my-image:0.0.1
 
-CI Image manifests
-------------------
-
-Together with the main CI images we also build and push image manifests. Those 
manifests are very small images
-that contain only content of randomly generated file at the 'crucial' part of 
the CI image building.
-This is in order to be able to determine very quickly if the image in the 
docker registry has changed a
-lot since the last time. Unfortunately docker registry has no anonymous
-way of querying image details via API. You really need to download the image 
to inspect it.
-We workaround it in the way that always when we build the image we build a 
very small image manifest
-containing randomly generated UUID and push it to registry together with the 
main CI image.
-The tag for the manifest image reflects the image it refers to with added 
``-manifest`` suffix.
-The manifest image for ``ghcr.io/apache/airflow/main/ci/python3.6`` is named
-``ghcr.io/apache/airflow/main/ci-manifest/python3.6``.
-
-The image is quickly pulled (it is really, really small) when important files 
change and the content
-of the randomly generated UUID is compared with the one in our image. If the 
contents are different
-this means that the user should rebase to latest main and rebuild the image 
with pulling the image from
-the repo as this will likely be faster than rebuilding the image locally.
-
-The random UUID is generated right after pre-cached pip install is run - and 
usually it means that
-significant changes have been made to apt packages or even the base Python 
image has changed.
-
-Working with the images
-=======================
-
-Pulling the Latest Images
--------------------------
-
-Sometimes the image needs to be refreshed from the GitHub Container Registry - 
because you have an outdated
-version. You can do it via the ``--force-pull-images`` flag to force pulling 
the latest images.
-
-For production image:
-
-.. code-block:: bash
-
-  ./breeze build-image --force-pull-images --production-image
-
-For CI image Breeze automatically uses force pulling in case it determines 
that your image is very outdated,
-however you can also force it with the same flag.
-
-.. code-block:: bash
-
-  ./breeze build-image --force-pull-images
-
-Refreshing Base Python images
------------------------------
-
-Python base images are updated from time-to-time, usually as a result of 
implementing security fixes.
-When you build your image locally using ``docker build`` you use the version 
of image that you have locally.
-For image builds using ``breeze`` we use the image that is stored in our 
repository in order to use cache
-efficiently. However CI push build have ``CHECK_IF_BASE_PYTHON_IMAGE_UPDATED`` 
variable set to ``true``
-which checks if the image has been released and will pull it and rebuild it if 
needed
-
-.. code-block:: bash
-
-    #/bin/bash
-    export GITHUB_REPOSITORY="apache/airflow"
-    export FORCE_ANSWER_TO_QUESTIONS="true"
-    export CI="true"
-
-    for python_version in "3.6" "3.7" "3.8"
-    do
-            ./breeze build-image --python ${python_version} 
--build-cache-local \
-                    --check-if-python-base-image-updated --verbose
-            ./breeze build-image --python ${python_version} 
--build-cache-local \
-                    --production-image --verbose
-            ./breeze push-image
-            ./breeze push-image --production-image
-    done
-
 Running the CI image
 --------------------
 
diff --git a/airflow/www/ask_for_recompile_assets_if_needed.sh 
b/airflow/www/ask_for_recompile_assets_if_needed.sh
index 4fba5bc..d1a6f34 100755
--- a/airflow/www/ask_for_recompile_assets_if_needed.sh
+++ b/airflow/www/ask_for_recompile_assets_if_needed.sh
@@ -30,7 +30,7 @@ NO_COLOR='\033[0m'
 md5sum=$(find package.json yarn.lock static/css static/js -type f | sort | 
xargs md5sum)
 old_md5sum=$(cat "${MD5SUM_FILE}" 2>/dev/null || true)
 if [[ ${old_md5sum} != "${md5sum}" ]]; then
-    if [[ ${START_AIRFLOW} == "true" && ${USE_AIRFLOW_VERSION} == "" ]]; then
+    if [[ ${START_AIRFLOW:="false"} == "true" && ${USE_AIRFLOW_VERSION:=} == 
"" ]]; then
         echo
         echo -e "${YELLOW}Recompiling assets as they have changed and you need 
them for 'start_airflow' command${NO_COLOR}"
         echo
diff --git a/breeze b/breeze
index 8d53400..15aeaf6 100755
--- a/breeze
+++ b/breeze
@@ -72,7 +72,6 @@ export EXTRA_STATIC_CHECK_OPTIONS
 #    MAX_SCREEN_WIDTH
 #    SCREEN_WIDTH
 #    MOUNT_SELECTED_LOCAL_SOURCES
-#    FORCE_PULL_IMAGES
 #    FORWARD_CREDENTIALS
 #    DB_RESET
 #    START_AIRFLOW
@@ -114,14 +113,6 @@ function breeze::setup_default_breeze_constants() {
     # By default we do not mount all local Airflow sources
     export MOUNT_ALL_LOCAL_SOURCES="false"
 
-    # By default we only pull images if we do not have them locally.
-    # This can be overridden by '--force-pull-images' flag
-    export FORCE_PULL_IMAGES="false"
-
-    # By default we do not pull python base image. We should do that only when 
we run upgrade check in
-    # CI main and when we manually refresh the images to latest versions
-    export CHECK_IF_BASE_PYTHON_IMAGE_UPDATED="false"
-
     # Forward common host credentials to docker (gcloud, aws etc.).
     export FORWARD_CREDENTIALS="false"
 
@@ -172,9 +163,6 @@ function breeze::setup_default_breeze_constants() {
     # Can be overridden by '--force-build-images' flag.
     export FORCE_BUILD_IMAGES="false"
 
-    # When we push from breeze we always want to push base python images
-    export PUSH_PYTHON_BASE_IMAGE="true"
-
     # Determines whether to reinstall airflow at entering the image.
     export USE_AIRFLOW_VERSION=""
     # if set to true, the ci image will look for wheel packages in dist folder 
and will install them
@@ -1013,24 +1001,6 @@ function breeze::parse_arguments() {
             # if not set here, docker cached is determined later, depending on 
type of image to be built
             shift
             ;;
-        -P | --force-pull-images)
-            echo "Force pulling images before build. Uses pulled images as 
cache."
-            echo
-            export FORCE_PULL_IMAGES="true"
-            export FORCE_BUILD_IMAGES="true"
-            # if you want to force  build an image - assume you want to build 
it :)
-            export FORCE_ANSWER_TO_QUESTIONS="yes"
-            shift
-            ;;
-        --check-if-base-python-image-updated)
-            echo "Checks if base python image has been."
-            echo
-            export CHECK_IF_BASE_PYTHON_IMAGE_UPDATED="true"
-            export FORCE_BUILD_IMAGES="true"
-            # if you want to force  build an image - assume you want to build 
it :)
-            export FORCE_ANSWER_TO_QUESTIONS="yes"
-            shift
-            ;;
         -I | --production-image)
             export PRODUCTION_IMAGE="true"
             export SQLITE_URL=
@@ -1169,7 +1139,6 @@ function breeze::parse_arguments() {
             echo "You can specify --skip-mounting-local-sources to not mount 
local sources to get exact. "
             echo "behaviour as in the CI environment."
             echo
-            export FORCE_PULL_IMAGES="true"
             export GITHUB_REGISTRY_PULL_IMAGE_TAG="${2}"
             export GITHUB_REGISTRY_PUSH_IMAGE_TAG="${2}"
             export CHECK_IMAGE_FOR_REBUILD="false"
@@ -1310,6 +1279,19 @@ function breeze::parse_arguments() {
             echo
             shift
             ;;
+        prepare-build-cache)
+            last_subcommand="${1}"
+            command_to_run="prepare_build_cache"
+            export FORCE_ANSWER_TO_QUESTIONS="yes"
+            # and assume you want to build it no matter if it is needed
+            export FORCE_BUILD_IMAGES="true"
+            export PREPARE_BUILDX_CACHE="true"
+            export DOCKER_CACHE="pulled"
+            export CLEANUP_DOCKER_CONTEXT_FILES="true"
+            echo "Prepare buildx cache"
+            echo
+            shift
+            ;;
         cleanup-image)
             last_subcommand="${1}"
             echo "Cleanup the image"
@@ -1354,12 +1336,6 @@ function breeze::parse_arguments() {
             command_to_run="perform_prepare_provider_documentation"
             shift
             ;;
-        push-image)
-            last_subcommand="${1}"
-            command_to_run="perform_push_image"
-            export SKIP_CHECK_REMOTE_IMAGE="true"
-            shift
-            ;;
         initialize-local-virtualenv)
             last_subcommand="${1}"
             echo "Initializing local virtualenv"
@@ -1664,6 +1640,8 @@ function breeze::prepare_usage() {
     readonly USAGE_BUILD_DOCS
     export USAGE_BUILD_IMAGE="Builds CI or Production docker image"
     readonly USAGE_BUILD_DOCS
+    export USAGE_PREPARE_BUILD_CACHE="Prepares CI or Production build cache"
+    readonly USAGE_PREPARE_BUILD_CACHE
     export USAGE_CLEANUP_IMAGE="Cleans up the container image created"
     readonly USAGE_BUILD_DOCS
     export USAGE_DOCKER_COMPOSE="Executes specified docker-compose command"
@@ -1774,10 +1752,7 @@ ${CMDNAME} build-image [FLAGS]
          '--build-cache-local' or '-build-cache-pulled', or 
'--build-cache-none'
 
       Choosing whether to force pull images or force build the image:
-          '--force-build-image', '--force-pull-image'
-
-      Checking if the base python image has been updated:
-          '--check-if-base-python-image-updated'
+          '--force-build-image'
 
       You can also pass '--production-image' flag to build production image 
rather than CI image.
 
@@ -1793,6 +1768,29 @@ $(breeze::flag_pull_push_docker_images "no_show_sha")
 $(breeze::flag_verbosity)
 "
     readonly DETAILED_USAGE_BUILD_IMAGE
+    export DETAILED_USAGE_PREPARE_BUILD_CACHE="
+${CMDNAME} prepare-build-cache [FLAGS]
+
+      Prepares build cache (CI or production) without entering the container. 
You can pass
+      additional options to this command, such as:
+
+      Choosing python version:
+        '--python'
+
+      You can also pass '--production-image' flag to build production image 
rather than CI image.
+
+      For GitHub repository, the '--github-repository' can be used to choose 
repository
+      to pull/push images. Cleanup docker context files and pull cache are 
forced. This command
+      requires buildx to be installed.
+
+Flags:
+$(breeze::flag_airflow_variants)
+$(breeze::flag_build_different_airflow_version)
+$(breeze::flag_production_image)
+$(breeze::flag_pull_push_docker_images "no_show_sha")
+$(breeze::flag_verbosity)
+"
+    readonly DETAILED_USAGE_PREPARE_BUILD_CACHE
     export DETAILED_USAGE_CLEANUP_IMAGE="
 ${CMDNAME} cleanup-image [FLAGS]
 
@@ -1947,30 +1945,6 @@ $(breeze::flag_version_suffix)
 $(breeze::flag_verbosity)
 "
     readonly DETAILED_USAGE_PREPARE_PROVIDER_PACKAGES
-    export DETAILED_USAGE_PUSH_IMAGE="
-${CMDNAME} push_image [FLAGS]
-
-      Pushes images to GitHub registry.
-
-      You can add --github-repository to push to a different 
repository/organisation.
-      You can add --github-image-id <COMMIT_SHA> in case you want to push 
image with specific
-      SHA tag.
-      You can also add --production-image flag to switch to production image 
(default is CI one)
-
-      Examples:
-
-      '${CMDNAME} push-image' or
-      '${CMDNAME} push-image --production-image' - to push production image or
-      '${CMDNAME} push-image \\
-            --github-repository user/airflow' - to push to your user's fork
-      '${CMDNAME} push-image \\
-            --github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e' - to 
push with COMMIT_SHA
-
-Flags:
-$(breeze::flag_pull_push_docker_images)
-$(breeze::flag_verbosity)
-"
-    readonly DETAILED_USAGE_PUSH_IMAGE
     export DETAILED_USAGE_KIND_CLUSTER="
 ${CMDNAME} kind-cluster [FLAGS] OPERATION
 
@@ -2651,17 +2625,6 @@ function breeze::flag_build_docker_images() {
         automatically for the first time or when changes are detected in
         package-related files, but you can force it using this flag.
 
--P, --force-pull-images
-        Forces pulling of images from GitHub Container Registry before 
building to populate cache.
-        The images are pulled by default only for the first time you run the
-        environment, later the locally build images are used as cache.
-
---check-if-base-python-image-updated
-        Checks if Python base image from DockerHub has been updated vs the 
current python base
-        image we store in GitHub Container Registry. Python images are updated 
regularly with
-        security fixes, this switch will check if a new one has been released 
and will pull and
-        prepare a new base python based on the latest one.
-
 --cleanup-docker-context-files
         Removes whl and tar.gz files created in docker-context-files before 
running the command.
         In case there are some files there it unnecessarily increases the 
context size and
@@ -2966,7 +2929,7 @@ $(breeze::flag_build_docker_images)
 
 $(breeze::print_star_line)
  Flags for pulling/pushing Docker images (both CI and production)
-$(breeze::flag_pull_push_docker_images)
+$(breeze::flag_pull_push_docker_images "show_sha")
 
 $(breeze::print_star_line)
  Flags for running tests
@@ -3342,15 +3305,21 @@ function breeze::run_build_command() {
         build_images::prepare_ci_build
         build_images::rebuild_ci_image_if_needed
         ;;
-    perform_push_image)
+    build_image)
+        if [[ ${CLEANUP_DOCKER_CONTEXT_FILES} == "true" ]]; then
+            build_images::cleanup_docker_context_files
+        fi
+        build_images::check_for_docker_context_files
         if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
             build_images::prepare_prod_build
+            build_images::build_prod_images
         else
+
             build_images::prepare_ci_build
             build_images::rebuild_ci_image_if_needed
         fi
         ;;
-    build_image)
+    prepare_build_cache)
         if [[ ${CLEANUP_DOCKER_CONTEXT_FILES} == "true" ]]; then
             build_images::cleanup_docker_context_files
         fi
@@ -3510,7 +3479,7 @@ function breeze::run_breeze_command() {
         breeze::run_static_checks "${@}"
         ;;
     build_image) ;;
-
+    prepare_build_cache) ;;
     cleanup_image)
         breeze::remove_images
         ;;
@@ -3530,13 +3499,6 @@ function breeze::run_breeze_command() {
         docker_engine_resources::check_all_resources
         runs::run_prepare_provider_documentation "${@}"
         ;;
-    perform_push_image)
-        if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
-            push_pull_remove_images::push_prod_images_to_github
-        else
-            push_pull_remove_images::push_ci_images_to_github
-        fi
-        ;;
     perform_initialize_local_virtualenv)
         breeze::initialize_virtualenv
         ;;
diff --git a/breeze-complete b/breeze-complete
index 08b960d..675df3f 100644
--- a/breeze-complete
+++ b/breeze-complete
@@ -189,7 +189,7 @@ runtime-apt-deps: additional-runtime-apt-deps: 
runtime-apt-command: additional-r
 load-default-connections load-example-dags
 use-packages-from-dist no-rbac-ui package-format: 
upgrade-to-newer-dependencies installation-method: 
continue-on-pip-check-failure non-interactive generate-providers-issue
 use-airflow-version:
-cleanup-docker-context-files
+cleanup-docker-context-files prepare-buildx-cache
 test-type: preserve-volumes dry-run-docker
 executor:
 "
@@ -198,10 +198,10 @@ _breeze_commands="
 shell
 build-docs
 build-image
+prepare-build-cache
 cleanup-image
 exec
 generate-constraints
-push-image
 initialize-local-virtualenv
 prepare-airflow-packages
 setup-autocomplete
diff --git a/dev/REFRESHING_CI_CACHE.md b/dev/REFRESHING_CI_CACHE.md
index b98d97c..653ed88 100644
--- a/dev/REFRESHING_CI_CACHE.md
+++ b/dev/REFRESHING_CI_CACHE.md
@@ -77,6 +77,11 @@ git push
 
 # Manually refreshing the images
 
+Note that in order to refresh images you not only have to have the `buildx` docker plugin installed,
+but you should also make sure that you have a buildkit builder configured and set.
+
+More information can be found 
[here](https://docs.docker.com/engine/reference/commandline/buildx_create/)
+
 The images can be rebuilt and refreshed after the constraints are pushed. 
Refreshing image for particular
 python version is a simple as running the 
[refresh_images.sh](refresh_images.sh) script with python version
 as parameter:
diff --git a/dev/refresh_images.sh b/dev/refresh_images.sh
index 38e283a..8ee6b48 100755
--- a/dev/refresh_images.sh
+++ b/dev/refresh_images.sh
@@ -31,8 +31,5 @@ fi
 
 python_version=$1
 
-./breeze build-image --python "${python_version}" --build-cache-pulled  
--check-if-base-python-image-updated --verbose
-./breeze build-image --python "${python_version}" --build-cache-pulled  
--production-image --verbose
-
-./breeze push-image --python "${python_version}"
-./breeze push-image --production-image --python "${python_version}"
+./breeze prepare-build-cache --python "${python_version}" --verbose
+./breeze prepare-build-cache --python "${python_version}" --production-image 
--verbose
diff --git a/docs/docker-stack/build.rst b/docs/docker-stack/build.rst
index 165c5b0..2702c66 100644
--- a/docs/docker-stack/build.rst
+++ b/docs/docker-stack/build.rst
@@ -68,7 +68,7 @@ In the simplest case building your image consists of those 
steps:
 
 .. code-block:: shell
 
-   docker build . -f Dockerfile --tag my-image:0.0.1
+   docker build . -f Dockerfile --pull --tag my-image:0.0.1
 
 3) [Optional] Test the image. Airflow contains a tool that allows you to test the image. This step, however,
    requires locally checked out or extracted Airflow sources. If you happen to have the sources you can
@@ -223,7 +223,7 @@ You should be aware, about a few things:
 
 * You can build your image without any need for Airflow sources. It is enough 
that you place the
   ``Dockerfile`` and any files that are referred to (such as Dag files) in a 
separate directory and run
-  a command ``docker build . --tag my-image:my-tag`` (where ``my-image`` is 
the name you want to name it
+  a command ``docker build . --pull --tag my-image:my-tag`` (where 
``my-image`` is the name you want to name it
   and ``my-tag`` is the tag you want to tag the image with).
 
 * If your way of extending image requires to create writable directories, you 
MUST remember about adding
diff --git 
a/docs/docker-stack/docker-examples/customizing/add-build-essential-custom.sh 
b/docs/docker-stack/docker-examples/customizing/add-build-essential-custom.sh
index e6d8ddc..c6ffd20 100755
--- 
a/docs/docker-stack/docker-examples/customizing/add-build-essential-custom.sh
+++ 
b/docs/docker-stack/docker-examples/customizing/add-build-essential-custom.sh
@@ -25,6 +25,7 @@ cd "${AIRFLOW_SOURCES}"
 export AIRFLOW_VERSION=2.2.2
 
 docker build . \
+    --pull \
     --build-arg PYTHON_BASE_IMAGE="python:3.6-slim-buster" \
     --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \
     --build-arg ADDITIONAL_PYTHON_DEPS="mpi4py" \
diff --git a/docs/docker-stack/docker-examples/customizing/custom-sources.sh 
b/docs/docker-stack/docker-examples/customizing/custom-sources.sh
index 3fcb72c..a7a65f2 100755
--- a/docs/docker-stack/docker-examples/customizing/custom-sources.sh
+++ b/docs/docker-stack/docker-examples/customizing/custom-sources.sh
@@ -25,6 +25,7 @@ cd "${AIRFLOW_SOURCES}"
 export AIRFLOW_VERSION=2.2.2
 
 docker build . -f Dockerfile \
+    --pull \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \
     --build-arg ADDITIONAL_AIRFLOW_EXTRAS="slack,odbc" \
diff --git 
a/docs/docker-stack/docker-examples/customizing/github-different-repository.sh 
b/docs/docker-stack/docker-examples/customizing/github-different-repository.sh
index b38ebda..5a0a179 100755
--- 
a/docs/docker-stack/docker-examples/customizing/github-different-repository.sh
+++ 
b/docs/docker-stack/docker-examples/customizing/github-different-repository.sh
@@ -22,6 +22,7 @@ AIRFLOW_SOURCES="$(cd "$(dirname 
"${BASH_SOURCE[0]}")/../../../../" && pwd)"
 cd "${AIRFLOW_SOURCES}"
 # [START build]
 docker build . \
+    --pull \
     --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-buster" \
     --build-arg AIRFLOW_INSTALLATION_METHOD="https://github.com/potiuk/airflow/archive/main.tar.gz#egg=apache-airflow" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-main" \
diff --git a/docs/docker-stack/docker-examples/customizing/github-main.sh 
b/docs/docker-stack/docker-examples/customizing/github-main.sh
index ed1dc36..fc1f514 100755
--- a/docs/docker-stack/docker-examples/customizing/github-main.sh
+++ b/docs/docker-stack/docker-examples/customizing/github-main.sh
@@ -23,6 +23,7 @@ cd "${AIRFLOW_SOURCES}"
 
 # [START build]
 docker build . \
+    --pull \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg AIRFLOW_INSTALLATION_METHOD="https://github.com/apache/airflow/archive/main.tar.gz#egg=apache-airflow" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-main" \
diff --git a/docs/docker-stack/docker-examples/customizing/github-v2-2-test.sh 
b/docs/docker-stack/docker-examples/customizing/github-v2-2-test.sh
index cc40e12..16c4b20 100755
--- a/docs/docker-stack/docker-examples/customizing/github-v2-2-test.sh
+++ b/docs/docker-stack/docker-examples/customizing/github-v2-2-test.sh
@@ -23,6 +23,7 @@ cd "${AIRFLOW_SOURCES}"
 
 # [START build]
 docker build . \
+    --pull \
     --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-buster" \
     --build-arg AIRFLOW_INSTALLATION_METHOD="https://github.com/apache/airflow/archive/v2-2-test.tar.gz#egg=apache-airflow" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-2-2" \
diff --git 
a/docs/docker-stack/docker-examples/customizing/pypi-dev-runtime-deps.sh 
b/docs/docker-stack/docker-examples/customizing/pypi-dev-runtime-deps.sh
index c0255e6..d9ac1e9 100755
--- a/docs/docker-stack/docker-examples/customizing/pypi-dev-runtime-deps.sh
+++ b/docs/docker-stack/docker-examples/customizing/pypi-dev-runtime-deps.sh
@@ -25,6 +25,7 @@ cd "${AIRFLOW_SOURCES}"
 export AIRFLOW_VERSION=2.2.2
 
 docker build . \
+    --pull \
     --build-arg PYTHON_BASE_IMAGE="python:3.6-slim-buster" \
     --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \
     --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc" \
diff --git 
a/docs/docker-stack/docker-examples/customizing/pypi-extras-and-deps.sh 
b/docs/docker-stack/docker-examples/customizing/pypi-extras-and-deps.sh
index ffb0a49..cd96e5b 100755
--- a/docs/docker-stack/docker-examples/customizing/pypi-extras-and-deps.sh
+++ b/docs/docker-stack/docker-examples/customizing/pypi-extras-and-deps.sh
@@ -25,6 +25,7 @@ cd "${AIRFLOW_SOURCES}"
 export AIRFLOW_VERSION=2.2.2
 
 docker build . \
+    --pull \
     --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-buster" \
     --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \
     --build-arg ADDITIONAL_AIRFLOW_EXTRAS="mssql,hdfs" \
diff --git 
a/docs/docker-stack/docker-examples/customizing/pypi-selected-version.sh 
b/docs/docker-stack/docker-examples/customizing/pypi-selected-version.sh
index a1e2dda..aa4ddf1 100755
--- a/docs/docker-stack/docker-examples/customizing/pypi-selected-version.sh
+++ b/docs/docker-stack/docker-examples/customizing/pypi-selected-version.sh
@@ -25,6 +25,7 @@ cd "${AIRFLOW_SOURCES}"
 export AIRFLOW_VERSION=2.2.2
 
 docker build . \
+    --pull \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \
     --tag "my-pypi-selected-version:0.0.1"
diff --git 
a/docs/docker-stack/docker-examples/restricted/restricted_environments.sh 
b/docs/docker-stack/docker-examples/restricted/restricted_environments.sh
index f4a1930..ad99a3c 100755
--- a/docs/docker-stack/docker-examples/restricted/restricted_environments.sh
+++ b/docs/docker-stack/docker-examples/restricted/restricted_environments.sh
@@ -35,6 +35,7 @@ pip download --dest docker-context-files \
 
 # [START build]
 docker build . \
+    --pull \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
     --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \
diff --git a/docs/docker-stack/entrypoint.rst b/docs/docker-stack/entrypoint.rst
index a37c916..f66ac1a 100644
--- a/docs/docker-stack/entrypoint.rst
+++ b/docs/docker-stack/entrypoint.rst
@@ -257,7 +257,7 @@ Build your image and then you can run this script by 
running the command:
 
 .. code-block:: bash
 
-  docker build . --tag my-image:0.0.1
+  docker build . --pull --tag my-image:0.0.1
   docker run -it my-image:0.0.1 bash -c "/my_after_entrypoint_script.sh"
 
 
diff --git a/docs/docker-stack/recipes.rst b/docs/docker-stack/recipes.rst
index f27ed51..a1c5777 100644
--- a/docs/docker-stack/recipes.rst
+++ b/docs/docker-stack/recipes.rst
@@ -40,6 +40,7 @@ Then build a new image.
 .. code-block:: bash
 
   docker build . \
+    --pull \
     --build-arg BASE_AIRFLOW_IMAGE="apache/airflow:2.0.2" \
     --tag my-airflow-image:0.0.1
 
@@ -66,5 +67,6 @@ Then build a new image.
 .. code-block:: bash
 
   docker build . \
+    --pull \
     --build-arg BASE_AIRFLOW_IMAGE="apache/airflow:2.0.2" \
     --tag my-airflow-image:0.0.1
diff --git a/docs/helm-chart/manage-dags-files.rst 
b/docs/helm-chart/manage-dags-files.rst
index 37ec903..b889a1a 100644
--- a/docs/helm-chart/manage-dags-files.rst
+++ b/docs/helm-chart/manage-dags-files.rst
@@ -28,7 +28,7 @@ The recommended way to update your DAGs with this chart is to 
build a new docker
 
 .. code-block:: bash
 
-    docker build --tag "my-company/airflow:8a0da78" . -f - <<EOF
+    docker build --pull --tag "my-company/airflow:8a0da78" . -f - <<EOF
     FROM apache/airflow
 
     COPY ./dags/ \${AIRFLOW_HOME}/dags/
@@ -43,7 +43,7 @@ The recommended way to update your DAGs with this chart is to 
build a new docker
 
 .. code-block:: bash
 
-    docker build --tag "my-company/airflow:8a0da78" . -f - <<EOF
+    docker build --pull --tag "my-company/airflow:8a0da78" . -f - <<EOF
     FROM apache/airflow:2.0.2
 
     USER root
diff --git a/docs/helm-chart/quick-start.rst b/docs/helm-chart/quick-start.rst
index 44f13de..56e99c1 100644
--- a/docs/helm-chart/quick-start.rst
+++ b/docs/helm-chart/quick-start.rst
@@ -111,7 +111,7 @@ Adding DAGs to your image
 
     .. code-block:: bash
 
-        docker build --tag my-dags:0.0.1 .
+        docker build --pull --tag my-dags:0.0.1 .
 
 
 3. Load the image into kind:
@@ -155,7 +155,7 @@ Example below adds ``vim`` apt package.
 
     .. code-block:: bash
 
-        docker build --tag my-image:0.0.1 .
+        docker build --pull --tag my-image:0.0.1 .
 
 
 3. Load the image into kind:
@@ -192,7 +192,7 @@ Example below adds ``lxml`` PyPI package.
 
     .. code-block:: bash
 
-        docker build --tag my-image:0.0.1 .
+        docker build --pull --tag my-image:0.0.1 .
 
 
 3. Load the image into kind:
diff --git a/scripts/ci/images/ci_prepare_ci_image_on_ci.sh 
b/scripts/ci/images/ci_prepare_ci_image_on_ci.sh
index 9dd1053..18de9c3 100755
--- a/scripts/ci/images/ci_prepare_ci_image_on_ci.sh
+++ b/scripts/ci/images/ci_prepare_ci_image_on_ci.sh
@@ -38,11 +38,6 @@ function build_ci_image_on_ci() {
     else
         build_images::rebuild_ci_image_if_needed
     fi
-
-    # Disable force pulling forced above this is needed for the subsequent 
scripts so that
-    # They do not try to pull/build images again.
-    unset FORCE_PULL_IMAGES
-    unset FORCE_BUILD
     # Skip the image check entirely for the rest of the script
     export CHECK_IMAGE_FOR_REBUILD="false"
     start_end::group_end
diff --git a/scripts/ci/images/ci_prepare_prod_image_on_ci.sh 
b/scripts/ci/images/ci_prepare_prod_image_on_ci.sh
index 49cb06a..85f87e3 100755
--- a/scripts/ci/images/ci_prepare_prod_image_on_ci.sh
+++ b/scripts/ci/images/ci_prepare_prod_image_on_ci.sh
@@ -38,10 +38,6 @@ function build_prod_images_on_ci() {
     else
         build_images::build_prod_images_from_locally_built_airflow_packages
     fi
-
-    # Disable force pulling forced above this is needed for the subsequent 
scripts so that
-    # They do not try to pull/build images again
-    unset FORCE_PULL_IMAGES
     unset FORCE_BUILD
 }
 
diff --git a/scripts/ci/images/ci_push_ci_images.sh 
b/scripts/ci/images/ci_push_ci_images.sh
index 6e232fa..30e211e 100755
--- a/scripts/ci/images/ci_push_ci_images.sh
+++ b/scripts/ci/images/ci_push_ci_images.sh
@@ -18,8 +18,16 @@
 # shellcheck source=scripts/ci/libraries/_script_init.sh
 . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
 
+# Pushes the CI image with a tag to the GitHub registry
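+# The push tag (GITHUB_REGISTRY_PUSH_IMAGE_TAG) is either "latest" (for push builds) or the
+# commit SHA (for pull-request triggered 'workflow_run' builds).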
+function push_ci_image_with_tag_to_github() {
+    start_end::group_start "Push CI image"
+    docker_v tag "${AIRFLOW_CI_IMAGE}" 
"${AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
+    push_pull_remove_images::push_image_with_retries 
"${AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
+    start_end::group_end
+}
+
 build_images::prepare_ci_build
 
 build_images::login_to_docker_registry
 
-push_pull_remove_images::push_ci_images_to_github
+push_ci_image_with_tag_to_github
diff --git a/scripts/ci/images/ci_push_production_images.sh 
b/scripts/ci/images/ci_push_production_images.sh
index 7e0e0ea..f7bc119 100755
--- a/scripts/ci/images/ci_push_production_images.sh
+++ b/scripts/ci/images/ci_push_production_images.sh
@@ -18,8 +18,17 @@
 # shellcheck source=scripts/ci/libraries/_script_init.sh
 . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
 
+# Pushes the PROD image with a tag to the GitHub registry
+function push_prod_image_with_tag_to_github () {
+    start_end::group_start "Push PROD image"
+    local airflow_prod_tagged_image="${AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
+    docker_v tag "${AIRFLOW_PROD_IMAGE}" "${airflow_prod_tagged_image}"
+    push_pull_remove_images::push_image_with_retries "${airflow_prod_tagged_image}"
+    start_end::group_end
+}
+
 build_images::prepare_prod_build
 
 build_images::login_to_docker_registry
 
-push_pull_remove_images::push_prod_images_to_github
+push_prod_image_with_tag_to_github
diff --git a/scripts/ci/images/ci_run_prod_image_test.sh 
b/scripts/ci/images/ci_run_prod_image_test.sh
index 24c30e0..746bf68 100755
--- a/scripts/ci/images/ci_run_prod_image_test.sh
+++ b/scripts/ci/images/ci_run_prod_image_test.sh
@@ -36,7 +36,7 @@ elif [[ ${file} == *"Dockerfile" ]]; then
     echo "${COLOR_BLUE}Replacing the airflow image version in ${file} with 
${latest_airflow_version_released} for testing.${COLOR_RESET}"
     echo
     sed  "s/FROM apache\/airflow:.*$/FROM 
apache\/airflow:${latest_airflow_version_released}/" <Dockerfile | \
-    docker build . --tag "${job_name}" -f -
+    docker build --pull . --tag "${job_name}" -f -
     res=$?
     docker rmi --force "${job_name}"
 else
diff --git a/scripts/ci/libraries/_all_libs.sh 
b/scripts/ci/libraries/_all_libs.sh
index b2ccbed..512ee95 100755
--- a/scripts/ci/libraries/_all_libs.sh
+++ b/scripts/ci/libraries/_all_libs.sh
@@ -52,8 +52,6 @@ readonly SCRIPTS_CI_DIR
 . "${LIBRARIES_DIR}"/_push_pull_remove_images.sh
 # shellcheck source=scripts/ci/libraries/_runs.sh
 . "${LIBRARIES_DIR}"/_runs.sh
-# shellcheck source=scripts/ci/libraries/_spinner.sh
-. "${LIBRARIES_DIR}"/_spinner.sh
 # shellcheck source=scripts/ci/libraries/_start_end.sh
 . "${LIBRARIES_DIR}"/_start_end.sh
 # shellcheck source=scripts/ci/libraries/_testing.sh
diff --git a/scripts/ci/libraries/_build_images.sh 
b/scripts/ci/libraries/_build_images.sh
index abc06bd..9229c3f 100644
--- a/scripts/ci/libraries/_build_images.sh
+++ b/scripts/ci/libraries/_build_images.sh
@@ -108,27 +108,60 @@ function build_images::forget_last_answer() {
     fi
 }
 
-function build_images::confirm_via_terminal() {
-    echo >"${DETECTED_TERMINAL}"
-    echo >"${DETECTED_TERMINAL}"
+
+function build_images::reconfirm_rebuilding_if_not_rebased() {
+    local latest_main_commit_sha
+    latest_main_commit_sha=$(curl -s -H "Accept: application/vnd.github.VERSION.sha" \
+        "https://api.github.com/repos/${GITHUB_REPOSITORY}/commits/${DEFAULT_BRANCH}")
+    if [[ "$(git log --format=format:%H | grep -c "${latest_main_commit_sha}")" == "0" ]]; then
+         echo
+         echo "${COLOR_YELLOW}WARNING!!!!: You are not rebased on top of the latest ${DEFAULT_BRANCH} branch of the airflow repo.${COLOR_RESET}"
+         echo "${COLOR_YELLOW}The rebuild might take a lot of time and you might need to do it again.${COLOR_RESET}"
+         echo
+         echo "${COLOR_YELLOW}It is STRONGLY RECOMMENDED that you rebase your code first!${COLOR_RESET}"
+         echo
+         "${AIRFLOW_SOURCES}/confirm" "Are you really sure you want to rebuild ${THE_IMAGE_TYPE}-python${PYTHON_MAJOR_MINOR_VERSION}"
+         RES=$?
+    fi
+}
+
+function build_images::print_modified_files() {
+    echo "${MODIFIED_FILES[@]}" | xargs -n 1 echo " * "
+}
+
+function build_images::encourage_rebuilding_on_modified_files() {
+    echo
     set +u
     if [[ ${#MODIFIED_FILES[@]} != "" ]]; then
-        echo "${COLOR_YELLOW}The CI image for Python ${PYTHON_BASE_IMAGE} 
image likely needs to be rebuilt${COLOR_RESET}" >"${DETECTED_TERMINAL}"
-        echo "${COLOR_YELLOW}The files were modified since last build: 
${MODIFIED_FILES[*]}${COLOR_RESET}" >"${DETECTED_TERMINAL}"
+        echo
+        echo "${COLOR_YELLOW}The CI image for Python ${PYTHON_MAJOR_MINOR_VERSION} might be outdated${COLOR_RESET}"
+        echo
+        echo "${COLOR_BLUE}Please run this command at your earliest convenience: ${COLOR_RESET}"
+        echo
+        echo "${COLOR_YELLOW}./breeze build-image --python ${PYTHON_MAJOR_MINOR_VERSION}${COLOR_RESET}"
+        echo
     fi
-    if [[ ${ACTION} == "pull and rebuild" ]]; then
-        echo "${COLOR_YELLOW}This build involves pull and it might take some 
time and network to pull the base image first!${COLOR_RESET}" 
>"${DETECTED_TERMINAL}"
+}
+
+function build_images::confirm_rebuilding_on_modified_files() {
+    echo
+    set +u
+    if [[ ${#MODIFIED_FILES[@]} != "" ]]; then
+        echo "${COLOR_BLUE}The CI image for Python 
${PYTHON_MAJOR_MINOR_VERSION} image likely needs to be rebuild${COLOR_RESET}"
+        echo "${COLOR_BLUE}The files were modified since last 
build:${COLOR_RESET}"
+        echo
+        echo "${COLOR_BLUE}$(build_images::print_modified_files)${COLOR_RESET}"
+        echo
     fi
     set -u
-    echo >"${DETECTED_TERMINAL}"
-    echo "${COLOR_YELLOW}WARNING!!!!:Make sure that you rebased to latest 
upstream before rebuilding or the rebuild might take a lot of 
time!${COLOR_RESET}" >"${DETECTED_TERMINAL}"
-    echo >"${DETECTED_TERMINAL}"
     # Make sure to use output of tty rather than stdin/stdout when available - this way confirm
     # will work also in case of pre-commits (git does not pass stdin/stdout to pre-commit hooks)
     # shellcheck disable=SC2094
-    "${AIRFLOW_SOURCES}/confirm" "${ACTION} image 
${THE_IMAGE_TYPE}-python${PYTHON_MAJOR_MINOR_VERSION}" \
-        <"${DETECTED_TERMINAL}" >"${DETECTED_TERMINAL}"
+    "${AIRFLOW_SOURCES}/confirm" "PULL & BUILD the image 
${THE_IMAGE_TYPE}-python${PYTHON_MAJOR_MINOR_VERSION}"
     RES=$?
+    if [[ ${RES} == "0" ]]; then
+        build_images::reconfirm_rebuilding_if_not_rebased
+    fi
 }
 
 # Confirms if the image should be rebuilt and interactively checks it with the 
user.
@@ -137,10 +170,6 @@ function build_images::confirm_via_terminal() {
 # So that the script works also from within pre-commit run via git hooks - 
where stdin is not
 # available - it tries to find usable terminal and ask the user via this 
terminal.
 function build_images::confirm_image_rebuild() {
-    ACTION="rebuild"
-    if [[ ${FORCE_PULL_IMAGES:=} == "true" ]]; then
-        ACTION="pull and rebuild"
-    fi
     if [[ -f "${LAST_FORCE_ANSWER_FILE}" ]]; then
         # set variable from last answered response given in the same 
pre-commit run - so that it can be
         # answered in the first pre-commit check (build) and then used in 
another (mypy/flake8 etc).
@@ -151,7 +180,7 @@ function build_images::confirm_image_rebuild() {
     local RES
     if [[ ${CI:="false"} == "true" ]]; then
         verbosity::print_info
-        verbosity::print_info "CI environment - forcing rebuild for image 
${THE_IMAGE_TYPE}."
+        verbosity::print_info "CI environment - forcing pull and rebuild for 
image ${THE_IMAGE_TYPE}."
         verbosity::print_info
         RES="0"
     elif [[ -n "${FORCE_ANSWER_TO_QUESTIONS=}" ]]; then
@@ -171,24 +200,17 @@ function build_images::confirm_image_rebuild() {
         esac
     elif [[ -t 0 ]]; then
         # Check if this script is run interactively with stdin open and 
terminal attached
-        echo
-        set +u
-        if [[ ${#MODIFIED_FILES[@]} != "" ]]; then
-            echo "${COLOR_YELLOW}The CI image for Python ${PYTHON_BASE_IMAGE} 
image likely needs to be rebuilt${COLOR_RESET}"
-            echo "${COLOR_YELLOW}The files were modified since last build: 
${MODIFIED_FILES[*]}${COLOR_RESET}"
-        fi
-        echo
-        echo "${COLOR_YELLOW}WARNING!!!!:Make sure that you rebased to latest 
upstream before rebuilding or the rebuild might take a lot of 
time!${COLOR_RESET}"
-        echo
-        set -u
-        "${AIRFLOW_SOURCES}/confirm" "${ACTION} image 
${THE_IMAGE_TYPE}-python${PYTHON_MAJOR_MINOR_VERSION}"
-        RES=$?
+         build_images::confirm_rebuilding_on_modified_files
     elif [[ ${DETECTED_TERMINAL:=$(tty)} != "not a tty" ]]; then
         export DETECTED_TERMINAL
-        build_images::confirm_via_terminal
+        # shellcheck disable=SC2094
+        build_images::encourage_rebuilding_on_modified_files 
>"${DETECTED_TERMINAL}" <"${DETECTED_TERMINAL}"
+        RES=1
     elif [[ -c /dev/tty ]]; then
         export DETECTED_TERMINAL=/dev/tty
-        build_images::confirm_via_terminal
+        # shellcheck disable=SC2094
+        build_images::encourage_rebuilding_on_modified_files 
>"${DETECTED_TERMINAL}" <"${DETECTED_TERMINAL}"
+        RES=1
     else
         verbosity::print_info
         verbosity::print_info "No terminal, no stdin - quitting"
@@ -250,128 +272,6 @@ function build_images::check_for_docker_context_files() {
     fi
 }
 
-# Builds local image manifest. It contains only one random file generated 
during Docker.ci build
-function build_images::build_ci_image_manifest() {
-    docker_v build \
-        --tag="${AIRFLOW_CI_LOCAL_MANIFEST_IMAGE}" \
-        -f- . <<EOF
-FROM scratch
-COPY "manifests/local-build-cache-hash-${PYTHON_MAJOR_MINOR_VERSION}" 
/build-cache-hash
-LABEL org.opencontainers.image.source="https://github.com/${GITHUB_REPOSITORY}";
-CMD ""
-EOF
-}
-
-#
-# Retrieves information about build cache hash random file from the local image
-# The random file is generated during the build and is best indicator whether 
your local CI image
-# has been built using the same pulled image as the remote one
-#
-function build_images::get_local_build_cache_hash() {
-    set +e
-    local local_image_build_cache_file
-    
local_image_build_cache_file="${AIRFLOW_SOURCES}/manifests/local-build-cache-hash-${PYTHON_MAJOR_MINOR_VERSION}"
-    # Remove the container just in case
-    docker_v rm --force "local-airflow-ci-container" 2>/dev/null >/dev/null
-    if ! docker_v inspect "${AIRFLOW_CI_IMAGE_WITH_TAG}" 2>/dev/null 
>/dev/null; then
-        verbosity::print_info
-        verbosity::print_info "Local airflow CI image not available"
-        verbosity::print_info
-        LOCAL_MANIFEST_IMAGE_UNAVAILABLE="true"
-        export LOCAL_MANIFEST_IMAGE_UNAVAILABLE
-        touch "${local_image_build_cache_file}"
-        set -e
-        return
-
-    fi
-    docker_v create --name "local-airflow-ci-container" 
"${AIRFLOW_CI_IMAGE_WITH_TAG}" 2>/dev/null >/dev/null
-    docker_v cp "local-airflow-ci-container:/build-cache-hash" \
-        "${local_image_build_cache_file}" 2>/dev/null ||
-        touch "${local_image_build_cache_file}"
-    set -e
-    verbosity::print_info
-    verbosity::print_info "Local build cache hash: '$(cat 
"${local_image_build_cache_file}")'"
-    verbosity::print_info
-}
-
-# Retrieves information about the build cache hash random file from the remote 
image.
-# We use manifest image for that, which is a really, really small image to 
pull!
-# The image is a specially prepared manifest image which is built together 
with the main image and
-# pushed with it. This special manifest image is prepared during building of 
the CI image and contains
-# single file which is generated with random content during the docker
-# build in the right step of the image build (right after installing all 
dependencies of Apache Airflow
-# for the first time).
-# When this random file gets regenerated it means that either base image has 
changed before that step
-# or some of the earlier layers was modified - which means that it is usually 
faster to pull
-# that image first and then rebuild it.
-function build_images::get_remote_image_build_cache_hash() {
-    set +e
-    local remote_image_container_id_file
-    remote_image_container_id_file="$(mktemp)"
-    local remote_image_build_cache_file
-    remote_image_build_cache_file=$(mktemp)
-    local 
target_remote_cache_file="${AIRFLOW_SOURCES}/manifests/remote-build-cache-hash-${PYTHON_MAJOR_MINOR_VERSION}"
-    # Pull remote manifest image
-    if ! docker_v pull "${AIRFLOW_CI_REMOTE_MANIFEST_IMAGE}" 2>/dev/null 
>/dev/null; then
-        verbosity::print_info
-        verbosity::print_info "Remote docker registry unreachable"
-        verbosity::print_info
-        REMOTE_DOCKER_REGISTRY_UNREACHABLE="true"
-        export REMOTE_DOCKER_REGISTRY_UNREACHABLE
-        touch "${remote_image_build_cache_file}"
-        set -e
-        return
-    fi
-    set -e
-    rm -f "${remote_image_container_id_file}"
-    # Create container dump out of the manifest image without actually running 
it
-    docker_v create --cidfile "${remote_image_container_id_file}" 
"${AIRFLOW_CI_REMOTE_MANIFEST_IMAGE}"
-    # Extract manifest and store it in local file
-    docker_v cp "$(cat "${remote_image_container_id_file}"):/build-cache-hash" 
\
-        "${remote_image_build_cache_file}"
-    # The `mv` is an atomic operation so even if we run it in parallel (for 
example in flake) it will
-    # never be empty (happened in the past)
-    mv "${remote_image_build_cache_file}" "${target_remote_cache_file}"
-    docker_v rm --force "$(cat "${remote_image_container_id_file}")"
-    rm -f "${remote_image_container_id_file}"
-    verbosity::print_info
-    verbosity::print_info "Remote build cache hash: '$(cat 
"${target_remote_cache_file}")'"
-    verbosity::print_info
-}
-
-# Compares layers from both remote and local image and set FORCE_PULL_IMAGES 
to true in case
-# The random has in remote image is different than that in the local image
-# indicating that it is likely faster to pull the image from cache rather than 
let the
-# image rebuild fully locally
-function build_images::compare_local_and_remote_build_cache_hash() {
-    set +e
-    local local_image_build_cache_file
-    
local_image_build_cache_file="${AIRFLOW_SOURCES}/manifests/local-build-cache-hash-${PYTHON_MAJOR_MINOR_VERSION}"
-    local remote_image_build_cache_file
-    
remote_image_build_cache_file="${AIRFLOW_SOURCES}/manifests/remote-build-cache-hash-${PYTHON_MAJOR_MINOR_VERSION}"
-    local remote_hash
-    remote_hash=$(cat "${remote_image_build_cache_file}")
-    local local_hash
-    local_hash=$(cat "${local_image_build_cache_file}")
-
-    if [[ ${remote_hash} != "${local_hash}" || -z ${local_hash} ]]; then
-        echo
-        echo
-        echo "Your image and the dockerhub have different or missing build 
cache hashes."
-        echo "Local hash: '${local_hash}'. Remote hash: '${remote_hash}'."
-        echo
-        echo "Forcing pulling the images. It will be faster than rebuilding 
usually."
-        echo "You can avoid it by setting SKIP_CHECK_REMOTE_IMAGE to true"
-        echo
-        export FORCE_PULL_IMAGES="true"
-    else
-        echo
-        echo "No need to pull the image. Yours and remote cache hashes are the 
same!"
-        echo
-    fi
-    set -e
-}
-
 # Prints summary of the build parameters
 function build_images::print_build_info() {
     verbosity::print_info
@@ -398,10 +298,6 @@ function build_images::get_docker_cache_image_names() {
     
image_name="ghcr.io/$(build_images::get_github_container_registry_image_prefix)"
 
     # Example:
-    #  ghcr.io/apache/airflow/main/python:3.8-slim-buster
-    export 
AIRFLOW_PYTHON_BASE_IMAGE="${image_name}/${BRANCH_NAME}/python:${PYTHON_MAJOR_MINOR_VERSION}-slim-buster"
-
-    # Example:
     #  ghcr.io/apache/airflow/main/ci/python3.8
     export 
AIRFLOW_CI_IMAGE="${image_name}/${BRANCH_NAME}/ci/python${PYTHON_MAJOR_MINOR_VERSION}"
 
@@ -410,14 +306,6 @@ function build_images::get_docker_cache_image_names() {
     #  ghcr.io/apache/airflow/main/ci/python3.8:<COMMIT_SHA>
     export 
AIRFLOW_CI_IMAGE_WITH_TAG="${image_name}/${BRANCH_NAME}/ci/python${PYTHON_MAJOR_MINOR_VERSION}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
 
-    # Example:
-    #  local-airflow-ci-manifest/main/python3.8
-    export 
AIRFLOW_CI_LOCAL_MANIFEST_IMAGE="local-airflow-ci-manifest/${BRANCH_NAME}/python${PYTHON_MAJOR_MINOR_VERSION}"
-
-    # Example:
-    #  ghcr.io/apache/airflow/main/ci-manifest/python3.8
-    export 
AIRFLOW_CI_REMOTE_MANIFEST_IMAGE="${image_name}/${BRANCH_NAME}/ci-manifest/python${PYTHON_MAJOR_MINOR_VERSION}"
-
     # File that is touched when the CI image is built for the first time 
locally
     export 
BUILT_CI_IMAGE_FLAG_FILE="${BUILD_CACHE_DIR}/${BRANCH_NAME}/.built_${PYTHON_MAJOR_MINOR_VERSION}"
 
@@ -425,16 +313,33 @@ function build_images::get_docker_cache_image_names() {
     #  ghcr.io/apache/airflow/main/prod/python3.8
     export 
AIRFLOW_PROD_IMAGE="${image_name}/${BRANCH_NAME}/prod/python${PYTHON_MAJOR_MINOR_VERSION}"
 
-    # Example:
-    #   ghcr.io/apache/airflow/main/prod-build/python3.8
-    export 
AIRFLOW_PROD_BUILD_IMAGE="${image_name}/${BRANCH_NAME}/prod-build/python${PYTHON_MAJOR_MINOR_VERSION}"
-
     # Kubernetes image to build
     #  ghcr.io/apache/airflow/main/kubernetes/python3.8
     export 
AIRFLOW_IMAGE_KUBERNETES="${image_name}/${BRANCH_NAME}/kubernetes/python${PYTHON_MAJOR_MINOR_VERSION}"
+}
 
-
-
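+# Checks whether the docker 'buildx' CLI plugin is available and sets the BUILD_COMMAND array accordingly:
+#   * plain "build" when buildx is not installed (buildkit is still used via DOCKER_BUILDKIT=1)
+#   * "buildx build --builder default --progress=tty" when the plugin is available
+#   * "buildx build --builder airflow_cache --progress=tty" (creating the builder if missing)
+#     when PREPARE_BUILDX_CACHE is "true" - in that case the plugin is required and the script fails without it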
+function build_images::check_if_buildx_plugin_available() {
+    export BUILD_COMMAND=("build")
+    local buildx_version
+    buildx_version=$(docker buildx version 2>/dev/null || true)
+    if [[ ${buildx_version} != "" ]]; then
+        BUILDX_PLUGIN_AVAILABLE="true"
+        export BUILD_COMMAND=("buildx" "build" "--builder" "default" 
"--progress=tty")
+    else
+        BUILDX_PLUGIN_AVAILABLE="false"
+    fi
+    if [[ ${PREPARE_BUILDX_CACHE} == "true" ]]; then
+        if [[ ${BUILDX_PLUGIN_AVAILABLE} == "true" ]]; then
+            export BUILD_COMMAND=("buildx" "build" "--builder" "airflow_cache" 
"--progress=tty")
+            docker_v buildx inspect airflow_cache || docker_v buildx create 
--name airflow_cache
+        else
+            echo
+            echo "${COLOR_RED}Buildx cli plugin is not available and you need 
it to prepare buildx cache.${COLOR_RESET}"
+            echo "${COLOR_RED}Please install it following 
https://docs.docker.com/buildx/working-with-buildx/${COLOR_RESET}";
+            echo
+            exit 1
+        fi
+    fi
 }
 
 # If GitHub Registry is used, login to the registry using GITHUB_USERNAME and
@@ -491,33 +396,19 @@ function build_images::prepare_ci_build() {
 # In case rebuild is needed, it determines (by comparing layers in local and 
remote image)
 # Whether pull is needed before rebuild.
 function build_images::rebuild_ci_image_if_needed() {
-    verbosity::print_info
-    verbosity::print_info "Checking if pull or just build for 
${THE_IMAGE_TYPE} is needed."
-    verbosity::print_info
     if [[ -f "${BUILT_CI_IMAGE_FLAG_FILE}" ]]; then
         verbosity::print_info
-        verbosity::print_info "${THE_IMAGE_TYPE} image already built locally."
+        verbosity::print_info "CI image already built locally."
         verbosity::print_info
     else
         verbosity::print_info
-        verbosity::print_info "${THE_IMAGE_TYPE} image not built locally: 
pulling and building"
-        verbosity::print_info
-        export FORCE_PULL_IMAGES="true"
-        export FORCE_BUILD_IMAGES="true"
-    fi
-
-    if [[ ${CHECK_IMAGE_FOR_REBUILD} == "false" ]]; then
-        verbosity::print_info
-        verbosity::print_info "Skip checking for rebuilds of the CI image but 
checking if it needs to be pulled"
+        verbosity::print_info "CI image not built locally: force pulling and 
building"
         verbosity::print_info
-        push_pull_remove_images::pull_ci_images_if_needed
-        return
+        export FORCE_BUILD="true"
     fi
     local needs_docker_build="false"
     md5sum::check_if_docker_build_is_needed
-    build_images::get_local_build_cache_hash
     if [[ ${needs_docker_build} == "true" ]]; then
-        md5sum::check_if_pull_is_needed
         SKIP_REBUILD="false"
         if [[ ${CI:=} != "true" && "${FORCE_BUILD:=}" != "true" ]]; then
             build_images::confirm_image_rebuild
@@ -536,9 +427,7 @@ function build_images::rebuild_ci_image_if_needed() {
             verbosity::print_info "Build start: ${THE_IMAGE_TYPE} image."
             verbosity::print_info
             build_images::build_ci_image
-            build_images::get_local_build_cache_hash
             md5sum::update_all_md5
-            build_images::build_ci_image_manifest
             verbosity::print_info
             verbosity::print_info "Build completed: ${THE_IMAGE_TYPE} image."
             verbosity::print_info
@@ -556,60 +445,21 @@ function 
build_images::rebuild_ci_image_if_needed_with_group() {
     start_end::group_end
 }
 
-
-# Interactive version of confirming the ci image that is used in pre-commits
-# it displays additional information - what the user should do in order to 
bring the local images
-# back to state that pre-commit will be happy with
-function build_images::rebuild_ci_image_if_needed_and_confirmed() {
-    local needs_docker_build="false"
-    THE_IMAGE_TYPE="CI"
-
-    md5sum::check_if_docker_build_is_needed
-
-    if [[ ${needs_docker_build} == "true" ]]; then
-        md5sum::check_if_pull_is_needed
-        verbosity::print_info
-        verbosity::print_info "Docker image build is needed!"
-        verbosity::print_info
-    else
-        verbosity::print_info
-        verbosity::print_info "Docker image build is not needed!"
-        verbosity::print_info
-    fi
-
-    if [[ "${needs_docker_build}" == "true" ]]; then
-        SKIP_REBUILD="false"
-        build_images::confirm_image_rebuild
-
-        if [[ ${SKIP_REBUILD} != "true" ]]; then
-            build_images::rebuild_ci_image_if_needed
-        fi
-    fi
-}
-
 # Builds CI image - depending on the caching strategy (pulled, local, 
disabled) it
-# passes the necessary docker build flags via DOCKER_CACHE_CI_DIRECTIVE array
+# passes the necessary docker build flags via docker_ci_cache_directive array
 # it also passes the right Build args depending on the configuration of the 
build
 # selected by Breeze flags or environment variables.
 function build_images::build_ci_image() {
-    local spin_pid
+    build_images::check_if_buildx_plugin_available
     build_images::print_build_info
-    if [[ -n ${DETECTED_TERMINAL=} ]]; then
-        echo -n "Preparing ${AIRFLOW_CI_IMAGE}.
-        " >"${DETECTED_TERMINAL}"
-        spinner::spin "${OUTPUT_LOG}" &
-        spin_pid=$!
-        # shellcheck disable=SC2064,SC2016
-        traps::add_trap '$(kill '${spin_pid}' || true)' EXIT HUP INT TERM
-    fi
-    push_pull_remove_images::pull_ci_images_if_needed
+    local docker_ci_cache_directive
     if [[ "${DOCKER_CACHE}" == "disabled" ]]; then
-        export DOCKER_CACHE_CI_DIRECTIVE=("--no-cache")
+        docker_ci_cache_directive=("--no-cache")
     elif [[ "${DOCKER_CACHE}" == "local" ]]; then
-        export DOCKER_CACHE_CI_DIRECTIVE=()
+        docker_ci_cache_directive=()
     elif [[ "${DOCKER_CACHE}" == "pulled" ]]; then
-        export DOCKER_CACHE_CI_DIRECTIVE=(
-            "--cache-from" "${AIRFLOW_CI_IMAGE}"
+        docker_ci_cache_directive=(
+            "--cache-from=${AIRFLOW_CI_IMAGE}:cache"
         )
     else
         echo
@@ -617,37 +467,23 @@ function build_images::build_ci_image() {
         echo
         exit 1
     fi
-    EXTRA_DOCKER_CI_BUILD_FLAGS=(
-    )
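+    # When preparing the buildx cache, also push the layer cache to the registry (the ":cache" ref)
+    # and --load the resulting image back into the local docker daemon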
+    if [[ ${PREPARE_BUILDX_CACHE} == "true" ]]; then
+        docker_ci_cache_directive+=(
+            "--cache-to=type=registry,ref=${AIRFLOW_CI_IMAGE}:cache"
+            "--load"
+        )
+    fi
+    local extra_docker_ci_flags=()
     if [[ ${CI} == "true" ]]; then
         EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
             "--build-arg" "PIP_PROGRESS_BAR=off"
         )
     fi
     if [[ -n "${AIRFLOW_CONSTRAINTS_LOCATION}" ]]; then
-        EXTRA_DOCKER_CI_BUILD_FLAGS+=(
+        extra_docker_ci_flags+=(
             "--build-arg" 
"AIRFLOW_CONSTRAINTS_LOCATION=${AIRFLOW_CONSTRAINTS_LOCATION}"
         )
     fi
-
-    if [[ -n ${spin_pid=} ]]; then
-        kill -HUP "${spin_pid}" || true
-        wait "${spin_pid}" || true
-        echo >"${DETECTED_TERMINAL}"
-    fi
-    if [[ -n ${DETECTED_TERMINAL=} ]]; then
-        echo -n "Preparing ${AIRFLOW_CI_IMAGE}.
-        " >"${DETECTED_TERMINAL}"
-        spinner::spin "${OUTPUT_LOG}" &
-        spin_pid=$!
-        # shellcheck disable=SC2064,SC2016
-        traps::add_trap '$(kill '${spin_pid}' || true)' EXIT HUP INT TERM
-    fi
-    if [[ -n ${DETECTED_TERMINAL=} ]]; then
-        echo -n "
-Docker building ${AIRFLOW_CI_IMAGE}.
-" >"${DETECTED_TERMINAL}"
-    fi
     set +u
 
     local additional_dev_args=()
@@ -665,9 +501,10 @@ Docker building ${AIRFLOW_CI_IMAGE}.
     if [[ -n "${RUNTIME_APT_COMMAND}" ]]; then
         additional_runtime_args+=("--build-arg" 
"RUNTIME_APT_COMMAND=\"${RUNTIME_APT_COMMAND}\"")
     fi
-    docker_v build \
-        "${EXTRA_DOCKER_CI_BUILD_FLAGS[@]}" \
-        --build-arg PYTHON_BASE_IMAGE="${AIRFLOW_PYTHON_BASE_IMAGE}" \
+    docker_v "${BUILD_COMMAND[@]}" \
+        "${extra_docker_ci_flags[@]}" \
+        --pull \
+        --build-arg PYTHON_BASE_IMAGE="${PYTHON_BASE_IMAGE}" \
         --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \
         --build-arg AIRFLOW_BRANCH="${BRANCH_NAME}" \
         --build-arg AIRFLOW_EXTRAS="${AIRFLOW_EXTRAS}" \
@@ -690,7 +527,7 @@ Docker building ${AIRFLOW_CI_IMAGE}.
         --build-arg COMMIT_SHA="${COMMIT_SHA}" \
         "${additional_dev_args[@]}" \
         "${additional_runtime_args[@]}" \
-        "${DOCKER_CACHE_CI_DIRECTIVE[@]}" \
+        "${docker_ci_cache_directive[@]}" \
         -t "${AIRFLOW_CI_IMAGE}" \
         --target "main" \
         . -f Dockerfile.ci
@@ -699,11 +536,6 @@ Docker building ${AIRFLOW_CI_IMAGE}.
         echo "Tagging additionally image ${AIRFLOW_CI_IMAGE} with ${IMAGE_TAG}"
         docker_v tag "${AIRFLOW_CI_IMAGE}" "${IMAGE_TAG}"
     fi
-    if [[ -n ${spin_pid=} ]]; then
-        kill -HUP "${spin_pid}" || true
-        wait "${spin_pid}" || true
-        echo >"${DETECTED_TERMINAL}"
-    fi
 }
 
 # Prepares all variables needed by the CI build. Depending on the 
configuration used (python version
@@ -759,10 +591,11 @@ function build_images::prepare_prod_build() {
 
 # Builds PROD image - depending on the caching strategy (pulled, local, 
disabled) it
-# passes the necessary docker build flags via DOCKER_CACHE_PROD_DIRECTIVE and
-# DOCKER_CACHE_PROD_BUILD_DIRECTIVE (separate caching options are needed for "build" segment of the image)
+# passes the necessary docker build flags via the docker_cache_prod_directive array
+# (with buildkit, the "build" segment of the image is covered by the same registry cache when mode=max is used)
 # it also passes the right Build args depending on the configuration of the 
build
 # selected by Breeze flags or environment variables.
 function build_images::build_prod_images() {
+    build_images::check_if_buildx_plugin_available
     build_images::print_build_info
 
     if [[ ${SKIP_BUILDING_PROD_IMAGE} == "true" ]]; then
@@ -772,22 +605,14 @@ function build_images::build_prod_images() {
         echo
         return
     fi
-
-    push_pull_remove_images::pull_prod_images_if_needed
-
+    local docker_cache_prod_directive
     if [[ "${DOCKER_CACHE}" == "disabled" ]]; then
-        export DOCKER_CACHE_PROD_DIRECTIVE=("--cache-from" 
"${AIRFLOW_PROD_BUILD_IMAGE}")
-        export DOCKER_CACHE_PROD_BUILD_DIRECTIVE=("--no-cache")
+        docker_cache_prod_directive=("--no-cache")
     elif [[ "${DOCKER_CACHE}" == "local" ]]; then
-        export DOCKER_CACHE_PROD_DIRECTIVE=()
-        export DOCKER_CACHE_PROD_BUILD_DIRECTIVE=()
+        docker_cache_prod_directive=()
     elif [[ "${DOCKER_CACHE}" == "pulled" ]]; then
-        export DOCKER_CACHE_PROD_DIRECTIVE=(
-            "--cache-from" "${AIRFLOW_PROD_BUILD_IMAGE}"
-            "--cache-from" "${AIRFLOW_PROD_IMAGE}"
-        )
-        export DOCKER_CACHE_PROD_BUILD_DIRECTIVE=(
-            "--cache-from" "${AIRFLOW_PROD_BUILD_IMAGE}"
+        docker_cache_prod_directive=(
+            "--cache-from=${AIRFLOW_PROD_IMAGE}:cache"
         )
     else
         echo
@@ -796,6 +621,13 @@ function build_images::build_prod_images() {
         echo
         exit 1
     fi
+    if [[ ${PREPARE_BUILDX_CACHE} == "true" ]]; then
+        # The cache for the prod image also contains the build stage when mode=max is specified!
+        docker_cache_prod_directive+=(
+            "--cache-to=type=registry,ref=${AIRFLOW_PROD_IMAGE}:cache,mode=max"
+            "--load"
+        )
+    fi
     set +u
     local additional_dev_args=()
     if [[ -n "${DEV_APT_DEPS}" ]]; then
@@ -804,35 +636,6 @@ function build_images::build_prod_images() {
     if [[ -n "${DEV_APT_COMMAND}" ]]; then
         additional_dev_args+=("--build-arg" 
"DEV_APT_COMMAND=\"${DEV_APT_COMMAND}\"")
     fi
-    docker_v build \
-        "${EXTRA_DOCKER_PROD_BUILD_FLAGS[@]}" \
-        --build-arg PYTHON_BASE_IMAGE="${AIRFLOW_PYTHON_BASE_IMAGE}" \
-        --build-arg INSTALL_MYSQL_CLIENT="${INSTALL_MYSQL_CLIENT}" \
-        --build-arg INSTALL_MSSQL_CLIENT="${INSTALL_MSSQL_CLIENT}" \
-        --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \
-        --build-arg AIRFLOW_BRANCH="${AIRFLOW_BRANCH_FOR_PYPI_PRELOADING}" \
-        --build-arg AIRFLOW_EXTRAS="${AIRFLOW_EXTRAS}" \
-        --build-arg ADDITIONAL_AIRFLOW_EXTRAS="${ADDITIONAL_AIRFLOW_EXTRAS}" \
-        --build-arg ADDITIONAL_PYTHON_DEPS="${ADDITIONAL_PYTHON_DEPS}" \
-        "${additional_dev_args[@]}" \
-        --build-arg 
INSTALL_PROVIDERS_FROM_SOURCES="${INSTALL_PROVIDERS_FROM_SOURCES}" \
-        --build-arg ADDITIONAL_DEV_APT_COMMAND="${ADDITIONAL_DEV_APT_COMMAND}" 
\
-        --build-arg ADDITIONAL_DEV_APT_DEPS="${ADDITIONAL_DEV_APT_DEPS}" \
-        --build-arg ADDITIONAL_DEV_APT_ENV="${ADDITIONAL_DEV_APT_ENV}" \
-        --build-arg 
AIRFLOW_PRE_CACHED_PIP_PACKAGES="${AIRFLOW_PRE_CACHED_PIP_PACKAGES}" \
-        --build-arg INSTALL_FROM_PYPI="${INSTALL_FROM_PYPI}" \
-        --build-arg 
INSTALL_FROM_DOCKER_CONTEXT_FILES="${INSTALL_FROM_DOCKER_CONTEXT_FILES}" \
-        --build-arg 
UPGRADE_TO_NEWER_DEPENDENCIES="${UPGRADE_TO_NEWER_DEPENDENCIES}" \
-        --build-arg BUILD_ID="${CI_BUILD_ID}" \
-        --build-arg COMMIT_SHA="${COMMIT_SHA}" \
-        --build-arg 
CONSTRAINTS_GITHUB_REPOSITORY="${CONSTRAINTS_GITHUB_REPOSITORY}" \
-        --build-arg AIRFLOW_CONSTRAINTS="${AIRFLOW_CONSTRAINTS}" \
-        --build-arg 
AIRFLOW_IMAGE_REPOSITORY="https://github.com/${GITHUB_REPOSITORY}"; \
-        --build-arg AIRFLOW_IMAGE_DATE_CREATED="$(date -u 
+'%Y-%m-%dT%H:%M:%SZ')" \
-        "${DOCKER_CACHE_PROD_BUILD_DIRECTIVE[@]}" \
-        -t "${AIRFLOW_PROD_BUILD_IMAGE}" \
-        --target "airflow-build-image" \
-        . -f Dockerfile
     local additional_runtime_args=()
     if [[ -n "${RUNTIME_APT_DEPS}" ]]; then
         additional_runtime_args+=("--build-arg" 
"RUNTIME_APT_DEPS=\"${RUNTIME_APT_DEPS}\"")
@@ -840,9 +643,10 @@ function build_images::build_prod_images() {
     if [[ -n "${RUNTIME_APT_COMMAND}" ]]; then
         additional_runtime_args+=("--build-arg" 
"RUNTIME_APT_COMMAND=\"${RUNTIME_APT_COMMAND}\"")
     fi
-    docker_v build \
+    docker_v "${BUILD_COMMAND[@]}" \
         "${EXTRA_DOCKER_PROD_BUILD_FLAGS[@]}" \
-        --build-arg PYTHON_BASE_IMAGE="${AIRFLOW_PYTHON_BASE_IMAGE}" \
+        --pull \
+        --build-arg PYTHON_BASE_IMAGE="${PYTHON_BASE_IMAGE}" \
         --build-arg INSTALL_MYSQL_CLIENT="${INSTALL_MYSQL_CLIENT}" \
         --build-arg INSTALL_MSSQL_CLIENT="${INSTALL_MSSQL_CLIENT}" \
         --build-arg ADDITIONAL_AIRFLOW_EXTRAS="${ADDITIONAL_AIRFLOW_EXTRAS}" \
@@ -869,7 +673,7 @@ function build_images::build_prod_images() {
         --build-arg AIRFLOW_IMAGE_DATE_CREATED="$(date -u 
+'%Y-%m-%dT%H:%M:%SZ')" \
         "${additional_dev_args[@]}" \
         "${additional_runtime_args[@]}" \
-        "${DOCKER_CACHE_PROD_DIRECTIVE[@]}" \
+        "${docker_cache_prod_directive[@]}" \
         -t "${AIRFLOW_PROD_IMAGE}" \
         --target "main" \
         . -f Dockerfile
@@ -899,11 +703,7 @@ function build_images::tag_image() {
 # and local to speed up iteration on kerberos tests
 function build_images::determine_docker_cache_strategy() {
     if [[ -z "${DOCKER_CACHE=}" ]]; then
-        if [[ "${PRODUCTION_IMAGE}" == "true" ]]; then
-            export DOCKER_CACHE="local"
-        else
-            export DOCKER_CACHE="pulled"
-        fi
+        export DOCKER_CACHE="pulled"
     fi
     verbosity::print_info
     verbosity::print_info "Using ${DOCKER_CACHE} cache strategy for the build."
diff --git a/scripts/ci/libraries/_initialization.sh 
b/scripts/ci/libraries/_initialization.sh
index 3df9513..acaa33d 100644
--- a/scripts/ci/libraries/_initialization.sh
+++ b/scripts/ci/libraries/_initialization.sh
@@ -87,6 +87,9 @@ function initialization::initialize_base_variables() {
     # so that all breeze commands use emulation
     export DOCKER_DEFAULT_PLATFORM=linux/amd64
 
+    # enable buildkit for builds
+    export DOCKER_BUILDKIT=1
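+    # (DOCKER_BUILDKIT=1 makes plain 'docker build' use the buildkit backend even when the buildx plugin is not installed)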
+
     # Default port numbers for forwarded ports
     export SSH_PORT=${SSH_PORT:="12322"}
     export WEBSERVER_HOST_PORT=${WEBSERVER_HOST_PORT:="28080"}
@@ -179,9 +182,6 @@ function initialization::initialize_base_variables() {
     # Dry run - only show docker-compose and docker commands but do not 
execute them
     export DRY_RUN_DOCKER=${DRY_RUN_DOCKER:="false"}
 
-    # By default we only push built ci/prod images - base python images are 
only pushed
-    # When requested
-    export PUSH_PYTHON_BASE_IMAGE=${PUSH_PYTHON_BASE_IMAGE:="false"}
 }
 
 # Determine current branch
@@ -290,13 +290,6 @@ function initialization::initialize_mount_variables() {
 
 # Determine values of force settings
 function initialization::initialize_force_variables() {
-    # By default we do not pull CI/PROD images. We can force-pull them when 
needed
-    export FORCE_PULL_IMAGES=${FORCE_PULL_IMAGES:="false"}
-
-    # By default we do not pull python base image. We should do that only when 
we run upgrade check in
-    # CI main and when we manually refresh the images to latest versions
-    export CHECK_IF_BASE_PYTHON_IMAGE_UPDATED="false"
-
     # Determines whether to force build without checking if it is needed
     # Can be overridden by '--force-build-images' flag.
     export FORCE_BUILD_IMAGES=${FORCE_BUILD_IMAGES:="false"}
@@ -485,6 +478,9 @@ function initialization::initialize_image_build_variables() 
{
     #   * wheel - replaces airflow with one specified in the sdist file in 
/dist
     #   * <VERSION> - replaces airflow with the specific version from PyPI
     export USE_AIRFLOW_VERSION=${USE_AIRFLOW_VERSION:=""}
+
+    # Whether the buildx cache should be pushed to the registry after the images are built
+    export PREPARE_BUILDX_CACHE=${PREPARE_BUILDX_CACHE:="false"}
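+    # (typically set by the './breeze prepare-build-cache' command / the '--prepare-buildx-cache' flag)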
 }
 
 # Determine version suffixes used to build provider packages
@@ -577,6 +573,9 @@ function initialization::initialize_github_variables() {
 
 function initialization::initialize_test_variables() {
 
+    # Enables test coverage
+    export ENABLE_TEST_COVERAGE=${ENABLE_TEST_COVERAGE:=""}
+
     # In case we want to force certain test type to run, this variable should 
be set to this type
     # Otherwise TEST_TYPEs to run will be derived from TEST_TYPES 
space-separated string
     export FORCE_TEST_TYPE=${FORCE_TEST_TYPE:=""}
@@ -663,7 +662,6 @@ Mount variables:
 
 Force variables:
 
-    FORCE_PULL_IMAGES: ${FORCE_PULL_IMAGES}
     FORCE_BUILD_IMAGES: ${FORCE_BUILD_IMAGES}
     FORCE_ANSWER_TO_QUESTIONS: ${FORCE_ANSWER_TO_QUESTIONS}
     SKIP_CHECK_REMOTE_IMAGE: ${SKIP_CHECK_REMOTE_IMAGE}
diff --git a/scripts/ci/libraries/_md5sum.sh b/scripts/ci/libraries/_md5sum.sh
index b54eb02..0a90288 100644
--- a/scripts/ci/libraries/_md5sum.sh
+++ b/scripts/ci/libraries/_md5sum.sh
@@ -152,22 +152,3 @@ function md5sum::check_if_docker_build_is_needed() {
         fi
     fi
 }
-
-
-function md5sum::check_if_pull_is_needed() {
-   if [[ ${SKIP_CHECK_REMOTE_IMAGE:=} != "true" && ${DOCKER_CACHE} == "pulled" 
]]; then
-        # Check if remote image is different enough to force pull
-        # This is an optimisation pull vs. build time. When there
-        # are enough changes (specifically after setup.py changes) it is 
faster to pull
-        # and build the image rather than just build it
-        verbosity::print_info
-        verbosity::print_info "Checking if the remote image needs to be pulled"
-        verbosity::print_info
-        build_images::get_remote_image_build_cache_hash
-        if [[ ${REMOTE_DOCKER_REGISTRY_UNREACHABLE:=} != "true" && 
${LOCAL_MANIFEST_IMAGE_UNAVAILABLE:=} != "true" ]]; then
-            build_images::compare_local_and_remote_build_cache_hash
-        else
-            export FORCE_PULL_IMAGES="true"
-        fi
-    fi
-}
diff --git a/scripts/ci/libraries/_push_pull_remove_images.sh 
b/scripts/ci/libraries/_push_pull_remove_images.sh
index 9741e32..3f26f86 100644
--- a/scripts/ci/libraries/_push_pull_remove_images.sh
+++ b/scripts/ci/libraries/_push_pull_remove_images.sh
@@ -44,21 +44,15 @@ function push_pull_remove_images::push_image_with_retries() 
{
 }
 
 
-# Pulls image in case it is needed (either has never been pulled or pulling 
was forced
+# Pulls image in case it is missing
 # Should be run with set +e
 # Parameters:
 #   $1 -> image to pull
-#   $2 - fallback image
-function push_pull_remove_images::pull_image_if_not_present_or_forced() {
+function push_pull_remove_images::pull_image_if_missing() {
     local image_to_pull="${1}"
     local image_hash
     image_hash=$(docker images -q "${image_to_pull}" 2> /dev/null || true)
-    local pull_image=${FORCE_PULL_IMAGES}
-
     if [[ -z "${image_hash=}" ]]; then
-        pull_image="true"
-    fi
-    if [[ "${pull_image}" == "true" ]]; then
         echo
         echo "Pulling the image ${image_to_pull}"
         echo
@@ -66,191 +60,6 @@ function 
push_pull_remove_images::pull_image_if_not_present_or_forced() {
     fi
 }
 
-# Rebuilds python base image from the latest available Python version if it 
has been updated
-function 
push_pull_remove_images::check_and_rebuild_python_base_image_if_needed() {
-   docker_v pull "${PYTHON_BASE_IMAGE}"
-   local dockerhub_python_version
-   dockerhub_python_version=$(docker run "${PYTHON_BASE_IMAGE}" python -c 
'import sys; print(sys.version)')
-   local local_python_version
-   local_python_version=$(docker run "${AIRFLOW_PYTHON_BASE_IMAGE}" python -c 
'import sys; print(sys.version)' || true)
-   if [[ ${local_python_version} != "${dockerhub_python_version}" ]]; then
-       echo
-       echo "There is a new Python Base image updated!"
-       echo "The version used in Airflow: ${local_python_version}"
-       echo "The version available in DockerHub: ${dockerhub_python_version}"
-       echo "Rebuilding ${AIRFLOW_PYTHON_BASE_IMAGE} from the latest 
${PYTHON_BASE_IMAGE}"
-       echo
-       echo "FROM ${PYTHON_BASE_IMAGE}" | \
-            docker_v build \
-                --label 
"org.opencontainers.image.source=https://github.com/${GITHUB_REPOSITORY}"; \
-                -t "${AIRFLOW_PYTHON_BASE_IMAGE}" -
-  else
-      echo
-      echo "Not rebuilding the base python image - the image has the same 
python version ${dockerhub_python_version}"
-      echo
-  fi
-}
-
-# Pulls the base Python image. This image is used as base for CI and PROD 
images, depending on the parameters used:
-#
-# * if CHECK_IF_BASE_PYTHON_IMAGE_UPDATED == "true", then it checks if new 
image of Python has been released
-#     in DockerHub and it will rebuild the base python image and add the 
`org.opencontainers.image.source`
-#     label to it, so that it is linked to Airflow repository when we push it 
to the
-#     Github Container registry
-# * Otherwise it pulls the Python base image from GitHub Container Registry 
registry.
-#     In case we pull specific build image (via suffix)
-#     it will pull the right image using the specified suffix
-function push_pull_remove_images::pull_base_python_image() {
-    echo
-    echo "Docker pull base python image. Upgrade to newer deps: 
${UPGRADE_TO_NEWER_DEPENDENCIES}"
-    echo
-    if [[ -n ${DETECTED_TERMINAL=} ]]; then
-        echo -n "Docker pull base python image. Upgrade to newer deps: 
${UPGRADE_TO_NEWER_DEPENDENCIES}
-" > "${DETECTED_TERMINAL}"
-    fi
-    set +e
-    push_pull_remove_images::pull_image_if_not_present_or_forced 
"${AIRFLOW_PYTHON_BASE_IMAGE}"
-    local res="$?"
-    set -e
-    if [[ ${CHECK_IF_BASE_PYTHON_IMAGE_UPDATED} == "true" || ${res} != "0" ]] 
; then
-        # Rebuild the base python image using DockerHub - either when we 
explicitly want it
-        # or when there is no image available yet in ghcr.io (usually when you 
build it for the
-        # first time in your repository
-        push_pull_remove_images::check_and_rebuild_python_base_image_if_needed
-    fi
-}
-
-# Pulls CI image in case caching strategy is "pulled" and the image needs to 
be pulled
-function push_pull_remove_images::pull_ci_images_if_needed() {
-    local python_image_hash
-    python_image_hash=$(docker images -q "${AIRFLOW_PYTHON_BASE_IMAGE}" 2> 
/dev/null || true)
-    if [[ -z "${python_image_hash=}" || "${FORCE_PULL_IMAGES}" == "true" || \
-            ${CHECK_IF_BASE_PYTHON_IMAGE_UPDATED} == "true" ]]; then
-        if [[ ${GITHUB_REGISTRY_PULL_IMAGE_TAG} == "latest" ]]; then
-            # Pull base python image when building latest image
-            push_pull_remove_images::pull_base_python_image
-        fi
-    fi
-    if [[ "${DOCKER_CACHE}" == "pulled" ]]; then
-        set +e
-        push_pull_remove_images::pull_image_if_not_present_or_forced 
"${AIRFLOW_CI_IMAGE_WITH_TAG}"
-        local res="$?"
-        set -e
-        if [[ ${res} != "0" ]]; then
-            if [[ ${GITHUB_REGISTRY_PULL_IMAGE_TAG} == "latest" ]] ; then
-                echo
-                echo "The CI image cache does not exist. This is likely the 
first time you build the image"
-                echo "Switching to 'local' cache for docker images"
-                echo
-                DOCKER_CACHE="local"
-            else
-                echo
-                echo "The CI image cache does not exist and we want to pull 
tag ${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
-                echo "Failing as we have to pull the tagged image in order to 
continue"
-                echo
-                return "${res}"
-            fi
-        fi
-    fi
-}
-
-
-# Pulls PROD image in case caching strategy is "pulled" and the image needs to 
be pulled
-function push_pull_remove_images::pull_prod_images_if_needed() {
-    local python_image_hash
-    python_image_hash=$(docker images -q "${AIRFLOW_PYTHON_BASE_IMAGE}" 2> 
/dev/null || true)
-    if [[ -z "${python_image_hash=}" || "${FORCE_PULL_IMAGES}" == "true"  || \
-            ${CHECK_IF_BASE_PYTHON_IMAGE_UPDATED} == "true" ]]; then
-        if [[ ${GITHUB_REGISTRY_PULL_IMAGE_TAG} == "latest" ]]; then
-            # Pull base python image when building latest image
-            push_pull_remove_images::pull_base_python_image
-        fi
-    fi
-    if [[ "${DOCKER_CACHE}" == "pulled" ]]; then
-        set +e
-        # "Build" segment of production image
-        push_pull_remove_images::pull_image_if_not_present_or_forced \
-            "${AIRFLOW_PROD_BUILD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
-        local res="$?"
-        if [[ ${res} == "0" ]]; then
-            # "Main" segment of production image
-            push_pull_remove_images::pull_image_if_not_present_or_forced \
-                "${AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
-            res="$?"
-        fi
-        set -e
-        if [[ ${res} != "0" ]]; then
-            if [[ ${GITHUB_REGISTRY_PULL_IMAGE_TAG} == "latest" ]] ; then
-                echo
-                echo "The PROD image cache does not exist. This is likely the 
first time you build the image"
-                echo "Switching to 'local' cache for docker images"
-                echo
-                DOCKER_CACHE="local"
-            else
-                echo
-                echo "The PROD image cache does not exist and we want to pull 
tag ${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
-                echo "Failing as we have to pull the tagged image in order to 
continue"
-                echo
-                return "${res}"
-            fi
-        fi
-    fi
-}
-
-# Push image to GitHub registry with the push tag:
-#     "${COMMIT_SHA}" - in case of pull-request triggered 'workflow_run' builds
-#     "latest"        - in case of push builds
-# Push python image to GitHub registry with the push tag:
-#     X.Y-slim-buster-"${COMMIT_SHA}" - in case of pull-request triggered 
'workflow_run' builds
-#     X.Y-slim-buster                 - in case of push builds
-function push_pull_remove_images::push_python_image_to_github() {
-    local python_tag_suffix=""
-    if [[ ${GITHUB_REGISTRY_PUSH_IMAGE_TAG} != "latest" ]]; then
-        python_tag_suffix="-${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
-    fi
-    docker_v tag "${AIRFLOW_PYTHON_BASE_IMAGE}" \
-        "${AIRFLOW_PYTHON_BASE_IMAGE}${python_tag_suffix}"
-    push_pull_remove_images::push_image_with_retries \
-        "${AIRFLOW_PYTHON_BASE_IMAGE}${python_tag_suffix}"
-}
-
-# Pushes Ci images and their tags to registry in GitHub
-function push_pull_remove_images::push_ci_images_to_github() {
-    start_end::group_start "Push image"
-    if [[ "${PUSH_PYTHON_BASE_IMAGE=}" != "false" ]]; then
-        push_pull_remove_images::push_python_image_to_github
-    fi
-    docker_v tag "${AIRFLOW_CI_IMAGE}" 
"${AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
-    push_pull_remove_images::push_image_with_retries 
"${AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
-    # Also push ci manifest image if GITHUB_REGISTRY_PUSH_IMAGE_TAG is "latest"
-    if [[ ${GITHUB_REGISTRY_PUSH_IMAGE_TAG} == "latest" ]]; then
-        local airflow_ci_manifest_tagged_image="${AIRFLOW_CI_REMOTE_MANIFEST_IMAGE}:latest"
-        docker_v tag "${AIRFLOW_CI_LOCAL_MANIFEST_IMAGE}" "${airflow_ci_manifest_tagged_image}"
-        push_pull_remove_images::push_image_with_retries "${airflow_ci_manifest_tagged_image}"
-    fi
-    start_end::group_end
-}
-
-# Pushes PROD image to registry in GitHub
-# Push image to GitHub registry with chosen push tag
-# the PUSH tag might be:
-#     "${COMMIT_SHA}" - in case of pull-request triggered 'workflow_run' builds
-#     "latest"        - in case of push builds
-function push_pull_remove_images::push_prod_images_to_github () {
-    if [[ "${PUSH_PYTHON_BASE_IMAGE=}" != "false" ]]; then
-        push_pull_remove_images::push_python_image_to_github
-    fi
-    local airflow_prod_tagged_image="${AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
-    docker_v tag "${AIRFLOW_PROD_IMAGE}" "${airflow_prod_tagged_image}"
-    push_pull_remove_images::push_image_with_retries "${airflow_prod_tagged_image}"
-    # Also push prod build image if GITHUB_REGISTRY_PUSH_IMAGE_TAG is "latest"
-    if [[ ${GITHUB_REGISTRY_PUSH_IMAGE_TAG} == "latest" ]]; then
-        local airflow_prod_build_tagged_image="${AIRFLOW_PROD_BUILD_IMAGE}:latest"
-        docker_v tag "${AIRFLOW_PROD_BUILD_IMAGE}" "${airflow_prod_build_tagged_image}"
-        push_pull_remove_images::push_image_with_retries "${airflow_prod_build_tagged_image}"
-    fi
-}
-
 
 # waits for an image to be available in the GitHub registry
 function push_pull_remove_images::wait_for_image() {
@@ -261,7 +70,7 @@ function push_pull_remove_images::wait_for_image() {
     local count=0
     while true
     do
-        if push_pull_remove_images::pull_image_if_not_present_or_forced "$1"; then
+        if push_pull_remove_images::pull_image_if_missing "$1"; then
             break
         fi
         if [[ ${count} == "${MAX_TRIES}" ]]; then
@@ -276,9 +85,3 @@ function push_pull_remove_images::wait_for_image() {
     done
     set -e
 }
-
-function push_pull_remove_images::pull_image() {
-    start_end::group_start  "Pulling image: $1"
-    push_pull_remove_images::pull_image_if_not_present_or_forced "$1"
-    start_end::group_end
-}
diff --git a/scripts/ci/libraries/_spinner.sh b/scripts/ci/libraries/_spinner.sh
deleted file mode 100644
index 000a0a0..0000000
--- a/scripts/ci/libraries/_spinner.sh
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env bash
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Function to spin ASCII spinner during pull and build in pre-commits to give the user indication that
-# Pull/Build is happening. It only spins if the output log changes, so if pull/build is stalled
-# The spinner will not move.
-function spinner::spin() {
-    local file_to_monitor=${1}
-    SPIN=("-" "\\" "|" "/")
-    readonly SPIN
-    echo -n "
-Build log: ${file_to_monitor}
-" > "${DETECTED_TERMINAL}"
-
-    local last_step=""
-    while "true"
-    do
-      for i in "${SPIN[@]}"
-      do
-            echo -ne "\r${last_step}$i" > "${DETECTED_TERMINAL}"
-            local last_file_size
-            local file_size
-            last_file_size=$(set +e; wc -c "${file_to_monitor}" 2>/dev/null | awk '{print $1}' || true)
-            file_size=${last_file_size}
-            while [[ "${last_file_size}" == "${file_size}" ]];
-            do
-                file_size=$(set +e; wc -c "${file_to_monitor}" 2>/dev/null | awk '{print $1}' || true)
-                sleep 0.2
-            done
-            last_file_size=file_size
-            sleep 0.2
-            if [[ ! -f "${file_to_monitor}" ]]; then
-                exit
-            fi
-            local last_line
-            last_line=$(set +e; grep "Step" <"${file_to_monitor}" | tail -1 || true)
-            [[ ${last_line} =~ ^(Step [0-9/]*)\ : ]] && last_step="${BASH_REMATCH[1]} :"
-      done
-    done
-}
diff --git a/scripts/ci/libraries/_verbosity.sh b/scripts/ci/libraries/_verbosity.sh
index 68b356d..f4b1e39 100644
--- a/scripts/ci/libraries/_verbosity.sh
+++ b/scripts/ci/libraries/_verbosity.sh
@@ -57,7 +57,7 @@ function docker_v {
     if [[ ${PRINT_INFO_FROM_SCRIPTS} == "false" ]]; then
         ${DOCKER_BINARY_PATH} "${@}" >>"${OUTPUT_LOG}" 2>&1
     else
-        ${DOCKER_BINARY_PATH} "${@}" 1> >(tee -a "${OUTPUT_LOG}") 2> >(tee -a 
"${OUTPUT_LOG}" >&2)
+        "${DOCKER_BINARY_PATH}" "${@}"
     fi
     res="$?"
     if [[ ${res} == "0" || ${exit_on_error} == "false" ]]; then
diff --git a/scripts/ci/pre_commit/pre_commit_ci_build.sh b/scripts/ci/pre_commit/pre_commit_ci_build.sh
index 2d4dda3..0663801 100755
--- a/scripts/ci/pre_commit/pre_commit_ci_build.sh
+++ b/scripts/ci/pre_commit/pre_commit_ci_build.sh
@@ -22,8 +22,36 @@ export PRINT_INFO_FROM_SCRIPTS="false"
 # shellcheck source=scripts/ci/libraries/_script_init.sh
 . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
 
+# Pre-commit version of confirming the CI image that is used in pre-commits.
+# It displays additional information - what the user should do in order to bring the local images
+# back to the state that pre-commit will be happy with.
+function build_images::rebuild_ci_image_if_confirmed_for_pre_commit() {
+    local needs_docker_build="false"
+    export THE_IMAGE_TYPE="CI"
+
+    md5sum::check_if_docker_build_is_needed
+
+    if [[ ${needs_docker_build} == "true" ]]; then
+        verbosity::print_info
+        verbosity::print_info "Docker image pull and build is needed!"
+        verbosity::print_info
+    else
+        verbosity::print_info
+        verbosity::print_info "Docker image pull and build is not needed!"
+        verbosity::print_info
+    fi
+
+    if [[ "${needs_docker_build}" == "true" ]]; then
+        SKIP_REBUILD="false"
+        build_images::confirm_image_rebuild
+        if [[ ${SKIP_REBUILD} != "true" ]]; then
+            build_images::rebuild_ci_image_if_needed
+        fi
+    fi
+}
+
 build_images::forget_last_answer
 
 build_images::prepare_ci_build
 
-build_images::rebuild_ci_image_if_needed_and_confirmed
+build_images::rebuild_ci_image_if_confirmed_for_pre_commit
diff --git a/scripts/ci/tools/build_dockerhub.sh b/scripts/ci/tools/build_dockerhub.sh
index c520939..7941163 100755
--- a/scripts/ci/tools/build_dockerhub.sh
+++ b/scripts/ci/tools/build_dockerhub.sh
@@ -28,7 +28,6 @@ export INSTALL_FROM_DOCKER_CONTEXT_FILES="false"
 export INSTALL_PROVIDERS_FROM_SOURCES="false"
 export AIRFLOW_PRE_CACHED_PIP_PACKAGES="false"
 export DOCKER_CACHE="local"
-export CHECK_IF_BASE_PYTHON_IMAGE_UPDATED="true"
 export DOCKER_TAG=${INSTALL_AIRFLOW_VERSION}-python${PYTHON_MAJOR_MINOR_VERSION}
 export AIRFLOW_CONSTRAINTS_REFERENCE="constraints-${INSTALL_AIRFLOW_VERSION}"
 export AIRFLOW_CONSTRAINTS="constraints"
diff --git a/scripts/docker/common.sh b/scripts/docker/common.sh
index 36097d1..db60c0c 100755
--- a/scripts/docker/common.sh
+++ b/scripts/docker/common.sh
@@ -50,7 +50,7 @@ function common::override_pip_version_if_needed() {
 function common::get_constraints_location() {
     # auto-detect Airflow-constraint reference and location
     if [[ -z "${AIRFLOW_CONSTRAINTS_REFERENCE=}" ]]; then
-        if  [[ ${AIRFLOW_VERSION} =~ v?2.* ]]; then
+        if  [[ ${AIRFLOW_VERSION} =~ v?2.* && ! ${AIRFLOW_VERSION} =~ .*dev.* ]]; then
             AIRFLOW_CONSTRAINTS_REFERENCE=constraints-${AIRFLOW_VERSION}
         else
             AIRFLOW_CONSTRAINTS_REFERENCE=${DEFAULT_CONSTRAINTS_BRANCH}
diff --git a/scripts/in_container/entrypoint_ci.sh b/scripts/in_container/entrypoint_ci.sh
index 1416149..83dc1e9 100755
--- a/scripts/in_container/entrypoint_ci.sh
+++ b/scripts/in_container/entrypoint_ci.sh
@@ -184,7 +184,6 @@ ssh-keyscan -H localhost >> ~/.ssh/known_hosts 2>/dev/null
 
 cd "${AIRFLOW_SOURCES}"
 
-echo "START_AIRFLOW:=${START_AIRFLOW}"
 if [[ ${START_AIRFLOW:="false"} == "true" ]]; then
     export AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS=${LOAD_DEFAULT_CONNECTIONS}
     export AIRFLOW__CORE__LOAD_EXAMPLES=${LOAD_EXAMPLES}
