This is an automated email from the ASF dual-hosted git repository. potiuk pushed a commit to branch switch-to-slash-convention-in-ghcr.io in repository https://gitbox.apache.org/repos/asf/airflow.git
commit bbef29a138003e76d82ac1330e0ee2cba910d0d2 Author: Jarek Potiuk <[email protected]> AuthorDate: Sat Jul 31 11:48:15 2021 +0200 Switches to "/" convention in ghcr.io images We are using ghcr.io as image cache for our CI builds and Breeze and it seems ghcr.io is being "rebuilt" while running. We had been using the "airflow-<branch>.." image convention before, because multiple nesting levels of images were not supported. However, we experienced errors recently with pushing 2.1 images (https://issues.apache.org/jira/browse/INFRA-22124) and during investigation it turned out that it is now possible to use "/" in the name of the image, and while it still does not introduce multiple nesting levels and folder structure, the UI of GitHub treats it like that: if you have an image which starts with "airflow/", the airflow prefix is stripped out, and you can also have even more "/" in the name to introduce further hierarchy. Since we have to change the image naming convention due to the (still unresolved) bug with no permission to push the v2-1-test image, we've decided to change the naming convention for all our cache images to follow this - now available - "/" convention to make it better structured and easier to manage/understand. Also, some optimisations are implemented - Python, prod-build and ci-manifest images are only pushed when the "latest" image is prepared. They are not needed for the COMMIT builds because we only need final images for those builds. This simplified the code quite a bit. Documentation is updated to reflect those changes. --- .dockerignore | 4 +- .github/workflows/build-images.yml | 10 ++ .github/workflows/ci.yml | 161 ++++++++--------------- CI.rst | 51 +++---- IMAGES.rst | 22 ++-- breeze | 17 +-- dev/retag_docker_images.py | 9 +- scripts/ci/images/ci_prepare_ci_image_on_ci.sh | 11 -- scripts/ci/images/ci_prepare_prod_image_on_ci.sh | 23 +--- scripts/ci/libraries/_build_images.sh | 49 ++++--- scripts/ci/libraries/_initialization.sh | 16 +-- scripts/ci/libraries/_kind.sh | 16 +-- scripts/ci/libraries/_push_pull_remove_images.sh | 100 ++++++++------ scripts/ci/libraries/_script_init.sh | 2 +- 14 files changed, 210 insertions(+), 281 deletions(-) diff --git a/.dockerignore b/.dockerignore index d10cfbc..f6113e2 100644 --- a/.dockerignore +++ b/.dockerignore @@ -40,9 +40,6 @@ !scripts/in_container !scripts/docker -# Add provider packages to the context -!provider_packages - # Add tests and kubernetes_tests to context.
!tests !kubernetes_tests @@ -129,3 +126,4 @@ airflow/www/static/docs # Exclude docs generated files docs/_build/ docs/_api/ +docs/_doctrees/ diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index f29e199..92f8d1c 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -230,8 +230,11 @@ jobs: VERSION_SUFFIX_FOR_PYPI: ".dev0" steps: - name: Set envs + # Set pull image tag for CI image build, in order to pull the image pushed + # Just a moment ago by build-ci-images job run: | echo "GITHUB_REGISTRY_PUSH_IMAGE_TAG=${TARGET_COMMIT_SHA}" >> "$GITHUB_ENV" + echo "GITHUB_REGISTRY_PULL_IMAGE_TAG=${TARGET_COMMIT_SHA}" >> "$GITHUB_ENV" - uses: actions/checkout@v2 with: ref: ${{ env.TARGET_COMMIT_SHA }} @@ -279,10 +282,17 @@ jobs: # Pull images built in the previous step env: GITHUB_REGISTRY_WAIT_FOR_IMAGE: "true" + # Here we are using PULL_IMAGE_TAG set in the environment variables above - name: "Build PROD images ${{ matrix.python-version }}:${{ env.TARGET_COMMIT_SHA }}" run: ./scripts/ci/images/ci_prepare_prod_image_on_ci.sh + env: + # GITHUB_REGISTRY_PULL_IMAGE_TAG is overriden to latest in order to build PROD image using "latest" + GITHUB_REGISTRY_PULL_IMAGE_TAG: "latest" - name: "Push PROD images ${{ matrix.python-version }}:${{ env.TARGET_COMMIT_SHA }}" run: ./scripts/ci/images/ci_push_production_images.sh + env: + # GITHUB_REGISTRY_PULL_IMAGE_TAG is overriden to latest in order to build PROD image using "latest" + GITHUB_REGISTRY_PULL_IMAGE_TAG: "latest" cancel-on-ci-build: permissions: diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index bd8c2d2..bb77fb9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1103,110 +1103,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" path: /tmp/kind_logs_* retention-days: 7 - push-prod-images-to-github-registry: - permissions: - packages: write - timeout-minutes: 10 - name: "Push PROD images as cache to GitHub Registry" - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} - needs: - - build-info - - static-checks - - tests-sqlite - - tests-postgres - - tests-mysql - - tests-mssql - - tests-kubernetes - - prod-images - - docs - if: > - (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/v1-10-test' || - github.ref == 'refs/heads/v2-0-test' || github.ref == 'refs/heads/v2-1-test') && - github.event_name != 'schedule' - strategy: - matrix: - python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }} - env: - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} - PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }} - GITHUB_REGISTRY_PUSH_IMAGE_TAG: "latest" - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 - with: - persist-credentials: false - - name: "Setup python" - uses: actions/setup-python@v2 - with: - python-version: ${{ env.PYTHON_MAJOR_MINOR_VERSION }} - - name: "Free space" - run: ./scripts/ci/tools/free_space.sh - - name: Set push-python-image - id: push-python-image - run: | - if [[ "${REF}" == 'refs/head/main' || "${REF}" == 'refs/head/main' ]]; then - echo "::set-output name=wanted::true" - else - echo "::set-output name=wanted::false" - fi - env: - REF: ${{ github.ref }} - - name: - "Prepare PROD image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}" - run: ./scripts/ci/images/ci_prepare_prod_image_on_ci.sh - env: - # Since we are going to push both final image and build image segment, we need to pull the - # build image, 
in case we are pulling from registry rather than building. - WAIT_FOR_PROD_BUILD_IMAGE: "true" - WAIT_FOR_PYTHON_BASE_IMAGE: ${{ steps.push-python-image.outputs.wanted}} - - name: "Push PROD images ${{ matrix.python-version }}:${{ env.GITHUB_REGISTRY_PUSH_IMAGE_TAG }}" - run: ./scripts/ci/images/ci_push_production_images.sh - env: - PUSH_PYTHON_BASE_IMAGE: ${{ steps.push-python-image.outputs.wanted}} - - push-ci-images-to-github-registry: - permissions: - packages: write - timeout-minutes: 10 - name: "Push CI images as cache to GitHub Registry" - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} - needs: - - build-info - - static-checks - - tests-sqlite - - tests-postgres - - tests-mysql - - tests-mssql - - tests-kubernetes - - ci-images - - docs - if: > - (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/v1-10-test' || - github.ref == 'refs/heads/v2-0-test' || github.ref == 'refs/heads/v2-1-test') && - github.event_name != 'schedule' - strategy: - matrix: - python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }} - env: - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} - PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }} - GITHUB_REGISTRY_PUSH_IMAGE_TAG: "latest" - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 - with: - persist-credentials: false - - name: "Setup python" - uses: actions/setup-python@v2 - with: - python-version: ${{ env.PYTHON_MAJOR_MINOR_VERSION }} - - name: "Free space" - run: ./scripts/ci/tools/free_space.sh - - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}" - run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh - - name: "Push CI image ${{ matrix.python-version }}:${{ env.GITHUB_REGISTRY_PUSH_IMAGE_TAG }}" - run: ./scripts/ci/images/ci_push_ci_images.sh - constraints: permissions: contents: write @@ -1227,10 +1123,8 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }} CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING: ${{needs.build-info.outputs.pythonVersionsListAsString}} - # Only run it for direct pushes - if: > - github.ref == 'refs/heads/main' || github.ref == 'refs/heads/v1-10-test' || - github.ref == 'refs/heads/v2-0-test' || github.ref == 'refs/heads/v2-1-test' + # Only run it for direct pushes and scheduled builds + if: github.event_name == 'push' || github.event_name == 'schedule' steps: - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" uses: actions/checkout@v2 @@ -1264,17 +1158,68 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" - name: "Set constraints branch name" id: constraints-branch run: ./scripts/ci/constraints/ci_branch_constraints.sh + # only actually push it when we are in apache/airflow repository - name: Checkout ${{ steps.constraints-branch.outputs.branch }} uses: actions/checkout@v2 + if: github.repository == 'apache/airflow' with: path: "repo" ref: ${{ steps.constraints-branch.outputs.branch }} persist-credentials: false - name: "Commit changed constraint files for ${{needs.build-info.outputs.pythonVersions}}" run: ./scripts/ci/constraints/ci_commit_constraints.sh + if: github.repository == 'apache/airflow' - name: "Push changes" uses: ./.github/actions/github-push-action + if: github.repository == 'apache/airflow' with: github_token: ${{ secrets.GITHUB_TOKEN }} branch: ${{ steps.constraints-branch.outputs.branch }} directory: "repo" + + # Push images to GitHub Registry in 
Apache repository, if all tests are successful and build + # is executed as result of direct push to "main" or one of the "test" branches + # It actually rebuilds all images using just-pushed constraints if they changed + # It will also check if a new python image was released and will pull the latest one if needed + # Same as build-images.yaml + push-images-to-github-registry: + permissions: + packages: write + timeout-minutes: 10 + name: "Push PROD images as cache to GitHub Registry" + runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} + needs: + - build-info + - constraints + - docs + # Only run it for direct pushes and scheduled builds + if: github.event_name == 'push' || github.event_name == 'schedule' + strategy: + matrix: + python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }} + env: + RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} + PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }} + GITHUB_REGISTRY_PULL_IMAGE_TAG: "latest" + GITHUB_REGISTRY_PUSH_IMAGE_TAG: "latest" + PUSH_PYTHON_BASE_IMAGE: "true" + CHECK_IF_BASE_PYTHON_IMAGE_UPDATED: "true" + steps: + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@v2 + with: + persist-credentials: false + - name: "Setup python" + uses: actions/setup-python@v2 + with: + python-version: ${{ env.PYTHON_MAJOR_MINOR_VERSION }} + - name: "Free space" + run: ./scripts/ci/tools/free_space.sh + - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:latest" + run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh + - name: "Prepare PROD image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:latest" + run: ./scripts/ci/images/ci_prepare_prod_image_on_ci.sh + - name: "Push CI image ${{ env.PYTHON_MAJOR_MINOR_VERSION }}:latest" + run: ./scripts/ci/images/ci_push_ci_images.sh + - name: "Push PROD images ${{ env.PYTHON_MAJOR_MINOR_VERSION }}:latest" + run: ./scripts/ci/images/ci_push_production_images.sh diff --git a/CI.rst b/CI.rst index fedd300..6a704d0 100644 --- a/CI.rst +++ b/CI.rst @@ -568,12 +568,11 @@ This workflow is a regular workflow that performs all checks of Airflow code. +---------------------------+----------------------------------------------+-------+-------+------+ | Tests Kubernetes | Run Kubernetes test | Yes(2)| Yes | Yes | +---------------------------+----------------------------------------------+-------+-------+------+ -| Push PROD images | Pushes PROD images to GitHub Registry (4) | - | Yes | - | -+---------------------------+----------------------------------------------+-------+-------+------+ -| Push CI images | Pushes CI images to GitHub Registry (4) | - | Yes | - | -+---------------------------+----------------------------------------------+-------+-------+------+ | Constraints | Upgrade constraints to latest ones (4) | - | Yes | Yes | +---------------------------+----------------------------------------------+-------+-------+------+ +| Push images | Pushes latest images to GitHub Registry (4) | - | Yes | Yes | ++---------------------------+----------------------------------------------+-------+-------+------+ + Comments: @@ -584,8 +583,8 @@ Comments: You can set it to "false" to disable using shared images - this is slower though as the images are rebuilt in every job that needs them. (4) PROD and CI images are pushed as "latest" to GitHub Container registry and constraints are upgraded - only if all tests are successful. 
Note that images are not pushed in CRON jobs because they are rebuilt - from scratch and we want to push incremental changes to the Github Container registry. + only if all tests are successful. The images are rebuilt in this step using constraints pushed + in the previous step. CodeQL scan ----------- @@ -620,7 +619,9 @@ with the COMMIT_SHA id for images that were used in particular build. The image names follow the patterns (except the Python image, all the images are stored in https://ghcr.io/ in ``apache`` organization. -The packages are available under: +The packages are available under (CONTAINER_NAME is url-encoded name of the image). Note that "/" are +supported now in the ``ghcr.io`` as apart of the image name within ``apache`` organization, but they +have to be percent-encoded when you access them via UI (/ = %2F) ``https://github.com/apache/airflow/pkgs/container/<CONTAINER_NAME>`` @@ -631,26 +632,30 @@ The packages are available under: | (DockerHub) | | Python maintainer release new versions of those image | | | | with security fixes every few weeks in DockerHub. | +--------------+----------------------------------------------------------+----------------------------------------------------------+ -| Airflow | airflow-python-v2:<X.Y>-slim-buster | Version of python base image used in Airflow Builds | -| python base | or | We keep the "latest" version there and also each build | -| image | airflow-python-v2:<X.Y>-slim-buster-<COMMIT_SHA> | has an associated specific python version that was used. | +| Airflow | airflow/<BRANCH>/python:<X.Y>-slim-buster | Version of python base image used in Airflow Builds | +| python base | | We keep the "latest" version only to mark last "good" | +| image | | python base that went through testing and was pushed. | +--------------+----------------------------------------------------------+----------------------------------------------------------+ -| CI image | airflow-<BRANCH>-python<X.Y>-ci-v2:latest | CI image - this is the image used for most of the tests. | -| | or | Contains all provider dependencies and tools useful | -| | airflow-<BRANCH>-python<X.Y>-ci-v2:<COMMIT_SHA> | For testing. This image is used in Breeze. | +| PROD Build | airflow/<BRANCH>/prod-build/python<X.Y>:latest | Production Build image - this is the "build" stage of | +| image | | production image. It contains build-essentials and all | +| | | necessary apt packages to build/install PIP packages. | +| | | We keep the "latest" version only to speed up builds. | +--------------+----------------------------------------------------------+----------------------------------------------------------+ -| Manifest | airflow-<BRANCH>-python<X.Y>-ci-v2-manifest:latest | CI manifest image - this is the image used to optimize | -| CI image | or | pulls and builds for Breeze development environment | -| | airflow-<BRANCH>-python<X.Y>-ci-v2-manifest:<COMMIT_SHA> | They store hash indicating whether the image will be | +| Manifest | airflow/<BRANCH>/ci-manifest/python<X.Y>:latest | CI manifest image - this is the image used to optimize | +| CI image | | pulls and builds for Breeze development environment | +| | | They store hash indicating whether the image will be | | | | faster to build or pull. | +| | | We keep the "latest" version only to help breeze to | +| | | check if new image should be pulled. 
| +--------------+----------------------------------------------------------+----------------------------------------------------------+ -| PROD Build | airflow-<BRANCH>-python<X.Y>-build-v2:latest | Production Build image - this is the "build" segment of | -| image | or | production image. It contains build-essentials and all | -| | airflow-<BRANCH>-python<X.Y>-build-v2:<COMMIT_SHA> | necessary packages to install PIP packages. | +| CI image | airflow/<BRANCH>/ci/python<X.Y>:latest | CI image - this is the image used for most of the tests. | +| | or | Contains all provider dependencies and tools useful | +| | airflow/<BRANCH>/ci/python<X.Y>:<COMMIT_SHA> | For testing. This image is used in Breeze. | +--------------+----------------------------------------------------------+----------------------------------------------------------+ -| PROD image | airflow-<BRANCH>-python<X.Y>-v2:latest | Production image. This is the actual production image | +| | | faster to build or pull. | +| PROD image | airflow/<BRANCH>/prod/python<X.Y>:latest | Production image. This is the actual production image | | | or | optimized for size. | -| | airflow-<BRANCH>-python<X.Y>-v2:<COMMIT_SHA> | It contains only compiled libraries and minimal set of | +| | airflow/<BRANCH>/prod/python<X.Y>:<COMMIT_SHA> | It contains only compiled libraries and minimal set of | | | | dependencies to run Airflow. | +--------------+----------------------------------------------------------+----------------------------------------------------------+ @@ -668,9 +673,9 @@ For example knowing that the CI build was for commit ``cd27124534b46c9688a1d89e7 .. code-block:: bash - docker pull ghcr.io/apache/airflow-main-python3.6-ci:cd27124534b46c9688a1d89e75fcd137ab5137e3 + docker pull ghcr.io/apache/airflow/main/ci/python3.6:cd27124534b46c9688a1d89e75fcd137ab5137e3 - docker run -it ghcr.io/apache/airflow-main-python3.6-ci:cd27124534b46c9688a1d89e75fcd137ab5137e3 + docker run -it ghcr.io/apache/airflow/main/ci/python3.6:cd27124534b46c9688a1d89e75fcd137ab5137e3 But you usually need to pass more variables and complex setup if you want to connect to a database or diff --git a/IMAGES.rst b/IMAGES.rst index 82e6989..9e78450 100644 --- a/IMAGES.rst +++ b/IMAGES.rst @@ -246,19 +246,21 @@ Images with a commit SHA (built for pull requests and pushes) .. code-block:: bash - ghcr.io/apache/airflow-<BRANCH>-pythonX.Y-ci-v2:<COMMIT_SHA> - for CI images - ghcr.io/apache/airflow-<BRANCH>-pythonX.Y-v2:<COMMIT_SHA> - for production images - ghcr.io/apache/airflow-<BRANCH>-pythonX.Y-build-v2:<COMMIT_SHA> - for production build stage - ghcr.io/apache/airflow-python-v2:X.Y-slim-buster-<COMMIT_SHA> - for base Python images + ghcr.io/apache/airflow/<BRANCH>/ci/python<X.Y>:<COMMIT_SHA> - for CI images + ghcr.io/apache/airflow/<BRANCH>/prod/python<X.Y>:<COMMIT_SHA> - for production images + +We do not push Base Python images and prod-build images when we prepare COMMIT builds, because those +images are never rebuilt locally, so there is no need to store base images specific for those builds. Latest images (pushed when main merge succeeds): .. 
code-block:: bash - ghcr.io/apache/airflow-<BRANCH>-pythonX.Y-ci-v2:latest - for CI images - ghcr.io/apache/airflow-<BRANCH>-pythonX.Y-v2:latest - for production images - ghcr.io/apache/airflow-<BRANCH>-pythonX.Y-build-v2:latest - for production build stage - ghcr.io/apache/airflow-python-v2:X.Y-slim-buster - for base Python images + ghcr.io/apache/airflow/<BRANCH>/python:<X.Y>-slim-buster - for base Python images + ghcr.io/apache/airflow/<BRANCH>/ci/python<X.Y>:latest - for CI images + ghcr.io/apache/airflow/<BRANCH>/ci-manifest/python<X.Y>:latest - for CI Manifest images + ghcr.io/apache/airflow/<BRANCH>/prod/python<X.Y>:latest - for production images + ghcr.io/apache/airflow/<BRANCH>/prod-build/python<X.Y>:latest - for production build stage You can see all the current GitHub images at `<https://github.com/apache/airflow/packages>`_ @@ -558,8 +560,8 @@ way of querying image details via API. You really need to download the image to We workaround it in the way that always when we build the image we build a very small image manifest containing randomly generated UUID and push it to registry together with the main CI image. The tag for the manifest image reflects the image it refers to with added ``-manifest`` suffix. -The manifest image for ``ghcr.io/apache/airflow-main-python3.6-ci-v2`` is named -``ghcr.io/apache/airflow-main-python3.6-ci-v2-manifest``. +The manifest image for ``ghcr.io/apache/airflow/main/ci/python3.6`` is named +``ghcr.io/apache/airflow/main/ci-manifest/python3.6``. The image is quickly pulled (it is really, really small) when important files change and the content of the randomly generated UUID is compared with the one in our image. If the contents are different diff --git a/breeze b/breeze index 7aa4295..b0395f0 100755 --- a/breeze +++ b/breeze @@ -164,6 +164,9 @@ function breeze::setup_default_breeze_constants() { # Can be overridden by '--force-build-images' flag. export FORCE_BUILD_IMAGES="false" + # When we push from breeze we always want to push base python images + export PUSH_PYTHON_BASE_IMAGE="true" + # Determines whether to reinstall airflow at entering the image. 
export USE_AIRFLOW_VERSION="" # if set to true, the ci image will look for wheel packages in dist folder and will install them @@ -569,8 +572,7 @@ EOF # AIRFLOW_SOURCES # AIRFLOW_CI_IMAGE # AIRFLOW_PROD_IMAGE -# AIRFLOW_PROD_IMAGE_KUBERNETES -# AIRFLOW_PROD_BASE_TAG +# AIRFLOW_IMAGE_KUBERNETES # SQLITE_URL # # Arguments: @@ -633,8 +635,7 @@ export MYSQL_VERSION="${MYSQL_VERSION}" export AIRFLOW_SOURCES="${AIRFLOW_SOURCES}" export AIRFLOW_CI_IMAGE="${AIRFLOW_CI_IMAGE}" export AIRFLOW_PROD_IMAGE="${AIRFLOW_PROD_IMAGE}" -export AIRFLOW_PROD_IMAGE_KUBERNETES="${AIRFLOW_PROD_IMAGE_KUBERNETES}" -export AIRFLOW_PROD_BASE_TAG="${AIRFLOW_PROD_BASE_TAG}" +export AIRFLOW_IMAGE_KUBERNETES="${AIRFLOW_IMAGE_KUBERNETES}" export SQLITE_URL="${SQLITE_URL}" export USE_AIRFLOW_VERSION="${USE_AIRFLOW_VERSION}" export USE_PACKAGES_FROM_DIST="${USE_PACKAGES_FROM_DIST}" @@ -650,7 +651,6 @@ EOF # # Global constants set: # -# PYTHON_BASE_IMAGE_VERSION # PYTHON_BASE_IMAGE # AIRFLOW_CI_IMAGE # BUILT_CI_IMAGE_FLAG_FILE @@ -956,7 +956,6 @@ function breeze::parse_arguments() { echo export DOCKER_CACHE="disabled" # if not set here, docker cached is determined later, depending on type of image to be build - readonly DOCKER_CACHE export FORCE_BUILD_IMAGES="true" shift ;; @@ -972,7 +971,6 @@ function breeze::parse_arguments() { echo export DOCKER_CACHE="local" # if not set here, docker cached is determined later, depending on type of image to be build - readonly DOCKER_CACHE shift ;; -U | --build-cache-pulled) @@ -980,14 +978,12 @@ function breeze::parse_arguments() { echo export DOCKER_CACHE="pulled" # if not set here, docker cached is determined later, depending on type of image to be build - readonly DOCKER_CACHE shift ;; -X | --build-cache-disabled) echo "Use disabled cache to build images" echo export DOCKER_CACHE="disabled" - readonly DOCKER_CACHE # if not set here, docker cached is determined later, depending on type of image to be build shift ;; @@ -1148,7 +1144,6 @@ function breeze::parse_arguments() { export CHECK_IMAGE_FOR_REBUILD="false" export SKIP_BUILDING_PROD_IMAGE="true" export SKIP_CHECK_REMOTE_IMAGE="true" - export FAIL_ON_GITHUB_DOCKER_PULL_ERROR="true" shift 2 ;; --init-script) @@ -3617,7 +3612,7 @@ breeze::check_and_save_all_params build_images::determine_docker_cache_strategy -build_images::get_docker_image_names +build_images::get_docker_cache_image_names initialization::make_constants_read_only diff --git a/dev/retag_docker_images.py b/dev/retag_docker_images.py index 5eeda8e..f29ce1b 100755 --- a/dev/retag_docker_images.py +++ b/dev/retag_docker_images.py @@ -36,10 +36,11 @@ PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] GHCR_IO_PREFIX = "ghcr.io/apache/airflow" GHCR_IO_IMAGES = [ - "{prefix}-{branch}-python{python_version}-ci-v2-manifest:latest", - "{prefix}-{branch}-python{python_version}-ci-v2:latest", - "{prefix}-{branch}-python{python_version}-v2:latest", - "{prefix}-{branch}-python{python_version}-build-v2:latest", + "{prefix}/{branch}/ci-manifest/python{python_version}:latest", + "{prefix}/{branch}/ci/python{python_version}:latest", + "{prefix}/{branch}/prod-build/python{python_version}-build-v2:latest", + "{prefix}/{branch}/prod/python{python_version}-build-v2:latest", + "{prefix}/{branch}/python:{python_version}-slim-buster", ] diff --git a/scripts/ci/images/ci_prepare_ci_image_on_ci.sh b/scripts/ci/images/ci_prepare_ci_image_on_ci.sh index a550038..77c3143 100755 --- a/scripts/ci/images/ci_prepare_ci_image_on_ci.sh +++ b/scripts/ci/images/ci_prepare_ci_image_on_ci.sh @@ -33,20 +33,9 @@ 
function build_ci_image_on_ci() { # Tries to wait for the images indefinitely # skips further image checks - since we already have the target image - - local python_tag_suffix="" - if [[ ${GITHUB_REGISTRY_PULL_IMAGE_TAG} != "latest" ]]; then - python_tag_suffix="-${GITHUB_REGISTRY_PULL_IMAGE_TAG}" - fi - # first we pull base python image. We will need it to re-push it after main build - # Becoming the new "latest" image for other builds - build_images::wait_for_image_tag "${AIRFLOW_PYTHON_BASE_IMAGE}" \ - "${python_tag_suffix}" - # And then the actual image build_images::wait_for_image_tag "${AIRFLOW_CI_IMAGE}" \ ":${GITHUB_REGISTRY_PULL_IMAGE_TAG}" - md5sum::update_all_md5_with_group else build_images::rebuild_ci_image_if_needed diff --git a/scripts/ci/images/ci_prepare_prod_image_on_ci.sh b/scripts/ci/images/ci_prepare_prod_image_on_ci.sh index dbcb07d..320f454 100755 --- a/scripts/ci/images/ci_prepare_prod_image_on_ci.sh +++ b/scripts/ci/images/ci_prepare_prod_image_on_ci.sh @@ -35,29 +35,8 @@ function build_prod_images_on_ci() { # Tries to wait for the images indefinitely # skips further image checks - since we already have the target image - local python_tag_suffix="" - if [[ ${GITHUB_REGISTRY_PULL_IMAGE_TAG} != "latest" ]]; then - python_tag_suffix="-${GITHUB_REGISTRY_PULL_IMAGE_TAG}" - fi - - if [[ "${WAIT_FOR_PYTHON_BASE_IMAGE=}" == "true" ]]; then - # first we pull base python image. We will need it to re-push it after main build - # Becoming the new "latest" image for other builds - build_images::wait_for_image_tag "${AIRFLOW_PYTHON_BASE_IMAGE}" \ - "${python_tag_suffix}" - fi - # And then the actual image - build_images::wait_for_image_tag "${AIRFLOW_PROD_IMAGE}" \ - ":${GITHUB_REGISTRY_PULL_IMAGE_TAG}" - - # And the prod build image - if [[ "${WAIT_FOR_PROD_BUILD_IMAGE=}" == "true" ]]; then - # If specified in variable - also waits for the build image - build_images::wait_for_image_tag "${AIRFLOW_PROD_BUILD_IMAGE}" \ - ":${GITHUB_REGISTRY_PULL_IMAGE_TAG}" - fi - + build_images::wait_for_image_tag "${AIRFLOW_PROD_IMAGE}" ":${GITHUB_REGISTRY_PULL_IMAGE_TAG}" else build_images::build_prod_images_from_locally_built_airflow_packages fi diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh index ca94e4c..bac1e65 100644 --- a/scripts/ci/libraries/_build_images.sh +++ b/scripts/ci/libraries/_build_images.sh @@ -251,7 +251,6 @@ EOF # Retrieves information about build cache hash random file from the local image # function build_images::get_local_build_cache_hash() { - set +e # Remove the container just in case docker_v rm --force "local-airflow-ci-container" 2>/dev/null >/dev/null @@ -262,6 +261,7 @@ function build_images::get_local_build_cache_hash() { LOCAL_MANIFEST_IMAGE_UNAVAILABLE="true" export LOCAL_MANIFEST_IMAGE_UNAVAILABLE touch "${LOCAL_IMAGE_BUILD_CACHE_HASH_FILE}" + set -e return fi @@ -296,6 +296,7 @@ function build_images::get_remote_image_build_cache_hash() { REMOTE_DOCKER_REGISTRY_UNREACHABLE="true" export REMOTE_DOCKER_REGISTRY_UNREACHABLE touch "${REMOTE_IMAGE_BUILD_CACHE_HASH_FILE}" + set -e return fi set -e @@ -358,49 +359,44 @@ function build_images::get_github_container_registry_image_prefix() { echo "${GITHUB_REPOSITORY}" | tr '[:upper:]' '[:lower:]' } -function build_images::get_docker_image_names() { - # python image version to use - export PYTHON_BASE_IMAGE_VERSION=${PYTHON_BASE_IMAGE_VERSION:=${PYTHON_MAJOR_MINOR_VERSION}} - +function build_images::get_docker_cache_image_names() { # Python base image to use - export 
PYTHON_BASE_IMAGE="python:${PYTHON_BASE_IMAGE_VERSION}-slim-buster" + export PYTHON_BASE_IMAGE="python:${PYTHON_MAJOR_MINOR_VERSION}-slim-buster" local image_name image_name="${GITHUB_REGISTRY}/$(build_images::get_github_container_registry_image_prefix)" - # CI image base tag - export AIRFLOW_CI_BASE_TAG="${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}-ci" + # Example: + # ghcr.io/apache/airflow/main/python:3.8-slim-buster + export AIRFLOW_PYTHON_BASE_IMAGE="${image_name}/${BRANCH_NAME}/python:${PYTHON_MAJOR_MINOR_VERSION}-slim-buster" # Example: - # ghcr.io/apache/airflow-main-python3.8-ci-v2 - export AIRFLOW_CI_IMAGE="${image_name}-${AIRFLOW_CI_BASE_TAG}${GITHUB_REGISTRY_IMAGE_SUFFIX}" + # ghcr.io/apache/airflow/main/ci/python3.8 + export AIRFLOW_CI_IMAGE="${image_name}/${BRANCH_NAME}/ci/python${PYTHON_MAJOR_MINOR_VERSION}" - export AIRFLOW_CI_LOCAL_MANIFEST_IMAGE="local-airflow-ci-manifest:${AIRFLOW_CI_BASE_TAG}" + # Example: + # local-airflow-ci-manifest/main/python3.8 + export AIRFLOW_CI_LOCAL_MANIFEST_IMAGE="local-airflow-ci-manifest/${BRANCH_NAME}/python${PYTHON_MAJOR_MINOR_VERSION}" # Example: - # ghcr.io/apache/airflow-main-python3.8-ci-v2-manifest - export AIRFLOW_CI_REMOTE_MANIFEST_IMAGE="${image_name}-${AIRFLOW_CI_BASE_TAG}${GITHUB_REGISTRY_IMAGE_SUFFIX}-manifest" + # ghcr.io/apache/airflow/main/ci-manifest/python3.8 + export AIRFLOW_CI_REMOTE_MANIFEST_IMAGE="${image_name}/${BRANCH_NAME}/ci-manifest/python${PYTHON_MAJOR_MINOR_VERSION}" # File that is touched when the CI image is built for the first time locally export BUILT_CI_IMAGE_FLAG_FILE="${BUILD_CACHE_DIR}/${BRANCH_NAME}/.built_${PYTHON_MAJOR_MINOR_VERSION}" - # PROD image to build - export AIRFLOW_PROD_BASE_TAG="${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}" - # Example: - # ghcr.io/apache/airflow-v2-1-test-python-v2:3.6-slim-buster - export AIRFLOW_PROD_IMAGE="${image_name}-${AIRFLOW_PROD_BASE_TAG}${GITHUB_REGISTRY_IMAGE_SUFFIX}" - - # PROD Kubernetes image to build - export AIRFLOW_PROD_IMAGE_KUBERNETES="${AIRFLOW_PROD_IMAGE}-kubernetes" + # ghcr.io/apache/airflow/main/prod/python3.8 + export AIRFLOW_PROD_IMAGE="${image_name}/${BRANCH_NAME}/prod/python${PYTHON_MAJOR_MINOR_VERSION}" # Example: - # ghcr.io/apache/airflow-main-python3.6-build-v2 - export AIRFLOW_PROD_BUILD_IMAGE="${image_name}-${AIRFLOW_PROD_BASE_TAG}-build${GITHUB_REGISTRY_IMAGE_SUFFIX}" + # ghcr.io/apache/airflow/main/prod-build/python3.8 + export AIRFLOW_PROD_BUILD_IMAGE="${image_name}/${BRANCH_NAME}/prod-build/python${PYTHON_MAJOR_MINOR_VERSION}" + + # Kubernetes image to build + # ghcr.io/apache/airflow/main/kubernetes/python3.8 + export AIRFLOW_IMAGE_KUBERNETES="${image_name}/${BRANCH_NAME}/kubernetes/python${PYTHON_MAJOR_MINOR_VERSION}" - # Example: - # ghcr.io/apache/airflow-python-v2:3.6-slim-buster - export AIRFLOW_PYTHON_BASE_IMAGE="${image_name}-python${GITHUB_REGISTRY_IMAGE_SUFFIX}:${PYTHON_BASE_IMAGE_VERSION}-slim-buster" } @@ -927,7 +923,6 @@ function build_images::determine_docker_cache_strategy() { export DOCKER_CACHE="pulled" fi fi - readonly DOCKER_CACHE verbosity::print_info verbosity::print_info "Using ${DOCKER_CACHE} cache strategy for the build." 
verbosity::print_info diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh index fefcb1a..8fdb283 100644 --- a/scripts/ci/libraries/_initialization.sh +++ b/scripts/ci/libraries/_initialization.sh @@ -174,6 +174,10 @@ function initialization::initialize_base_variables() { # Dry run - only show docker-compose and docker commands but do not execute them export DRY_RUN_DOCKER=${DRY_RUN_DOCKER:="false"} + + # By default we only push built ci/prod images - base python images are only pushed + # When requested + export PUSH_PYTHON_BASE_IMAGE=${PUSH_PYTHON_BASE_IMAGE:="false"} } # Determine current branch @@ -303,9 +307,6 @@ function initialization::initialize_force_variables() { # Can be set to true to skip if the image is newer in registry export SKIP_CHECK_REMOTE_IMAGE=${SKIP_CHECK_REMOTE_IMAGE:="false"} - - # Should be set to true if you expect image frm GitHub to be present and downloaded - export FAIL_ON_GITHUB_DOCKER_PULL_ERROR=${FAIL_ON_GITHUB_DOCKER_PULL_ERROR:="false"} } # Determine information about the host @@ -556,7 +557,6 @@ function initialization::initialize_git_variables() { function initialization::initialize_github_variables() { # Defaults for interacting with GitHub export GITHUB_REGISTRY="ghcr.io" - export GITHUB_REGISTRY_IMAGE_SUFFIX=${GITHUB_REGISTRY_IMAGE_SUFFIX:="-v2"} export GITHUB_REGISTRY_WAIT_FOR_IMAGE=${GITHUB_REGISTRY_WAIT_FOR_IMAGE:="false"} export GITHUB_REGISTRY_PULL_IMAGE_TAG=${GITHUB_REGISTRY_PULL_IMAGE_TAG:="latest"} export GITHUB_REGISTRY_PUSH_IMAGE_TAG=${GITHUB_REGISTRY_PUSH_IMAGE_TAG:="latest"} @@ -655,7 +655,6 @@ Force variables: FORCE_BUILD_IMAGES: ${FORCE_BUILD_IMAGES} FORCE_ANSWER_TO_QUESTIONS: ${FORCE_ANSWER_TO_QUESTIONS} SKIP_CHECK_REMOTE_IMAGE: ${SKIP_CHECK_REMOTE_IMAGE} - FAIL_ON_GITHUB_DOCKER_PULL_ERROR: ${FAIL_ON_GITHUB_DOCKER_PULL_ERROR} Host variables: @@ -850,8 +849,6 @@ function initialization::make_constants_read_only() { readonly ADDITIONAL_RUNTIME_APT_DEPS readonly ADDITIONAL_RUNTIME_APT_ENV - readonly DOCKER_CACHE - readonly GITHUB_REGISTRY readonly GITHUB_REGISTRY_WAIT_FOR_IMAGE readonly GITHUB_REGISTRY_PULL_IMAGE_TAG @@ -867,11 +864,8 @@ function initialization::make_constants_read_only() { readonly VERSION_SUFFIX_FOR_PYPI - readonly PYTHON_BASE_IMAGE_VERSION readonly PYTHON_BASE_IMAGE - readonly AIRFLOW_CI_BASE_TAG - readonly AIRFLOW_PROD_BASE_TAG - readonly AIRFLOW_PROD_IMAGE_KUBERNETES + readonly AIRFLOW_IMAGE_KUBERNETES readonly BUILT_CI_IMAGE_FLAG_FILE readonly INIT_SCRIPT_FILE diff --git a/scripts/ci/libraries/_kind.sh b/scripts/ci/libraries/_kind.sh index d4910d9..1fb77eb 100644 --- a/scripts/ci/libraries/_kind.sh +++ b/scripts/ci/libraries/_kind.sh @@ -262,8 +262,8 @@ function kind::build_image_for_kubernetes_tests() { if [[ -n ${GITHUB_REGISTRY_PULL_IMAGE_TAG=} ]]; then image_tag="${GITHUB_REGISTRY_PULL_IMAGE_TAG}" fi - echo "Building ${AIRFLOW_PROD_IMAGE_KUBERNETES}:latest from ${AIRFLOW_PROD_IMAGE}:${image_tag}" - docker_v build --tag "${AIRFLOW_PROD_IMAGE_KUBERNETES}:latest" . -f - <<EOF + echo "Building ${AIRFLOW_IMAGE_KUBERNETES}:latest from ${AIRFLOW_PROD_IMAGE}:${image_tag}" + docker_v build --tag "${AIRFLOW_IMAGE_KUBERNETES}:latest" . 
-f - <<EOF FROM ${AIRFLOW_PROD_IMAGE}:${image_tag} COPY airflow/example_dags/ \${AIRFLOW_HOME}/dags/ @@ -271,11 +271,11 @@ COPY airflow/example_dags/ \${AIRFLOW_HOME}/dags/ COPY airflow/kubernetes_executor_templates/ \${AIRFLOW_HOME}/pod_templates/ EOF - echo "The ${AIRFLOW_PROD_IMAGE_KUBERNETES}:${image_tag} is prepared for test kubernetes deployment." + echo "The ${AIRFLOW_IMAGE_KUBERNETES}:${image_tag} is prepared for test kubernetes deployment." } function kind::load_image_to_kind_cluster() { - kind load docker-image --name "${KIND_CLUSTER_NAME}" "${AIRFLOW_PROD_IMAGE_KUBERNETES}:latest" + kind load docker-image --name "${KIND_CLUSTER_NAME}" "${AIRFLOW_IMAGE_KUBERNETES}:latest" } MAX_NUM_TRIES_FOR_HEALTH_CHECK=12 @@ -343,8 +343,8 @@ function kind::deploy_airflow_with_helm() { helm install airflow . \ --timeout 10m0s \ --namespace "${HELM_AIRFLOW_NAMESPACE}" \ - --set "defaultAirflowRepository=${AIRFLOW_PROD_IMAGE_KUBERNETES}" \ - --set "images.airflow.repository=${AIRFLOW_PROD_IMAGE_KUBERNETES}" \ + --set "defaultAirflowRepository=${AIRFLOW_IMAGE_KUBERNETES}" \ + --set "images.airflow.repository=${AIRFLOW_IMAGE_KUBERNETES}" \ --set "images.airflow.tag=latest" -v 1 \ --set "defaultAirflowTag=latest" -v 1 \ --set "config.api.auth_backend=airflow.api.auth.backend.basic_auth" \ @@ -376,8 +376,8 @@ function kind::upgrade_airflow_with_helm() { helm repo add stable https://charts.helm.sh/stable/ helm dep update helm upgrade airflow . --namespace "${HELM_AIRFLOW_NAMESPACE}" \ - --set "defaultAirflowRepository=${AIRFLOW_PROD_IMAGE_KUBERNETES}" \ - --set "images.airflow.repository=${AIRFLOW_PROD_IMAGE_KUBERNETES}" \ + --set "defaultAirflowRepository=${AIRFLOW_IMAGE_KUBERNETES}" \ + --set "images.airflow.repository=${AIRFLOW_IMAGE_KUBERNETES}" \ --set "images.airflow.tag=latest" -v 1 \ --set "defaultAirflowTag=latest" -v 1 \ --set "config.api.auth_backend=airflow.api.auth.backend.basic_auth" \ diff --git a/scripts/ci/libraries/_push_pull_remove_images.sh b/scripts/ci/libraries/_push_pull_remove_images.sh index 0e99b10..1bc3396 100644 --- a/scripts/ci/libraries/_push_pull_remove_images.sh +++ b/scripts/ci/libraries/_push_pull_remove_images.sh @@ -62,25 +62,6 @@ function push_pull_remove_images::pull_image_if_not_present_or_forced() { echo "Pulling the image ${image_to_pull}" echo docker_v pull "${image_to_pull}" - local exit_value="$?" - if [[ ${exit_value} != "0" && ${FAIL_ON_GITHUB_DOCKER_PULL_ERROR} == "true" ]]; then - echo - echo """ -${COLOR_RED}ERROR: Exiting on docker pull error - -If you have authorisation problems, you might want to run: - -docker login ${image_to_pull%%\/*} - -You need to use generate token as the password, not your personal password. -You can generate one at https://github.com/settings/tokens -Make sure to choose 'read:packages' scope. -${COLOR_RESET} -""" - exit ${exit_value} - fi - echo - return ${exit_value} fi } @@ -90,7 +71,7 @@ function push_pull_remove_images::check_and_rebuild_python_base_image_if_needed( local dockerhub_python_version dockerhub_python_version=$(docker run "${PYTHON_BASE_IMAGE}" python -c 'import sys; print(sys.version)') local local_python_version - local_python_version=$(docker run "${AIRFLOW_PYTHON_BASE_IMAGE}" python -c 'import sys; print(sys.version)') + local_python_version=$(docker run "${AIRFLOW_PYTHON_BASE_IMAGE}" python -c 'import sys; print(sys.version)' || true) if [[ ${local_python_version} != "${dockerhub_python_version}" ]]; then echo echo "There is a new Python Base image updated!" 
@@ -116,10 +97,10 @@ function push_pull_remove_images::check_and_rebuild_python_base_image_if_needed( # it will pull the right image using the specified suffix function push_pull_remove_images::pull_base_python_image() { echo - echo "Docker pulling base python image. Upgrade to newer deps: ${UPGRADE_TO_NEWER_DEPENDENCIES}" + echo "Docker pull base python image. Upgrade to newer deps: ${UPGRADE_TO_NEWER_DEPENDENCIES}" echo if [[ -n ${DETECTED_TERMINAL=} ]]; then - echo -n "Docker pulling base python image. Upgrade to newer deps: ${UPGRADE_TO_NEWER_DEPENDENCIES} + echo -n "Docker pull base python image. Upgrade to newer deps: ${UPGRADE_TO_NEWER_DEPENDENCIES} " > "${DETECTED_TERMINAL}" fi if [[ ${GITHUB_REGISTRY_PULL_IMAGE_TAG} != "latest" ]]; then @@ -132,8 +113,14 @@ function push_pull_remove_images::pull_base_python_image() { return 1 fi else + set +e push_pull_remove_images::pull_image_if_not_present_or_forced "${AIRFLOW_PYTHON_BASE_IMAGE}" - if [[ ${CHECK_IF_BASE_PYTHON_IMAGE_UPDATED} == "true" ]] ; then + local res="$?" + set -e + if [[ ${CHECK_IF_BASE_PYTHON_IMAGE_UPDATED} == "true" || ${res} != "0" ]] ; then + # Rebuild the base python image using DockerHub - either when we explicitly want it + # or when there is no image available yet in ghcr.io (usually when you build it for the + # first time in your repository push_pull_remove_images::check_and_rebuild_python_base_image_if_needed fi fi @@ -151,8 +138,24 @@ function push_pull_remove_images::pull_ci_images_if_needed() { fi fi if [[ "${DOCKER_CACHE}" == "pulled" ]]; then + set +e push_pull_remove_images::pull_image_if_not_present_or_forced \ "${AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}" + local res="$?" + set -e + if [[ ${res} != "0" && ${GITHUB_REGISTRY_PULL_IMAGE_TAG} == "latest" ]] ; then + echo + echo "The CI image cache does not exist. This is likely the first time you build the image" + echo "Switching to 'local' cache for docker images" + echo + DOCKER_CACHE="local" + else + echo + echo "The CI image cache does not exist and we want to pull tag ${GITHUB_REGISTRY_PULL_IMAGE_TAG}" + echo "Failing as we have to pull the tagged image in order to continue" + echo + return "${res}" + fi fi } @@ -169,12 +172,31 @@ function push_pull_remove_images::pull_prod_images_if_needed() { fi fi if [[ "${DOCKER_CACHE}" == "pulled" ]]; then + set +e # "Build" segment of production image push_pull_remove_images::pull_image_if_not_present_or_forced \ "${AIRFLOW_PROD_BUILD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}" - # "Main" segment of production image - push_pull_remove_images::pull_image_if_not_present_or_forced \ - "${AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}" + local res="$?" + if [[ ${res} == "0" ]]; then + # "Main" segment of production image + push_pull_remove_images::pull_image_if_not_present_or_forced \ + "${AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}" + res="$?" + fi + set -e + if [[ ${res} != "0" && ${GITHUB_REGISTRY_PULL_IMAGE_TAG} == "latest" ]] ; then + echo + echo "The PROD image cache does not exist. 
This is likely the first time you build the image" + echo "Switching to 'local' cache for docker images" + echo + DOCKER_CACHE="local" + else + echo + echo "The PROD image cache does not exist and we want to pull tag ${GITHUB_REGISTRY_PULL_IMAGE_TAG}" + echo "Failing as we have to pull the tagged image in order to continue" + echo + return "${res}" + fi fi } @@ -203,17 +225,12 @@ function push_pull_remove_images::push_ci_images_to_github() { local airflow_ci_tagged_image="${AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}" docker_v tag "${AIRFLOW_CI_IMAGE}" "${airflow_ci_tagged_image}" push_pull_remove_images::push_image_with_retries "${airflow_ci_tagged_image}" + # Also push ci manifest iumage image if GITHUB_REGISTRY_PUSH_IMAGE_TAG is "latest" if [[ ${GITHUB_REGISTRY_PUSH_IMAGE_TAG} == "latest" ]]; then - local airflow_ci_manifest_tagged_image="${AIRFLOW_CI_REMOTE_MANIFEST_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}" + local airflow_ci_manifest_tagged_image="${AIRFLOW_CI_REMOTE_MANIFEST_IMAGE}:latest" docker_v tag "${AIRFLOW_CI_LOCAL_MANIFEST_IMAGE}" "${airflow_ci_manifest_tagged_image}" push_pull_remove_images::push_image_with_retries "${airflow_ci_manifest_tagged_image}" fi - if [[ -n ${GITHUB_SHA=} ]]; then - # Also push image to GitHub registry with commit SHA - local airflow_ci_sha_image="${AIRFLOW_CI_IMAGE}:${COMMIT_SHA}" - docker_v tag "${AIRFLOW_CI_IMAGE}" "${airflow_ci_sha_image}" - push_pull_remove_images::push_image_with_retries "${airflow_ci_sha_image}" - fi } # Pushes PROD image to registry in GitHub @@ -222,19 +239,18 @@ function push_pull_remove_images::push_ci_images_to_github() { # "${COMMIT_SHA}" - in case of pull-request triggered 'workflow_run' builds # "latest" - in case of push builds function push_pull_remove_images::push_prod_images_to_github () { + if [[ "${PUSH_PYTHON_BASE_IMAGE=}" != "false" ]]; then + push_pull_remove_images::push_python_image_to_github + fi local airflow_prod_tagged_image="${AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}" docker_v tag "${AIRFLOW_PROD_IMAGE}" "${airflow_prod_tagged_image}" push_pull_remove_images::push_image_with_retries "${airflow_prod_tagged_image}" - if [[ -n ${COMMIT_SHA=} ]]; then - # Also push image to GitHub registry with commit SHA - local airflow_prod_sha_image="${AIRFLOW_PROD_IMAGE}:${COMMIT_SHA}" - docker_v tag "${AIRFLOW_PROD_IMAGE}" "${airflow_prod_sha_image}" - push_pull_remove_images::push_image_with_retries "${airflow_prod_sha_image}" + # Also push prod build image if GITHUB_REGISTRY_PUSH_IMAGE_TAG is "latest" + if [[ ${GITHUB_REGISTRY_PUSH_IMAGE_TAG} == "latest" ]]; then + local airflow_prod_build_tagged_image="${AIRFLOW_PROD_BUILD_IMAGE}:latest" + docker_v tag "${AIRFLOW_PROD_BUILD_IMAGE}" "${airflow_prod_build_tagged_image}" + push_pull_remove_images::push_image_with_retries "${airflow_prod_build_tagged_image}" fi - # Also push prod build image - local airflow_prod_build_tagged_image="${AIRFLOW_PROD_BUILD_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}" - docker_v tag "${AIRFLOW_PROD_BUILD_IMAGE}" "${airflow_prod_build_tagged_image}" - push_pull_remove_images::push_image_with_retries "${airflow_prod_build_tagged_image}" } # waits for an image to be available in GitHub Container Registry. 
Should be run with `set +e` diff --git a/scripts/ci/libraries/_script_init.sh b/scripts/ci/libraries/_script_init.sh index 0f3c862..dc79fd5 100755 --- a/scripts/ci/libraries/_script_init.sh +++ b/scripts/ci/libraries/_script_init.sh @@ -41,7 +41,7 @@ build_images::determine_docker_cache_strategy initialization::get_environment_for_builds_on_ci -build_images::get_docker_image_names +build_images::get_docker_cache_image_names initialization::make_constants_read_only
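
For reference, since "/" now appears inside the container names, the name has to be percent-encoded (/ = %2F) when constructing the GitHub packages UI URL described in the CI.rst change above. A minimal sketch of that encoding, assuming the documented URL pattern and using ``airflow/main/ci/python3.8`` purely as an example of the new naming convention:

.. code-block:: python

    # Sketch: percent-encode a "/"-style container name for the GitHub packages UI URL.
    # The name below is only an example of the new convention; the URL pattern is the
    # one documented in CI.rst (https://github.com/apache/airflow/pkgs/container/<CONTAINER_NAME>).
    from urllib.parse import quote

    container_name = "airflow/main/ci/python3.8"
    ui_url = f"https://github.com/apache/airflow/pkgs/container/{quote(container_name, safe='')}"
    print(ui_url)
    # -> https://github.com/apache/airflow/pkgs/container/airflow%2Fmain%2Fci%2Fpython3.8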
