This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch exp_github_actions
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 2f7c88fe64fe7f481870c186bf65430304e2b29a
Author: Nicolaus Weidner <nicolaus.weid...@ververica.com>
AuthorDate: Wed Nov 17 11:19:51 2021 +0100

    GitHub Actions prototype
---
 .github/actions/worker-setup/action.yml            |  55 ++++
 .github/workflows/flink-ci-template.yml            | 333 +++++++++++++++++++++
 .../workflows/hadoop-2.8.3-scala-2.12-workflow.yml |  45 +++
 tools/azure-pipelines/cache_docker_images.sh       |  10 +-
 tools/azure-pipelines/create_build_artifact.sh     |  26 +-
 tools/azure-pipelines/debug_files_utils.sh         |   4 +
 tools/azure-pipelines/unpack_build_artifact.sh     |  18 +-
 tools/azure-pipelines/uploading_watchdog.sh        |  11 +
 tools/ci/watchdog.sh                               |   2 +-
 9 files changed, 490 insertions(+), 14 deletions(-)

diff --git a/.github/actions/worker-setup/action.yml b/.github/actions/worker-setup/action.yml
new file mode 100644
index 00000000000..3e609d769ef
--- /dev/null
+++ b/.github/actions/worker-setup/action.yml
@@ -0,0 +1,55 @@
+name: "Worker Setup"
+description: "Runs steps to set up the worker"
+
+inputs:
+  flink-checkout-folder:
+    description: "The Flink checkout folder"
+    required: true
+  jdk-version:
+    description: "JDK version to be installed"
+    required: true
+  maven-version:
+    description: "Maven version to be installed"
+    required: true
+    default: 3.2.5
+  ssl-enabled:
+    description: "true, if libssl shall be installed (required by netty tcnative)"
+    required: false
+  initial-disk-cleanup:
+    description: "true, if a initial disk clean shall be performed"
+    required: false
+  additional-packages:
+    description: "APT packages that shall be installed in addition to the 
default onces"
+    required: false
+
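+# note: every run step in a composite action must declare an explicit 'shell'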
+runs:
+  using: "composite"
+  steps:
+    - name: "Free up disk space"
+      if: inputs.initial-disk-cleanup == 'true'
+      working-directory: ${{ inputs.flink-checkout-folder }}
+      shell: bash
+      run: ./tools/azure-pipelines/free_disk_space.sh
+
+    # see https://github.com/actions/setup-java#supported-distributions
+    - name: "Set JDK ${{ inputs.jdk-version }}"
+      uses: actions/setup-java@v2
+      with:
+        distribution: "temurin"
+        java-version: ${{ inputs.jdk-version }}
+
+    - name: "Install Maven ${{ inputs.maven-version }}"
+      uses: stCarolas/setup-maven@v4.2
+      with:
+        maven-version: ${{ inputs.maven-version }}
+
+    - name: "Install required dependencies bc and libapr1"
+      shell: bash
+      run: sudo apt-get install -y bc libapr1 ${{ inputs.additional-packages }}
+
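+    # netty tcnative links against the legacy OpenSSL 1.0 ABI, which current Ubuntu images no longer ship by default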
+    - name: "Install libssl1.0.0 for netty tcnative"
+      if: inputs.ssl-enabled == 'true'
+      shell: bash
+      run: |
+        wget http://security.ubuntu.com/ubuntu/pool/main/o/openssl1.0/libssl1.0.0_1.0.2n-1ubuntu5.7_amd64.deb
+        sudo apt install ./libssl1.0.0_1.0.2n-1ubuntu5.7_amd64.deb
diff --git a/.github/workflows/flink-ci-template.yml b/.github/workflows/flink-ci-template.yml
new file mode 100644
index 00000000000..f00ab83a0e3
--- /dev/null
+++ b/.github/workflows/flink-ci-template.yml
@@ -0,0 +1,333 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "Build and Test Apache Flink"
+
+on:
+  workflow_call:
+    inputs:
+      environment:
+        description: "defines environment variables for downstream scripts"
+        required: true
+        type: string
+      jdk-version:
+        description: "the jdk version to use"
+        required: true
+        type: number
+    secrets:
+      s3_bucket:
+        required: false
+      s3_access_key:
+        required: false
+      s3_secret_key:
+        required: false
+      glue_schema_access_key:
+        required: false
+      glue_schema_secret_key:
+        required: false
+
+env:
+  FLINK_ARTIFACT_DIR: ${{ github.workspace }}/
+  FLINK_ARTIFACT_FILENAME: flink_artifacts.tar.gz
+  DOCKER_IMAGES_CACHE_FOLDER: ${{ github.workspace }}/.docker-cache
+  # The checkout directory needs to be changed for the composite GitHub Action
+  # usages below as well, if this env variable is modified
+  CHECKOUT_DIR: ${{ github.workspace }}/flink-checkout
+
+jobs:
+  compile:
+    name: "Compile"
+    runs-on: ubuntu-latest
+    timeout-minutes: 240
+    outputs:
+      stringified-workflow-name: ${{ steps.workflow-prep-step.outputs.stringified-workflow-name }}
+    steps:
+      - name: "Stringify workflow name"
+        id: workflow-prep-step
+        run: |
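+          # derive an artifact-name-safe identifier from the workflow name: replace characters outside [:alnum:]._ with '-', lowercase, then collapse and trim dashes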
+          stringified_workflow_name=$(echo "${{ github.workflow }}" | tr -C '[:alnum:]._' '-' | tr '[:upper:]' '[:lower:]' | sed -e 's/--*/-/g' -e 's/^-*//g' -e 's/-*$//g')
+          echo "::set-output name=stringified-workflow-name::${stringified_workflow_name}"
+
+      - name: "Set JDK version"
+        run: |
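+          # GitHub-hosted runners pre-install several JDKs and expose their locations via JAVA_HOME_<version>_X64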
+          echo "JAVA_HOME=$JAVA_HOME_${{inputs.jdk-version}}_X64" >> 
${GITHUB_ENV}
+          echo "PATH=$JAVA_HOME_${{inputs.jdk-version}}_X64/bin:$PATH" >> 
${GITHUB_ENV}
+
+      - name: "Flink Checkout"
+        uses: actions/checkout@v2
+        with:
+          path: ${{ env.CHECKOUT_DIR }}
+
+      - name: "Worker Setup"
+        uses: ./flink-checkout/.github/actions/worker-setup
+        with:
+          flink-checkout-folder: ${{ env.CHECKOUT_DIR }}
+          jdk-version: ${{ inputs.jdk-version }}
+
+      - name: "Compile Flink"
+        working-directory: ${{ env.CHECKOUT_DIR }}
+        run: |
+          ${{ inputs.environment }} ./tools/ci/compile.sh || exit $?
+
+      - name: "Collect build artifacts"
+        working-directory: ${{ env.CHECKOUT_DIR }}
+        run: ./tools/azure-pipelines/create_build_artifact.sh -f ${{ env.FLINK_ARTIFACT_DIR }}/${{ env.FLINK_ARTIFACT_FILENAME }}
+
+      - name: "Upload artifacts to make them available in downstream jobs"
+        uses: actions/upload-artifact@v2
+        with:
+          name: build-artifacts-${{ steps.workflow-prep-step.outputs.stringified-workflow-name }}-${{ github.run_number }}
+          path: ${{ env.FLINK_ARTIFACT_DIR }}${{ env.FLINK_ARTIFACT_FILENAME }}
+          if-no-files-found: error
+
+  test:
+    name: "Test (module: ${{ matrix.module }})"
+    needs: compile
+    runs-on: ubuntu-20.04
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - module: core
+            stringified-module-name: core
+          - module: python
+            stringified-module-name: python
+          - module: table
+            stringified-module-name: table
+          - module: connectors
+            stringified-module-name: connectors
+          - module: kafka/gelly
+            stringified-module-name: kafka-gelly
+          - module: tests
+            stringified-module-name: tests
+          - module: misc
+            stringified-module-name: misc
+          - module: finegrained_resource_management
+            stringified-module-name: finegrained_resource_management
+
+    steps:
+      - name: "Set JDK version"
+        run: |
+          echo "JAVA_HOME=$JAVA_HOME_${{inputs.jdk-version}}_X64" >> 
${GITHUB_ENV}
+          echo "PATH=$JAVA_HOME_${{inputs.jdk-version}}_X64/bin:$PATH" >> 
${GITHUB_ENV}
+
+      - name: "Flink Checkout"
+        uses: actions/checkout@v2
+        with:
+          path: ${{ env.CHECKOUT_DIR }}
+
+      - name: "Worker Setup"
+        uses: ./flink-checkout/.github/actions/worker-setup
+        with:
+          flink-checkout-folder: ${{ env.CHECKOUT_DIR }}
+          jdk-version: ${{ inputs.jdk-version }}
+          ssl-enabled: true
+          initial-disk-cleanup: true
+          additional-packages: "rename"
+
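+      # write core dumps as core.<pid> into the crashing process' working directory so they can be picked up later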
+      - name: "Set coredump pattern"
+        working-directory: ${{ env.CHECKOUT_DIR }}
+        run: sudo sysctl -w kernel.core_pattern=core.%p
+
+      - name: "Download build artifacts from compile job"
+        uses: actions/download-artifact@v2
+        with:
+          name: build-artifacts-${{ needs.compile.outputs.stringified-workflow-name }}-${{ github.run_number }}
+          path: ${{ env.FLINK_ARTIFACT_DIR }}
+
+      - name: "Unpack build artifact"
+        working-directory: ${{ env.CHECKOUT_DIR }}
+        run: ./tools/azure-pipelines/unpack_build_artifact.sh -f ${{ env.FLINK_ARTIFACT_DIR }}/${{ env.FLINK_ARTIFACT_FILENAME }} -t ${{ env.CHECKOUT_DIR }}
+
+      - name: "Try loading Docker images from Cache"
+        id: docker-cache
+        uses: actions/cache@v2
+        with:
+          path: ${{ env.DOCKER_IMAGES_CACHE_FOLDER }}
+          key: ${{ matrix.module }}-docker-${{ runner.os }}-${{ hashFiles('**/cache_docker_images.sh', '**/flink-test-utils-parent/**/DockerImageVersions.java') }}
+          restore-keys: ${{ matrix.module }}-docker-${{ runner.os }}
+
+      - name: "Load Docker images if not present in cache, yet"
+        if: ${{ !cancelled() && !steps.docker-cache.cache.hit }}
+        working-directory: ${{ env.CHECKOUT_DIR }}
+        run: ./tools/azure-pipelines/cache_docker_images.sh -f ${{ 
env.DOCKER_IMAGES_CACHE_FOLDER }} load
+
+      - name: "Test - ${{ matrix.module }}"
+        id: test-run
+        working-directory: ${{ env.CHECKOUT_DIR }}
+        env:
+          IT_CASE_S3_BUCKET: ${{ secrets.s3_bucket }}
+          IT_CASE_S3_ACCESS_KEY: ${{ secrets.s3_access_key }}
+          IT_CASE_S3_SECRET_KEY: ${{ secrets.s3_secret_key }}
+          IT_CASE_GLUE_SCHEMA_ACCESS_KEY: ${{ secrets.glue_schema_access_key }}
+          IT_CASE_GLUE_SCHEMA_SECRET_KEY: ${{ secrets.glue_schema_secret_key }}
+        timeout-minutes: 240
+        run: |
+          ${{ inputs.environment }} ./tools/azure-pipelines/uploading_watchdog.sh \
+              -a ${{ github.job }} \
+              -d ${{ env.FLINK_ARTIFACT_DIR }} \
+              -t 240 \
+              ./tools/ci/test_controller.sh ${{ matrix.module }}
+
+      - name: "Post-process build artifacts"
+        working-directory: ${{ env.CHECKOUT_DIR }}
+        run: find ${{ steps.test-run.outputs.debug-files-output-dir }} -type f -exec rename 's/[:<>|*?]/-/' {} \;
+
+      - name: "Upload build artifacts"
+        uses: actions/upload-artifact@v2
+        if: ${{ failure() && steps.test-run.outputs.debug-files-output-dir != '' }}
+        with:
+          name: logs-test-${{ needs.compile.outputs.stringified-workflow-name }}-${{ github.run_number }}-${{ matrix.stringified-module-name }}-${{ steps.test-run.outputs.debug-files-name }}
+
+      - name: "Save Docker images to cache"
+        working-directory: ${{ env.CHECKOUT_DIR }}
+        if: ${{ !cancelled() && (failure() || steps.docker-cache.outputs.cache-hit != 'true') }}
+        run: ./tools/azure-pipelines/cache_docker_images.sh -f ${{ env.DOCKER_IMAGES_CACHE_FOLDER }} save
+
+  e2e-prereq-check:
+    name: "Check: Code modified"
+    needs: compile
+    runs-on: ubuntu-latest
+    outputs:
+      skip-e2e: ${{ steps.docs-only-pr-check.outputs.skip-e2e }}
+    steps:
+      - name: "Flink Checkout"
+        uses: actions/checkout@v2
+        with:
+          path: ${{ env.CHECKOUT_DIR }}
+
+      # Skip e2e test execution if this is a documentation-only pull request (master / release builds will still be checked regularly)
+      - name: "Check if it's a docs-only PR (i.e. e2e tests can be skipped)"
+        id: docs-only-pr-check
+        working-directory: ${{ env.CHECKOUT_DIR }}
+        run: |
+          source ./tools/azure-pipelines/build_properties.sh
+          if is_docs_only_pullrequest; then
+            echo "This is a documentation-only change. Skipping e2e execution."
+            echo "::set-output name=skip-e2e::true"
+          else
+            echo "This is a regular CI build. Continuing ..."
+          fi
+        shell: bash
+
+  e2e:
+    name: "E2E (group ${{ matrix.group }})"
+    needs: [compile, e2e-prereq-check]
+    runs-on: ubuntu-20.04
+    if: ${{ needs.e2e-prereq-check.outputs.skip-e2e != 'true' }}
+    timeout-minutes: 310
+    env:
+      E2E_CACHE_FOLDER: ${{ github.workspace }}/.e2e-cache
+      E2E_TARBALL_CACHE: ${{ github.workspace }}/.e2e-tarball-cache
+    strategy:
+      fail-fast: false
+      matrix:
+        group: [1, 2]
+
+    steps:
+      - name: "Set JDK version"
+        run: |
+          echo "JAVA_HOME=$JAVA_HOME_${{inputs.jdk-version}}_X64" >> 
${GITHUB_ENV}
+          echo "PATH=$JAVA_HOME_${{inputs.jdk-version}}_X64/bin:$PATH" >> 
${GITHUB_ENV}
+
+      - name: "Flink Checkout"
+        uses: actions/checkout@v2
+        with:
+          path: ${{ env.CHECKOUT_DIR }}
+
+      - name: "Worker Setup"
+        uses: ./flink-checkout/.github/actions/worker-setup
+        with:
+          flink-checkout-folder: ${{ env.CHECKOUT_DIR }}
+          jdk-version: ${{ inputs.jdk-version }}
+          ssl-enabled: true
+          initial-disk-cleanup: true
+
+      - name: "Download build artifacts from compile job"
+        uses: actions/download-artifact@v2
+        with:
+          name: build-artifacts-${{ needs.compile.outputs.stringified-workflow-name }}-${{ github.run_number }}
+          path: ${{ env.FLINK_ARTIFACT_DIR }}
+
+      - name: "Unpack build artifact"
+        working-directory: ${{ env.CHECKOUT_DIR }}
+        run: ./tools/azure-pipelines/unpack_build_artifact.sh -f ${{ env.FLINK_ARTIFACT_DIR }}/${{ env.FLINK_ARTIFACT_FILENAME }} -t ${{ env.CHECKOUT_DIR }}
+
+      # the cache task does not create directories on a cache miss, and can later fail when trying to tar the directory if the tests haven't created it
+      # this may for example happen if a given directory is only used by a subset of tests, which are run in a different 'group'
+      - name: "Create cache directories"
+        run: |
+          mkdir -p ${{ env.E2E_CACHE_FOLDER }}
+          mkdir -p ${{ env.E2E_TARBALL_CACHE }}
+          mkdir -p ${{ env.DOCKER_IMAGES_CACHE_FOLDER }}
+
+      - name: "Load E2E files from Cache"
+        uses: actions/cache@v2
+        with:
+          path: ${{ env.E2E_CACHE_FOLDER }}
+          key: e2e-cache-${{ matrix.group }}-${{ hashFiles('**/flink-end-to-end-tests/**/*.java', '!**/avro/**') }}
+
+      - name: "Load E2E artifacts from Cache"
+        uses: actions/cache@v2
+        with:
+          path: ${{ env.E2E_TARBALL_CACHE }}
+          key: e2e-artifact-cache-${{ matrix.group }}-${{ hashFiles('**/flink-end-to-end-tests/**/*.sh') }}
+          restore-keys: e2e-artifact-cache-${{ matrix.group }}
+
+      - name: "Try loading Docker images from Cache"
+        id: docker-cache
+        uses: actions/cache@v2
+        with:
+          path: ${{ env.DOCKER_IMAGES_CACHE_FOLDER }}
+          key: e2e-${{ matrix.group }}-docker-${{ runner.os }}-${{ hashFiles('**/cache_docker_images.sh', '**/flink-test-utils-parent/**/DockerImageVersions.java') }}
+
+      - name: "Load Docker images if not present in Cache, yet"
+        working-directory: ${{ env.CHECKOUT_DIR }}
+        if: ${{ !cancelled() && !steps.docker-cache.cache.hit }}
+        run: ./tools/azure-pipelines/cache_docker_images.sh -f ${{ 
env.DOCKER_IMAGES_CACHE_FOLDER }} load
+
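+      # ${{ inputs.environment }} resolves to a variable assignment (e.g. PROFILE="...") that parameterizes the build command below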
+      - name: "Build Flink"
+        working-directory: ${{ env.CHECKOUT_DIR }}
+        run: ${{ inputs.environment }} ./tools/ci/validate.sh "install -DskipTests -Dfast $PROFILE -Pskip-webui-build"
+
+      - name: "Run E2E Tests"
+        id: test-run
+        working-directory: ${{ env.CHECKOUT_DIR }}
+        env:
+          IT_CASE_S3_BUCKET: ${{ secrets.s3_bucket }}
+          IT_CASE_S3_ACCESS_KEY: ${{ secrets.s3_access_key }}
+          IT_CASE_S3_SECRET_KEY: ${{ secrets.s3_secret_key }}
+          IT_CASE_GLUE_SCHEMA_ACCESS_KEY: ${{ secrets.glue_schema_access_key }}
+          IT_CASE_GLUE_SCHEMA_SECRET_KEY: ${{ secrets.glue_schema_secret_key }}
+        timeout-minutes: 310
+        run: |
+          ${{ inputs.environment }} FLINK_DIR=`pwd`/build-target ./tools/azure-pipelines/uploading_watchdog.sh \
+            -a ${{ github.job }} \
+            -d ${{ env.FLINK_ARTIFACT_DIR }} \
+            -t 310 \
+            flink-end-to-end-tests/run-nightly-tests.sh ${{ matrix.group }}
+
+      - name: "Upload Logs"
+        uses: actions/upload-artifact@v2
+        if: ${{ failure() && steps.test-run.outputs.debug-files-output-dir != '' }}
+        with:
+          name: logs-e2e-${{ needs.compile.outputs.stringified-workflow-name }}-${{ github.run_number }}-${{ matrix.group }}-${{ steps.test-run.outputs.debug-files-name }}
+
+      - name: "Save Docker images to Cache"
+        working-directory: ${{ env.CHECKOUT_DIR }}
+        if: ${{ !cancelled() && (failure() || steps.docker-cache.outputs.cache-hit != 'true') }}
+        run: ./tools/azure-pipelines/cache_docker_images.sh -f ${{ env.DOCKER_IMAGES_CACHE_FOLDER }} save
diff --git a/.github/workflows/hadoop-2.8.3-scala-2.12-workflow.yml b/.github/workflows/hadoop-2.8.3-scala-2.12-workflow.yml
new file mode 100644
index 00000000000..e87b92a29b8
--- /dev/null
+++ b/.github/workflows/hadoop-2.8.3-scala-2.12-workflow.yml
@@ -0,0 +1,45 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "Hadoop 2.8.3/Java 8/Scala 2.12"
+
+on: [push, workflow_dispatch]
+
+jobs:
+  ci:
+    uses: ./.github/workflows/flink-ci-template.yml
+    with:
+      environment: 'PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.12"'
+      jdk-version: 8
+    secrets:
+      s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
+      s3_access_key: ${{ secrets.IT_CASE_S3_ACCESS_KEY }}
+      s3_secret_key: ${{ secrets.IT_CASE_S3_SECRET_KEY }}
+      glue_schema_access_key: ${{ secrets.IT_CASE_GLUE_SCHEMA_ACCESS_KEY }}
+      glue_schema_secret_key: ${{ secrets.IT_CASE_GLUE_SCHEMA_SECRET_KEY }}
+  docs-404-check:
+    runs-on: ubuntu-latest
+    steps:
+      - name: "Checks out Flink"
+        uses: actions/checkout@v2
+
+      - name: "Check if PR contains docs change"
+        run: |
+          source ./tools/azure-pipelines/build_properties.sh
+          pr_contains_docs_changes
+        shell: bash
+
+      - name: "Builds docs"
+        run: ./tools/ci/docs.sh
diff --git a/tools/azure-pipelines/cache_docker_images.sh b/tools/azure-pipelines/cache_docker_images.sh
index 78d31a56901..42414b044c0 100755
--- a/tools/azure-pipelines/cache_docker_images.sh
+++ b/tools/azure-pipelines/cache_docker_images.sh
@@ -21,6 +21,14 @@
 # includes testcontainer images, kafka, elasticsearch, etc.
 #
 
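+# accept the cache folder via -f so that callers (e.g. GitHub Actions) do not have to rely on a pre-set environment variable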
+while getopts "f:" o; do
+    case "${o}" in
+        f)
+            DOCKER_IMAGES_CACHE_FOLDER=${OPTARG};;
+    esac
+done
+shift $((OPTIND-1))
+
 if [ -z "${DOCKER_IMAGES_CACHE_FOLDER:-}" ]
 then
     echo "\$DOCKER_IMAGES_CACHE_FOLDER must be set to cache the testing docker 
images. Exiting"
@@ -36,7 +44,7 @@ DOCKER_IMAGES_CACHE_PATH="${DOCKER_IMAGES_CACHE_FOLDER}/cache.tar"
 helpFunction()
 {
    echo ""
-   echo "Usage: $0 MODE"
+   echo "Usage: $0 [-f <folder>] MODE"
    echo -e "\tMODE :: What mode to run the script in (either save or load)"
    exit 1
 }
diff --git a/tools/azure-pipelines/create_build_artifact.sh b/tools/azure-pipelines/create_build_artifact.sh
index 809d14fe651..ccadb14ed14 100755
--- a/tools/azure-pipelines/create_build_artifact.sh
+++ b/tools/azure-pipelines/create_build_artifact.sh
@@ -17,9 +17,16 @@
 # limitations under the License.
 
################################################################################
 
-echo "Creating build artifact dir $FLINK_ARTIFACT_DIR"
-
-cp -r . "$FLINK_ARTIFACT_DIR"
+while getopts "f:" o; do
+    case "${o}" in
+        f)
+            FLINK_ARTIFACT_DIR=${OPTARG};;
+        *)
+          # no special treatment of invalid parameters necessary
+          ;;
+    esac
+done
+shift $((OPTIND-1))
 
 echo "Minimizing artifact files"
 
@@ -28,20 +35,21 @@ echo "Minimizing artifact files"
 # by removing files not required for subsequent stages
 
 # jars are re-built in subsequent stages, so no need to cache them (cannot be avoided)
-find "$FLINK_ARTIFACT_DIR" -maxdepth 8 -type f -name '*.jar' | xargs rm -rf
+find . -maxdepth 8 -type f -name '*.jar' -exec rm -rf {} \;
 
 # .git directory
 # not deleting this can cause build stability issues
 # merging the cached version sometimes fails
-rm -rf "$FLINK_ARTIFACT_DIR/.git"
+rm -rf "./.git"
 
 # AZ Pipelines has a problem with links.
-rm "$FLINK_ARTIFACT_DIR/build-target"
+rm "./build-target"
 
 # Remove javadocs because they are not used in later stages
-rm -rf "$FLINK_ARTIFACT_DIR/target/site"
+rm -rf "./target/site"
 
 # Remove WebUI node directories; unnecessary because the UI is already fully built
-rm -rf "$FLINK_ARTIFACT_DIR/flink-runtime-web/web-dashboard/node"
-rm -rf "$FLINK_ARTIFACT_DIR/flink-runtime-web/web-dashboard/node_modules"
+rm -rf "./flink-runtime-web/web-dashboard/node"
+rm -rf "./flink-runtime-web/web-dashboard/node_modules"
 
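+# package the minimized working tree into the artifact tarball; the --exclude guards against the archive including itself should it be created inside the tree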
+tar -c -z --exclude ${FLINK_ARTIFACT_DIR} -f ${FLINK_ARTIFACT_DIR} .
diff --git a/tools/azure-pipelines/debug_files_utils.sh b/tools/azure-pipelines/debug_files_utils.sh
index 50c1b4c5a4d..f5fe2d5c0ce 100755
--- a/tools/azure-pipelines/debug_files_utils.sh
+++ b/tools/azure-pipelines/debug_files_utils.sh
@@ -21,7 +21,11 @@ function prepare_debug_files {
        MODULE=$@
        export DEBUG_FILES_OUTPUT_DIR="$AGENT_TEMPDIRECTORY/debug_files"
        export DEBUG_FILES_NAME="$(echo $MODULE | tr -c '[:alnum:]\n\r' '_')-$(date +%s)"
+       # make environment variables available in Azure CI workflow configurations
+       echo "##vso[task.setvariable variable=DEBUG_FILES_OUTPUT_DIR]$DEBUG_FILES_OUTPUT_DIR"
+       echo "##vso[task.setvariable variable=DEBUG_FILES_NAME]$DEBUG_FILES_NAME"
+       # make environment variables available in GitHub Actions workflow configurations
+       echo "::set-output name=debug-files-output-dir::${DEBUG_FILES_OUTPUT_DIR}"
+       echo "::set-output name=debug-files-name::${DEBUG_FILES_NAME}"
        mkdir -p $DEBUG_FILES_OUTPUT_DIR || { echo "FAILURE: cannot create debug files directory '${DEBUG_FILES_OUTPUT_DIR}'." ; exit 1; }
 }
diff --git a/tools/azure-pipelines/unpack_build_artifact.sh b/tools/azure-pipelines/unpack_build_artifact.sh
index 15d5c7c1057..ab012170b23 100755
--- a/tools/azure-pipelines/unpack_build_artifact.sh
+++ b/tools/azure-pipelines/unpack_build_artifact.sh
@@ -17,14 +17,26 @@
 # limitations under the License.
 
################################################################################
 
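+# -f: path of the build artifact tarball, -t: optional directory to extract into (passed to tar as -C)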
+while getopts "f:t:" o; do
+    case "${o}" in
+        f)
+            FLINK_ARTIFACT_DIR=${OPTARG};;
+        t)
+            TARGET_FOLDER_PARAMETER="-C ${OPTARG}";;
+        *)
+          # no special treatment of invalid parameters necessary
+          ;;
+    esac
+done
+shift $((OPTIND-1))
 
 if ! [ -e $FLINK_ARTIFACT_DIR ]; then
-    echo "Cached flink dir $FLINK_ARTIFACT_DIR does not exist. Exiting build."
+    echo "Cached flink archive $FLINK_ARTIFACT_DIR does not exist. Exiting 
build."
     exit 1
 fi
 
-echo "Merging cache"
-cp -RT "$FLINK_ARTIFACT_DIR" "."
+echo "Extracting build artifacts"
+tar -xzf ${FLINK_ARTIFACT_DIR} ${TARGET_FOLDER_PARAMETER}
 
 echo "Adjusting timestamps"
 # adjust timestamps of proto file to avoid re-generation
diff --git a/tools/azure-pipelines/uploading_watchdog.sh b/tools/azure-pipelines/uploading_watchdog.sh
index c49f093200a..58509e2e3a1 100755
--- a/tools/azure-pipelines/uploading_watchdog.sh
+++ b/tools/azure-pipelines/uploading_watchdog.sh
@@ -19,6 +19,17 @@
 # b) It prints a warning if the test has reached 80% of its execution time
 # c) N minutes before the end of the execution time, it will start uploading the current output as azure artifacts
 
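+# map command-line flags onto the Azure Pipelines variables this script already consumes, so it can also be invoked from GitHub Actions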
+while getopts 'a:d:t:' flag; do
+  case "${flag}" in
+    a) export AGENT_JOBNAME="$OPTARG";;
+    d) export AGENT_TEMPDIRECTORY="$OPTARG";;
+    t) export SYSTEM_JOBTIMEOUT="$OPTARG";;
+    *) echo "Wrong parameter passed. $OPTARG";;
+  esac
+done
+
+shift "$((OPTIND-1))"
+
 COMMAND=$@
 
 HERE="`dirname \"$0\"`"             # relative
diff --git a/tools/ci/watchdog.sh b/tools/ci/watchdog.sh
index 1c069e38840..4485501dfca 100755
--- a/tools/ci/watchdog.sh
+++ b/tools/ci/watchdog.sh
@@ -23,7 +23,7 @@
 #
 
 # Number of seconds w/o output before printing a stack trace and killing the watched process
-MAX_NO_OUTPUT=${MAX_NO_OUTPUT:-900}
+MAX_NO_OUTPUT=${MAX_NO_OUTPUT:-1800}
 
 # Number of seconds to sleep before checking the output again
 SLEEP_TIME=${SLEEP_TIME:-20}
