This is an automated email from the ASF dual-hosted git repository.

anandinguva pushed a commit to branch zip_to_tar
in repository https://gitbox.apache.org/repos/asf/beam.git

commit 81d74f6fedbd0340ac9585f196cb072d64d6b3d6
Author: Anand Inguva <[email protected]>
AuthorDate: Mon Oct 23 12:25:33 2023 -0400

    change extension of beam sdist from .zip to .tar.gz
---
 .github/actions/common-rc-validation/action.yaml   | 10 +--
 .github/workflows/build_release_candidate.yml      | 16 ++--
 .github/workflows/run_rc_validation.yml            | 93 +++++++++++-----------
 contributor-docs/release-guide.md                  | 10 +--
 .../python_release_automation_utils.sh             |  6 +-
 .../src/main/scripts/build_release_candidate.sh    | 16 ++--
 .../main/scripts/deploy_release_candidate_pypi.sh  |  4 +-
 release/src/main/scripts/run_rc_validation.sh      | 48 +++++------
 sdks/python/scripts/run_snapshot_publish.sh        |  6 +-
 9 files changed, 104 insertions(+), 105 deletions(-)

diff --git a/.github/actions/common-rc-validation/action.yaml 
b/.github/actions/common-rc-validation/action.yaml
index 23efa93d153..51738e13812 100644
--- a/.github/actions/common-rc-validation/action.yaml
+++ b/.github/actions/common-rc-validation/action.yaml
@@ -36,19 +36,19 @@ runs:
       shell: bash
       run: |
           echo "---------------------Downloading Python Staging 
RC----------------------------"
-          wget 
${PYTHON_RC_DOWNLOAD_URL}/${RELEASE_VER}/python/apache-beam-${RELEASE_VER}.zip
-          wget 
${PYTHON_RC_DOWNLOAD_URL}/${RELEASE_VER}/python/apache-beam-${RELEASE_VER}.zip.sha512
-          if [[ ! -f apache-beam-$RELEASE_VER.zip ]]; then
+          wget 
${PYTHON_RC_DOWNLOAD_URL}/${RELEASE_VER}/python/apache-beam-${RELEASE_VER}.tar.gz
+          wget 
${PYTHON_RC_DOWNLOAD_URL}/${RELEASE_VER}/python/apache-beam-${RELEASE_VER}.tar.gz.sha512
+          if [[ ! -f apache-beam-$RELEASE_VER.tar.gz ]]; then
             { echo "Fail to download Python Staging RC files." ;exit 1; }
           fi
 
           echo "--------------------------Verifying 
Hashes------------------------------------"
-          sha512sum -c apache-beam-${RELEASE_VER}.zip.sha512
+          sha512sum -c apache-beam-${RELEASE_VER}.tar.gz.sha512
 
           `which pip` install --upgrade pip
           `which pip` install --upgrade setuptools
 
     - name: Installing python SDK
       shell: bash
-      run: pip install apache-beam-${RELEASE_VER}.zip[gcp]
+      run: pip install apache-beam-${RELEASE_VER}.tar.gz[gcp]
       
\ No newline at end of file
diff --git a/.github/workflows/build_release_candidate.yml 
b/.github/workflows/build_release_candidate.yml
index 12f1537dac1..7c3e7b98c4b 100644
--- a/.github/workflows/build_release_candidate.yml
+++ b/.github/workflows/build_release_candidate.yml
@@ -122,18 +122,18 @@ jobs:
             mkdir -p beam/${{ github.event.inputs.RELEASE }}
             cd beam/${{ github.event.inputs.RELEASE }}
             RC_DIR="beam-${{ github.event.inputs.RELEASE }}-RC${{ 
github.event.inputs.RC }}"
-            RC_ZIP="${RC_DIR}.zip"
+            RC_ZIP="${RC_DIR}.tar.gz"
             RELEASE_DIR="beam-${{ github.event.inputs.RELEASE }}"
             RC_TAG="v${{ github.event.inputs.RELEASE }}-RC${{ 
github.event.inputs.RC }}"
-            SOURCE_RELEASE_ZIP="apache-beam-${{ github.event.inputs.RELEASE 
}}-source-release.zip"
+            SOURCE_RELEASE_ZIP="apache-beam-${{ github.event.inputs.RELEASE 
}}-source-release.tar.gz"
               # Check whether there is an existing dist dir
             if (svn ls "${SOURCE_RELEASE_ZIP}"); then
               echo "Removing existing ${SOURCE_RELEASE_ZIP}."
               svn delete "${SOURCE_RELEASE_ZIP}"
             fi
 
-            echo "Downloading: 
https://github.com/apache/beam/archive/${RC_TAG}.zip";
-            wget https://github.com/apache/beam/archive/${RC_TAG}.zip  -O 
"${RC_ZIP}"
+            echo "Downloading: 
https://github.com/apache/beam/archive/${RC_TAG}.tar.gz";
+            wget https://github.com/apache/beam/archive/${RC_TAG}.tar.gz  -O 
"${RC_ZIP}"
 
-            unzip "$RC_ZIP"
+            tar -xzf "$RC_ZIP"
             rm "$RC_ZIP"
@@ -197,11 +197,11 @@ jobs:
         
           cd "${RELEASE_DIR}"/python
         
-          echo "------Checking Hash Value for apache-beam-${RELEASE}.zip-----"
-          sha512sum -c "apache-beam-${RELEASE}.zip.sha512"
+          echo "------Checking Hash Value for 
apache-beam-${RELEASE}.tar.gz-----"
+          sha512sum -c "apache-beam-${RELEASE}.tar.gz.sha512"
         
-          echo "------Signing Source Release apache-beam-${RELEASE}.zip------"
-          gpg --local-user "${{steps.import_gpg.outputs.name}}" --armor 
--detach-sig "apache-beam-${RELEASE}.zip"
+          echo "------Signing Source Release 
apache-beam-${RELEASE}.tar.gz------"
+          gpg --local-user "${{steps.import_gpg.outputs.name}}" --armor 
--detach-sig "apache-beam-${RELEASE}.tar.gz"
         
           for artifact in *.whl; do
             echo "----------Checking Hash Value for ${artifact} 
wheel-----------"
diff --git a/.github/workflows/run_rc_validation.yml 
b/.github/workflows/run_rc_validation.yml
index 4902fee8101..35fb3ce05ea 100644
--- a/.github/workflows/run_rc_validation.yml
+++ b/.github/workflows/run_rc_validation.yml
@@ -17,14 +17,14 @@
 
 # To learn more about GitHub Actions in Apache Beam check the CI.md
 
-name: Run RC Validation 
+name: Run RC Validation
 on:
   workflow_dispatch:
     inputs:
       RELEASE_VER:
         description: Beam current Release Version
         required: true
-        default: 2.42.0 
+        default: 2.42.0
       USER_GCS_BUCKET:
         description: Bucket to upload results
         required: true
@@ -57,9 +57,9 @@ on:
         type: boolean
         required: true
         default: true
-env: 
+env:
   RC_TAG: 
"v${{github.event.inputs.RELEASE_VER}}-RC${{github.event.inputs.RC_NUM}}"
-  RELEASE_VER: ${{github.event.inputs.RELEASE_VER}}    
+  RELEASE_VER: ${{github.event.inputs.RELEASE_VER}}
   USER_GCP_PROJECT: apache-beam-testing
   PYTHON_RC_DOWNLOAD_URL: https://dist.apache.org/repos/dist/dev/beam
   USER_GCP_REGION: us-central1
@@ -100,19 +100,19 @@ jobs:
       - name: Comment on PR to Trigger Python ReleaseCandidate Test
         run: |
           gh pr comment "$GITHUB_PR_URL" --body "Run Python ReleaseCandidate"
-  
+
   sql_taxi_with_dataflow:
     runs-on: [self-hosted,ubuntu-20.04]
     if: ${{github.event.inputs.RUN_SQL_TAXI_WITH_DATAFLOW == 'true'}}
     strategy:
-      matrix: 
+      matrix:
         py_version: [3.8]
     steps:
     - name: Checkout code
       uses: actions/checkout@v4
-      with: 
-       ref: ${{env.RC_TAG}}      
-    
+      with:
+       ref: ${{env.RC_TAG}}
+
     - name: Install Python
       uses: actions/setup-python@v4
       with:
@@ -154,23 +154,23 @@ jobs:
         --num_workers 5 \
         --output_topic projects/${USER_GCP_PROJECT}/topics/${SQL_TAXI_TOPIC} \
         
--beam_services="{\":sdks:java:extensions:sql:expansion-service:shadowJar\": 
\"${SQL_EXPANSION_SERVICE_JAR}\"}" \
-        --sdk_location apache-beam-${RELEASE_VER}.zip || true
+        --sdk_location apache-beam-${RELEASE_VER}.tar.gz || true
     - name: Checking Results
       run: |
         gcloud pubsub subscriptions pull --project=${USER_GCP_PROJECT} 
--limit=5 ${SQL_TAXI_SUBSCRIPTION}
         gcloud pubsub subscriptions pull --project=${USER_GCP_PROJECT} 
--limit=5 ${SQL_TAXI_SUBSCRIPTION}
     - name: Removing Pub Sub Topic
       if: always()
-      run: | 
+      run: |
         gcloud pubsub topics delete --project=${USER_GCP_PROJECT} 
${SQL_TAXI_TOPIC}
         gcloud pubsub subscriptions delete --project=${USER_GCP_PROJECT} 
${SQL_TAXI_SUBSCRIPTION}
 
 
   python_cross_validation:
     runs-on: [self-hosted,ubuntu-20.04]
-    if: ${{github.event.inputs.RUN_PYTHON_CROSS_VALIDATION == 'true'}} 
+    if: ${{github.event.inputs.RUN_PYTHON_CROSS_VALIDATION == 'true'}}
     strategy:
-      matrix: 
+      matrix:
         py_version: [3.8]
     steps:
     - name: Checkout code
@@ -183,7 +183,7 @@ jobs:
         echo "====================Checking Environment & 
Variables================="
         echo ""
         echo "running validations on release 
${{github.event.inputs.RELEASE_VER}} RC${{github.event.inputs.RC_NUM}}."
-    - name: Install Kubectl 
+    - name: Install Kubectl
       uses: azure/setup-kubectl@v3
 
     - name: Setup Java JDK
@@ -196,7 +196,7 @@ jobs:
       uses: actions/setup-python@v4
       with:
         python-version: ${{matrix.py_version}}
-   
+
 
     - name: Setting python env
       uses: ./.github/actions/common-rc-validation
@@ -206,19 +206,19 @@ jobs:
 
     - name: Installing gcloud-auth-plugin
       run: sudo apt-get install google-cloud-sdk-gke-gcloud-auth-plugin
-    - name: Setting Kafka Cluster Name 
+    - name: Setting Kafka Cluster Name
       run: |
-        echo "KAFKA_CLUSTER_NAME=xlang-kafka-cluster-$RANDOM">> $GITHUB_ENV 
-    
+        echo "KAFKA_CLUSTER_NAME=xlang-kafka-cluster-$RANDOM">> $GITHUB_ENV
+
 
     - name: Creating Kafka Cluster
       run: |
         gcloud container clusters create --project=${USER_GCP_PROJECT} 
--region=${USER_GCP_REGION} --no-enable-ip-alias $KAFKA_CLUSTER_NAME
         kubectl apply -R -f .test-infra/kubernetes/kafka-cluster
-    
+
     - name: Waiting for Kafka cluster to be ready
       run: kubectl wait --for=condition=Ready pod/kafka-0 --timeout=1200s
-    - name: Start xlang Kafka Taxi with Dataflow Runner 
+    - name: Start xlang Kafka Taxi with Dataflow Runner
       run: |
         echo "BOOTSTRAP_SERVERS=$(kubectl get svc outside-0 -o 
jsonpath='{.status.loadBalancer.ingress[0].ip}'):32400" >> $GITHUB_ENV
         echo "KAFKA_TAXI_DF_DATASET=${GITHUB_ACTOR}_python_validations_$(date 
+%m%d)_$RANDOM" >> $GITHUB_ENV
@@ -243,16 +243,16 @@ jobs:
         --temp_location=${USER_GCS_BUCKET}/temp/ \
         --with_metadata \
         --beam_services="{\"sdks:java:io:expansion-service:shadowJar\": 
\"${KAFKA_EXPANSION_SERVICE_JAR}\"}" \
-        --sdk_location apache-beam-${RELEASE_VER}.zip || true
+        --sdk_location apache-beam-${RELEASE_VER}.tar.gz || true
     - name: Checking executions results
       run: |
-        bq head -n 10 ${KAFKA_TAXI_DF_DATASET}.xlang_kafka_taxi          
+        bq head -n 10 ${KAFKA_TAXI_DF_DATASET}.xlang_kafka_taxi
     - name: Remove BigQuery Dataset
       if: always()
       run: |
         bq rm -f ${KAFKA_TAXI_DF_DATASET}.xlang_kafka_taxi
         bq rm -f ${KAFKA_TAXI_DF_DATASET}
-    
+
     - name: Delete Kafka Cluster
       if: always()
       run: gcloud container clusters delete --project=${USER_GCP_PROJECT} 
--region=${USER_GCP_REGION} --async -q $KAFKA_CLUSTER_NAME
@@ -266,8 +266,8 @@ jobs:
     steps:
     - name: Sending PubSub name to env
       run: |
-        echo 
"SHARED_PUBSUB_TOPIC=leader_board-${GITHUB_ACTOR}-python-topic-$(date 
+%m%d)_$RANDOM" >> $GITHUB_ENV 
-    - id: generate_pubsub_name 
+        echo 
"SHARED_PUBSUB_TOPIC=leader_board-${GITHUB_ACTOR}-python-topic-$(date 
+%m%d)_$RANDOM" >> $GITHUB_ENV
+    - id: generate_pubsub_name
       run: |
         echo "::set-output name=pubsub::$SHARED_PUBSUB_TOPIC"
     - name: Creating Pub Sub Topics
@@ -287,7 +287,7 @@ jobs:
       uses: actions/setup-python@v4
       with:
         python-version: '3.8'
-    
+
     - name: Setting python env
       uses: ./.github/actions/common-rc-validation
       with:
@@ -343,14 +343,14 @@ jobs:
       run: |
         ls
         cd word-count-beam
-        timeout --preserve-status 50m mvn compile exec:java 
-Dexec.mainClass=org.apache.beam.examples.complete.game.injector.Injector 
-Dexec.args="${USER_GCP_PROJECT} ${{needs.generate_shared_pubsub.outputs.name}} 
none" || true 
-  
+        timeout --preserve-status 50m mvn compile exec:java 
-Dexec.mainClass=org.apache.beam.examples.complete.game.injector.Injector 
-Dexec.args="${USER_GCP_PROJECT} ${{needs.generate_shared_pubsub.outputs.name}} 
none" || true
+
 
   direct_runner_leaderboard:
     runs-on: [self-hosted, ubuntu-20.04]
     if: ${{github.event.inputs.RUN_DIRECT_RUNNER_TESTS == 'true' }}
     strategy:
-      matrix: 
+      matrix:
         py_version: [3.8]
     needs: generate_shared_pubsub
     steps:
@@ -363,13 +363,13 @@ jobs:
         uses: actions/setup-python@v4
         with:
           python-version: ${{matrix.py_version}}
-      
+
       - name: Setting python env
         uses: ./.github/actions/common-rc-validation
         with:
           RELEASE_VER: ${{env.RELEASE_VER}}
           PYTHON_RC_DOWNLOAD_URL: ${{env.PYTHON_RC_DOWNLOAD_URL}}
-      
+
       - name: Exporting leaderboard Dataset Name
         run: echo 
"LEADERBOARD_DIRECT_DATASET=${GITHUB_ACTOR}_python_validations_$(date 
+%m%d)_$RANDOM" >> $GITHUB_ENV
       - name: Creating Dataset
@@ -389,11 +389,11 @@ jobs:
           bq head -n 10 ${LEADERBOARD_DIRECT_DATASET}.leader_board_teams
       - name: Removing BigQuery Dataset
         if: always()
-        run: | 
+        run: |
           bq rm -f ${LEADERBOARD_DIRECT_DATASET}.leader_board_users
           bq rm -f ${LEADERBOARD_DIRECT_DATASET}.leader_board_teams
           bq rm -f $LEADERBOARD_DIRECT_DATASET
-  
+
   dataflow_runner_leaderboard:
     runs-on: [self-hosted,ubuntu-20.04]
     if: ${{github.event.inputs.RUN_DATAFLOW_RUNNER_TESTS=='true'}}
@@ -411,13 +411,13 @@ jobs:
         uses: actions/setup-python@v4
         with:
           python-version: ${{matrix.py_version}}
-      
+
       - name: Setting python env
         uses: ./.github/actions/common-rc-validation
         with:
           RELEASE_VER: ${{env.RELEASE_VER}}
           PYTHON_RC_DOWNLOAD_URL: ${{env.PYTHON_RC_DOWNLOAD_URL}}
-      
+
       - name: Exporting Dataflow Dataset Name
         run: echo 
"LEADERBOARD_DF_DATASET=${GITHUB_ACTOR}_python_validations_$(date 
+%m%d)_$RANDOM" >> $GITHUB_ENV
       - name: Creating Dataset
@@ -434,18 +434,18 @@ jobs:
           --dataset ${LEADERBOARD_DF_DATASET} \
           --runner DataflowRunner \
           --temp_location=${USER_GCS_BUCKET}/temp/ \
-          --sdk_location apache-beam-${RELEASE_VER}.zip || true
+          --sdk_location apache-beam-${RELEASE_VER}.tar.gz || true
       - name: Checking results
         run: |
           bq head -n 10 ${LEADERBOARD_DF_DATASET}.leader_board_users
           bq head -n 10 ${LEADERBOARD_DF_DATASET}.leader_board_teams
       - name: Removing BigQuery Dataset
         if: always()
-        run: | 
+        run: |
           bq rm -f ${LEADERBOARD_DF_DATASET}.leader_board_users
           bq rm -f ${LEADERBOARD_DF_DATASET}.leader_board_teams
           bq rm -f $LEADERBOARD_DF_DATASET
-      
+
 
   direct_runner_gamestats:
     runs-on: [self-hosted,ubuntu-20.04]
@@ -463,13 +463,13 @@ jobs:
         uses: actions/setup-python@v4
         with:
           python-version: ${{matrix.py_version}}
-   
+
       - name: Setting python env
         uses: ./.github/actions/common-rc-validation
         with:
           RELEASE_VER: ${{env.RELEASE_VER}}
           PYTHON_RC_DOWNLOAD_URL: ${{env.PYTHON_RC_DOWNLOAD_URL}}
-      
+
       - name: Exporting Gamestates Direct Dataset Name
         run: echo 
"GAMESTATS_DIRECT_DATASET=${GITHUB_ACTOR}_python_validations_$(date 
+%m%d)_$RANDOM" >> $GITHUB_ENV
       - name: Creating Dataset
@@ -490,7 +490,7 @@ jobs:
           bq head -n 10 ${GAMESTATS_DIRECT_DATASET}.game_stats_sessions
       - name: Removing BigQuery Dataset
         if: always()
-        run: | 
+        run: |
           bq rm -f ${GAMESTATS_DIRECT_DATASET}.game_stats_sessions
           bq rm -f ${GAMESTATS_DIRECT_DATASET}.game_stats_teams
           bq rm -f $GAMESTATS_DIRECT_DATASET
@@ -512,13 +512,13 @@ jobs:
         uses: actions/setup-python@v4
         with:
           python-version: ${{matrix.py_version}}
-      
+
       - name: Setting python env
         uses: ./.github/actions/common-rc-validation
         with:
           RELEASE_VER: ${{env.RELEASE_VER}}
           PYTHON_RC_DOWNLOAD_URL: ${{env.PYTHON_RC_DOWNLOAD_URL}}
-      
+
       - name: Exporting Gamestates Direct Dataset Name
         run: echo 
"GAMESTATS_DF_DATASET=${GITHUB_ACTOR}_python_validations_$(date +%m%d)_$RANDOM" 
>> $GITHUB_ENV
       - name: Creating Dataset
@@ -535,7 +535,7 @@ jobs:
           --dataset ${GAMESTATS_DF_DATASET} \
           --runner DataflowRunner \
           --temp_location=${USER_GCS_BUCKET}/temp/ \
-          --sdk_location apache-beam-${RELEASE_VER}.zip \
+          --sdk_location apache-beam-${RELEASE_VER}.tar.gz \
           --fixed_window_duration ${FIXED_WINDOW_DURATION} || true
       - name: Checking Results
         run: |
@@ -545,9 +545,9 @@ jobs:
         if: always()
         run: |
           bq rm -f ${GAMESTATS_DF_DATASET}.game_stats_teams
-          bq rm -f ${GAMESTATS_DF_DATASET}.game_stats_sessions 
+          bq rm -f ${GAMESTATS_DF_DATASET}.game_stats_sessions
           bq rm -f $GAMESTATS_DF_DATASET
-  
+
   remove_shared_pubsub:
     runs-on: [self-hosted,ubuntu-20.04]
     needs: [java_injector, generate_shared_pubsub]
@@ -555,4 +555,3 @@ jobs:
     steps:
       - name: Deleting Shared Pub Sub
         run: gcloud pubsub topics delete --project=${USER_GCP_PROJECT}  
${{needs.generate_shared_pubsub.outputs.name}}
-        
\ No newline at end of file
diff --git a/contributor-docs/release-guide.md 
b/contributor-docs/release-guide.md
index e8e8ead0902..712e77a25b1 100644
--- a/contributor-docs/release-guide.md
+++ b/contributor-docs/release-guide.md
@@ -801,8 +801,8 @@ You can (optionally) also do additional verification by:
   signature/checksum files of Java artifacts may not contain filenames. Hence
   you might need to compare checksums/signatures manually or modify the files 
by
   appending the filenames.)
-- [ ] Check signatures (e.g. `gpg --verify apache-beam-1.2.3-python.zip.asc
-  apache-beam-1.2.3-python.zip`)
+- [ ] Check signatures (e.g. `gpg --verify apache-beam-1.2.3-python.tar.gz.asc
+  apache-beam-1.2.3-python.tar.gz`)
 - [ ] `grep` for legal headers in each file.
 - [ ] Run all jenkins suites and include links to passing tests in the voting
   email.
@@ -1018,14 +1018,14 @@ write to BigQuery, and create a cluster of machines for 
running containers (for
   * **Verify the hashes**
 
     ```
-    sha512sum -c apache-beam-2.5.0-python.zip.sha512
-    sha512sum -c apache-beam-2.5.0-source-release.zip.sha512
+    sha512sum -c apache-beam-2.5.0-python.tar.gz.sha512
+    sha512sum -c apache-beam-2.5.0-source-release.tar.gz.sha512
     ```
   * **Build SDK**
 
     ```
     sudo apt-get install unzip
-    unzip apache-beam-2.5.0-source-release.zip
+    tar -xzf apache-beam-2.5.0-source-release.tar.gz
     python setup.py sdist
     ```
   * **Setup virtual environment**
diff --git a/release/src/main/python-release/python_release_automation_utils.sh 
b/release/src/main/python-release/python_release_automation_utils.sh
index 2f5a9ac0a5d..337ece8ba64 100644
--- a/release/src/main/python-release/python_release_automation_utils.sh
+++ b/release/src/main/python-release/python_release_automation_utils.sh
@@ -97,7 +97,7 @@ function download_files() {
 
     wget -r -l2 --no-parent -nd -A "$BEAM_PYTHON_SDK_WHL*" $RC_STAGING_URL
   else
-    BEAM_PYTHON_SDK_ZIP="apache-beam-$VERSION.zip"
+    BEAM_PYTHON_SDK_ZIP="apache-beam-$VERSION.tar.gz"
     wget -r -l2 --no-parent -nd -A "$BEAM_PYTHON_SDK_ZIP*" $RC_STAGING_URL
   fi
 }
@@ -128,7 +128,7 @@ function get_sha512_name() {
   if [[ $1 = *"wheel"* ]]; then
     echo $(ls | grep "/*.whl.sha512$")
   else
-    echo $(ls | grep "/*.zip.sha512$")
+    echo $(ls | grep "/*.tar.gz.sha512$")
   fi
 }
 
@@ -142,7 +142,7 @@ function get_asc_name() {
   if [[ $1 = *"wheel"* ]]; then
     echo $(ls | grep "/*.whl.asc$")
   else
-    echo $(ls | grep "/*.zip.asc$")
+    echo $(ls | grep "/*.tar.gz.asc$")
   fi
 }
 
diff --git a/release/src/main/scripts/build_release_candidate.sh 
b/release/src/main/scripts/build_release_candidate.sh
index d0e6310f50a..745d726d765 100755
--- a/release/src/main/scripts/build_release_candidate.sh
+++ b/release/src/main/scripts/build_release_candidate.sh
@@ -193,19 +193,19 @@ if [[ $confirmation = "y" ]]; then
   echo "----------------Downloading Source Release-------------------"
   # GitHub strips the "v" from "v2.29.0" in naming zip and the dir inside it
   RC_DIR="beam-${RELEASE}-RC${RC_NUM}"
-  RC_ZIP="${RC_DIR}.zip"
+  RC_ZIP="${RC_DIR}.tar.gz"
   # We want to strip the -RC1 suffix from the directory name inside the zip
   RELEASE_DIR="beam-${RELEASE}"
 
-  SOURCE_RELEASE_ZIP="apache-beam-${RELEASE}-source-release.zip"
+  SOURCE_RELEASE_ZIP="apache-beam-${RELEASE}-source-release.tar.gz"
   # Check whether there is an existing dist dir
   if (svn ls "${SOURCE_RELEASE_ZIP}"); then
     echo "Removing existing ${SOURCE_RELEASE_ZIP}."
     svn delete "${SOURCE_RELEASE_ZIP}"
   fi
 
-  echo "Downloading: ${GIT_BEAM_ARCHIVE}/${RC_TAG}.zip"
-  wget ${GIT_BEAM_ARCHIVE}/${RC_TAG}.zip  -O "${RC_ZIP}"
+  echo "Downloading: ${GIT_BEAM_ARCHIVE}/${RC_TAG}.tar.gz"
+  wget ${GIT_BEAM_ARCHIVE}/${RC_TAG}.tar.gz  -O "${RC_ZIP}"
 
-  unzip "$RC_ZIP"
+  tar -xzf "$RC_ZIP"
   rm "$RC_ZIP"
@@ -268,11 +268,11 @@ if [[ $confirmation = "y" ]]; then
 
   cd "${SVN_ARTIFACTS_DIR}"
 
-  echo "------Checking Hash Value for apache-beam-${RELEASE}.zip-----"
-  sha512sum -c "apache-beam-${RELEASE}.zip.sha512"
+  echo "------Checking Hash Value for apache-beam-${RELEASE}.tar.gz-----"
+  sha512sum -c "apache-beam-${RELEASE}.tar.gz.sha512"
 
-  echo "------Signing Source Release apache-beam-${RELEASE}.zip------"
-  gpg --local-user "${SIGNING_KEY}" --armor --detach-sig 
"apache-beam-${RELEASE}.zip"
+  echo "------Signing Source Release apache-beam-${RELEASE}.tar.gz------"
+  gpg --local-user "${SIGNING_KEY}" --armor --detach-sig 
"apache-beam-${RELEASE}.tar.gz"
 
   for artifact in *.whl; do
     echo "----------Checking Hash Value for ${artifact} wheel-----------"
diff --git a/release/src/main/scripts/deploy_release_candidate_pypi.sh 
b/release/src/main/scripts/deploy_release_candidate_pypi.sh
index d1fef8c6078..9ee90212dda 100755
--- a/release/src/main/scripts/deploy_release_candidate_pypi.sh
+++ b/release/src/main/scripts/deploy_release_candidate_pypi.sh
@@ -141,8 +141,8 @@ python3 
"${SCRIPT_DIR}/download_github_actions_artifacts.py" \
 
 cd "${PYTHON_ARTIFACTS_DIR}"
 
-echo "------Checking Hash Value for 
apache-beam-${RELEASE}rc${RC_NUMBER}.zip-----"
-sha512sum -c "apache-beam-${RELEASE}rc${RC_NUMBER}.zip.sha512"
+echo "------Checking Hash Value for 
apache-beam-${RELEASE}rc${RC_NUMBER}.tar.gz-----"
+sha512sum -c "apache-beam-${RELEASE}rc${RC_NUMBER}.tar.gz.sha512"
 
 for artifact in *.whl; do
   echo "----------Checking Hash Value for ${artifact} wheel-----------"
diff --git a/release/src/main/scripts/run_rc_validation.sh 
b/release/src/main/scripts/run_rc_validation.sh
index 7f32c297966..0f2bfe4aaec 100755
--- a/release/src/main/scripts/run_rc_validation.sh
+++ b/release/src/main/scripts/run_rc_validation.sh
@@ -300,14 +300,14 @@ if [[ ("$python_leaderboard_direct" = true \
   cd ${LOCAL_BEAM_DIR}
 
   echo "---------------------Downloading Python Staging 
RC----------------------------"
-  wget 
${PYTHON_RC_DOWNLOAD_URL}/${RELEASE_VER}/python/apache-beam-${RELEASE_VER}.zip
-  wget 
${PYTHON_RC_DOWNLOAD_URL}/${RELEASE_VER}/python/apache-beam-${RELEASE_VER}.zip.sha512
-  if [[ ! -f apache-beam-${RELEASE_VER}.zip ]]; then
+  wget 
${PYTHON_RC_DOWNLOAD_URL}/${RELEASE_VER}/python/apache-beam-${RELEASE_VER}.tar.gz
+  wget 
${PYTHON_RC_DOWNLOAD_URL}/${RELEASE_VER}/python/apache-beam-${RELEASE_VER}.tar.gz.sha512
+  if [[ ! -f apache-beam-${RELEASE_VER}.tar.gz ]]; then
     { echo "Fail to download Python Staging RC files." ;exit 1; }
   fi
 
   echo "--------------------------Verifying 
Hashes------------------------------------"
-  sha512sum -c apache-beam-${RELEASE_VER}.zip.sha512
+  sha512sum -c apache-beam-${RELEASE_VER}.tar.gz.sha512
 
   echo "--------------------------Updating 
~/.m2/settings.xml-------------------------"
     cd ~
@@ -378,7 +378,7 @@ if [[ ("$python_leaderboard_direct" = true \
     pip install --upgrade pip setuptools wheel
 
     echo "--------------------------Installing Python 
SDK-------------------------------"
-    pip install apache-beam-${RELEASE_VER}.zip[gcp]
+    pip install apache-beam-${RELEASE_VER}.tar.gz[gcp]
 
     echo "----------------Starting Leaderboard with 
DirectRunner-----------------------"
     if [[ "$python_leaderboard_direct" = true ]]; then
@@ -434,7 +434,7 @@ if [[ ("$python_leaderboard_direct" = true \
       --dataset ${LEADERBOARD_DF_DATASET} \
       --runner DataflowRunner \
       --temp_location=${USER_GCS_BUCKET}/temp/ \
-      --sdk_location apache-beam-${RELEASE_VER}.zip; \
+      --sdk_location apache-beam-${RELEASE_VER}.tar.gz; \
       exec bash"
 
       echo "***************************************************************"
@@ -509,7 +509,7 @@ if [[ ("$python_leaderboard_direct" = true \
       --dataset ${GAMESTATS_DF_DATASET} \
       --runner DataflowRunner \
       --temp_location=${USER_GCS_BUCKET}/temp/ \
-      --sdk_location apache-beam-${RELEASE_VER}.zip \
+      --sdk_location apache-beam-${RELEASE_VER}.tar.gz \
       --fixed_window_duration ${FIXED_WINDOW_DURATION}; exec bash"
 
       echo "***************************************************************"
@@ -566,14 +566,14 @@ if [[ ("$python_xlang_quickstart" = true) \
   cd ${LOCAL_BEAM_DIR}
 
   echo "---------------------Downloading Python Staging 
RC----------------------------"
-  wget 
${PYTHON_RC_DOWNLOAD_URL}/${RELEASE_VER}/python/apache-beam-${RELEASE_VER}.zip
-  wget 
${PYTHON_RC_DOWNLOAD_URL}/${RELEASE_VER}/python/apache-beam-${RELEASE_VER}.zip.sha512
-  if [[ ! -f apache-beam-${RELEASE_VER}.zip ]]; then
+  wget 
${PYTHON_RC_DOWNLOAD_URL}/${RELEASE_VER}/python/apache-beam-${RELEASE_VER}.tar.gz
+  wget 
${PYTHON_RC_DOWNLOAD_URL}/${RELEASE_VER}/python/apache-beam-${RELEASE_VER}.tar.gz.sha512
+  if [[ ! -f apache-beam-${RELEASE_VER}.tar.gz ]]; then
     { echo "Failed to download Python Staging RC files." ;exit 1; }
   fi
 
   echo "--------------------------Verifying 
Hashes------------------------------------"
-  sha512sum -c apache-beam-${RELEASE_VER}.zip.sha512
+  sha512sum -c apache-beam-${RELEASE_VER}.tar.gz.sha512
 
   `which pip` install --upgrade pip
   `which pip` install --upgrade setuptools
@@ -593,7 +593,7 @@ if [[ ("$python_xlang_quickstart" = true) \
     ln -s ${LOCAL_BEAM_DIR}/sdks beam_env_${py_version}/lib/sdks
 
     echo "--------------------------Installing Python 
SDK-------------------------------"
-    pip install apache-beam-${RELEASE_VER}.zip
+    pip install apache-beam-${RELEASE_VER}.tar.gz
 
     echo '************************************************************';
     echo '* Running Python Multi-language Quickstart with DirectRunner';
@@ -672,14 +672,14 @@ if [[ ("$java_xlang_quickstart" = true) \
   cd ${LOCAL_BEAM_DIR}
 
   echo "---------------------Downloading Python Staging 
RC----------------------------"
-  wget 
${PYTHON_RC_DOWNLOAD_URL}/${RELEASE_VER}/python/apache-beam-${RELEASE_VER}.zip
-  wget 
${PYTHON_RC_DOWNLOAD_URL}/${RELEASE_VER}/python/apache-beam-${RELEASE_VER}.zip.sha512
-  if [[ ! -f apache-beam-${RELEASE_VER}.zip ]]; then
+  wget 
${PYTHON_RC_DOWNLOAD_URL}/${RELEASE_VER}/python/apache-beam-${RELEASE_VER}.tar.gz
+  wget 
${PYTHON_RC_DOWNLOAD_URL}/${RELEASE_VER}/python/apache-beam-${RELEASE_VER}.tar.gz.sha512
+  if [[ ! -f apache-beam-${RELEASE_VER}.tar.gz ]]; then
     { echo "Failed to download Python Staging RC files." ;exit 1; }
   fi
 
   echo "--------------------------Verifying 
Hashes------------------------------------"
-  sha512sum -c apache-beam-${RELEASE_VER}.zip.sha512
+  sha512sum -c apache-beam-${RELEASE_VER}.tar.gz.sha512
 
   `which pip` install --upgrade pip
   `which pip` install --upgrade setuptools
@@ -699,7 +699,7 @@ if [[ ("$java_xlang_quickstart" = true) \
     ln -s ${LOCAL_BEAM_DIR}/sdks beam_env_${py_version}/lib/sdks
 
     echo "--------------------------Installing Python 
SDK-------------------------------"
-    pip install apache-beam-${RELEASE_VER}.zip[dataframe]
+    pip install apache-beam-${RELEASE_VER}.tar.gz[dataframe]
 
    # Deactivating in the main shell. We will reactivate the virtual 
environment in new shells
     # for the expansion service and the job server.
@@ -768,14 +768,14 @@ if [[ ("$python_xlang_kafka_taxi_dataflow" = true
   cd ${LOCAL_BEAM_DIR}
 
   echo "---------------------Downloading Python Staging 
RC----------------------------"
-  wget 
${PYTHON_RC_DOWNLOAD_URL}/${RELEASE_VER}/python/apache-beam-${RELEASE_VER}.zip
-  wget 
${PYTHON_RC_DOWNLOAD_URL}/${RELEASE_VER}/python/apache-beam-${RELEASE_VER}.zip.sha512
-  if [[ ! -f apache-beam-${RELEASE_VER}.zip ]]; then
+  wget 
${PYTHON_RC_DOWNLOAD_URL}/${RELEASE_VER}/python/apache-beam-${RELEASE_VER}.tar.gz
+  wget 
${PYTHON_RC_DOWNLOAD_URL}/${RELEASE_VER}/python/apache-beam-${RELEASE_VER}.tar.gz.sha512
+  if [[ ! -f apache-beam-${RELEASE_VER}.tar.gz ]]; then
     { echo "Fail to download Python Staging RC files." ;exit 1; }
   fi
 
   echo "--------------------------Verifying 
Hashes------------------------------------"
-  sha512sum -c apache-beam-${RELEASE_VER}.zip.sha512
+  sha512sum -c apache-beam-${RELEASE_VER}.tar.gz.sha512
 
   `which pip` install --upgrade pip
   `which pip` install --upgrade setuptools
@@ -807,7 +807,7 @@ if [[ ("$python_xlang_kafka_taxi_dataflow" = true
     ln -s ${LOCAL_BEAM_DIR}/sdks beam_env_${py_version}/lib/sdks
 
     echo "--------------------------Installing Python 
SDK-------------------------------"
-    pip install apache-beam-${RELEASE_VER}.zip[gcp]
+    pip install apache-beam-${RELEASE_VER}.tar.gz[gcp]
 
     echo "----------------Starting XLang Kafka Taxi with 
DataflowRunner---------------------"
     if [[ "$python_xlang_kafka_taxi_dataflow" = true ]]; then
@@ -837,7 +837,7 @@ if [[ ("$python_xlang_kafka_taxi_dataflow" = true
       --temp_location=${USER_GCS_BUCKET}/temp/ \
       --with_metadata \
       --beam_services=\"{\\\"sdks:java:io:expansion-service:shadowJar\\\": 
\\\"${KAFKA_EXPANSION_SERVICE_JAR}\\\"}\" \
-      --sdk_location apache-beam-${RELEASE_VER}.zip; \
+      --sdk_location apache-beam-${RELEASE_VER}.tar.gz; \
       exec bash"
 
       echo "***************************************************************"
@@ -882,7 +882,7 @@ if [[ ("$python_xlang_kafka_taxi_dataflow" = true
       --temp_location=${USER_GCS_BUCKET}/temp/ \
       --output_topic projects/${USER_GCP_PROJECT}/topics/${SQL_TAXI_TOPIC} \
       
--beam_services=\"{\\\":sdks:java:extensions:sql:expansion-service:shadowJar\\\":
 \\\"${SQL_EXPANSION_SERVICE_JAR}\\\"}\" \
-      --sdk_location apache-beam-${RELEASE_VER}.zip; \
+      --sdk_location apache-beam-${RELEASE_VER}.tar.gz; \
       exec bash"
 
       echo "***************************************************************"
diff --git a/sdks/python/scripts/run_snapshot_publish.sh 
b/sdks/python/scripts/run_snapshot_publish.sh
index 6379e6f2108..bc379077349 100755
--- a/sdks/python/scripts/run_snapshot_publish.sh
+++ b/sdks/python/scripts/run_snapshot_publish.sh
@@ -21,7 +21,7 @@ BUCKET=gs://beam-python-nightly-snapshots
 VERSION=$(awk '/__version__/{print $3}' 
$WORKSPACE/sdks/python/apache_beam/version.py)
 VERSION=$(echo $VERSION | cut -c 2- | rev | cut -c 2- | rev)
 time=$(date +"%Y-%m-%dT%H:%M:%S")
-SNAPSHOT="apache-beam-$VERSION-$time.zip"
+SNAPSHOT="apache-beam-$VERSION-$time.tar.gz"
 
 DEP_SNAPSHOT_ROOT="$BUCKET/dependency_requirements_snapshot"
 DEP_SNAPSHOT_FILE_NAME="beam-py-requirements-$time.txt"
@@ -30,8 +30,8 @@ DEP_SNAPSHOT_FILE_NAME="beam-py-requirements-$time.txt"
 # and located under Gradle build directory.
 cd $WORKSPACE/sdks/python/build
 
-# Rename the file to be apache-beam-{VERSION}-{datetime}.zip
-for file in "apache-beam-$VERSION*.zip"; do
+# Rename the file to be apache-beam-{VERSION}-{datetime}.tar.gz
+for file in "apache-beam-$VERSION*.tar.gz"; do
   mv $file $SNAPSHOT
 done
 

Reply via email to