[
https://issues.apache.org/jira/browse/BEAM-3906?focusedWorklogId=129915&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-129915
]
ASF GitHub Bot logged work on BEAM-3906:
----------------------------------------
Author: ASF GitHub Bot
Created on: 01/Aug/18 19:22
Start Date: 01/Aug/18 19:22
Worklog Time Spent: 10m
Work Description: aaltay closed pull request #4943: [BEAM-3906] Automate
Validation Against Python Wheel
URL: https://github.com/apache/beam/pull/4943
This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:
As this is a foreign pull request (from a fork), the diff is supplied
below (as it won't show otherwise due to GitHub magic):
diff --git a/.test-infra/jenkins/job_ReleaseCandidate_Python.groovy
b/.test-infra/jenkins/job_ReleaseCandidate_Python.groovy
index 0102e0b7fb1..4df59b146e6 100644
--- a/.test-infra/jenkins/job_ReleaseCandidate_Python.groovy
+++ b/.test-infra/jenkins/job_ReleaseCandidate_Python.groovy
@@ -21,9 +21,6 @@ import CommonJobProperties as commonJobProperties
job('beam_PostRelease_Python_Candidate') {
description('Runs verification of the Python release candidate.')
- // Execute concurrent builds if necessary.
- concurrentBuild()
-
// Set common parameters.
commonJobProperties.setTopLevelMainJobProperties(delegate)
@@ -35,8 +32,7 @@ job('beam_PostRelease_Python_Candidate') {
// Execute shell command to test Python SDK.
steps {
- shell('cd ' + commonJobProperties.checkoutDir +
- ' && bash
release/src/main/groovy/run_release_candidate_python_quickstart.sh' +
- ' && bash
release/src/main/groovy/run_release_candidate_python_mobile_gaming.sh')
+ shell('cd ' + commonJobProperties.checkoutDir +
+ ' && bash
release/src/main/python-release/python_release_automation.sh')
}
}
diff --git a/release/src/main/groovy/python_release_automation_utils.sh
b/release/src/main/groovy/python_release_automation_utils.sh
deleted file mode 100644
index 554f3aa5693..00000000000
--- a/release/src/main/groovy/python_release_automation_utils.sh
+++ /dev/null
@@ -1,135 +0,0 @@
-#!/bin/bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-set -e
-set -v
-
-#######################################
-# Print Separators.
-# Arguments:
-# Info to be printed.
-# Outputs:
-# Writes info to stdout.
-#######################################
-function print_separator() {
- echo
"############################################################################"
- echo $1
- echo
"############################################################################"
-}
-
-#######################################
-# Update gcloud version.
-# Arguments:
-# None
-#######################################
-function update_gcloud() {
- curl
https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-sdk-189.0.0-linux-x86_64.tar.gz
\
- --output gcloud.tar.gz
- tar xf gcloud.tar.gz
- ./google-cloud-sdk/install.sh --quiet
- . ./google-cloud-sdk/path.bash.inc
- gcloud components update --quiet || echo 'gcloud components update failed'
- gcloud -v
-}
-
-#######################################
-# Get Python SDK version from sdk/python/apache_beam/version.py.
-# Arguments:
-# None
-# Outputs:
-# Writes version to stdout.
-#######################################
-function get_version() {
- version=$(awk '/__version__/{print $3}' sdks/python/apache_beam/version.py)
- if [[ $version = *".dev"* ]]; then
- echo $version | cut -c 2- | rev | cut -d'.' -f2- | rev
- else
- echo $version
- fi
-}
-
-#######################################
-# Publish data to Pubsub topic for streaming wordcount examples.
-# Arguments:
-# None
-#######################################
-function run_pubsub_publish(){
- words=("hello world!", "I like cats!", "Python", "hello Python", "hello
Python")
- for word in ${words[@]}; do
- gcloud pubsub topics publish $PUBSUB_TOPIC1 --message "$word"
- done
- sleep 10
-}
-
-#######################################
-# Pull data from Pubsub.
-# Arguments:
-# None
-#######################################
-function run_pubsub_pull() {
- gcloud pubsub subscriptions pull --project=$PROJECT_ID
$PUBSUB_SUBSCRIPTION --limit=100 --auto-ack
-}
-
-#######################################
-# Create Pubsub topics and subscription.
-# Arguments:
-# None
-#######################################
-function create_pubsub() {
- gcloud pubsub topics create --project=$PROJECT_ID $PUBSUB_TOPIC1
- gcloud pubsub topics create --project=$PROJECT_ID $PUBSUB_TOPIC2
- gcloud pubsub subscriptions create --project=$PROJECT_ID
$PUBSUB_SUBSCRIPTION --topic $PUBSUB_TOPIC2
-}
-
-#######################################
-# Remove Pubsub topics and subscription.
-# Arguments:
-# None
-#######################################
-function cleanup_pubsub() {
- gcloud pubsub topics delete --project=$PROJECT_ID $PUBSUB_TOPIC1
- gcloud pubsub topics delete --project=$PROJECT_ID $PUBSUB_TOPIC2
- gcloud pubsub subscriptions delete --project=$PROJECT_ID
$PUBSUB_SUBSCRIPTION
-}
-
-
-# Python RC configurations
-VERSION=$(get_version)
-CANDIDATE_URL="https://dist.apache.org/repos/dist/dev/beam/$VERSION/"
-BEAM_PYTHON_SDK="apache-beam-$VERSION-python.zip"
-
-# Cloud Configurations
-PROJECT_ID='apache-beam-testing'
-BUCKET_NAME='temp-storage-for-release-validation-tests/nightly-snapshot-validation'
-TEMP_DIR='/tmp'
-DATASET='beam_postrelease_mobile_gaming'
-NUM_WORKERS=1
-
-# Quickstart configurations
-SHA1_FILE_NAME="apache-beam-$VERSION-python.zip.sha1"
-ASC_FILE_NAME="apache-beam-$VERSION-python.zip.asc"
-
-WORDCOUNT_OUTPUT='wordcount_direct.txt'
-PUBSUB_TOPIC1='wordstream-python-topic-1'
-PUBSUB_TOPIC2='wordstream-python-topic-2'
-PUBSUB_SUBSCRIPTION='wordstream-python-sub2'
-
-# Mobile Gaming Configurations
-DATASET='beam_postrelease_mobile_gaming'
-USERSCORE_OUTPUT_PREFIX='python-userscore_result'
diff --git
a/release/src/main/groovy/run_release_candidate_python_mobile_gaming.sh
b/release/src/main/groovy/run_release_candidate_python_mobile_gaming.sh
deleted file mode 100755
index fc6683386a7..00000000000
--- a/release/src/main/groovy/run_release_candidate_python_mobile_gaming.sh
+++ /dev/null
@@ -1,188 +0,0 @@
-#!/bin/bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This file will verify Apache/Beam release candidate python by following
steps:
-#
-# 1. Create a new virtualenv and install the SDK
-# 2. Run UserScore examples with DirectRunner
-# 3. Run UserScore examples with DataflowRunner
-# 4. Run HourlyTeamScore on DirectRunner
-# 5. Run HourlyTeamScore on DataflowRunner
-#
-
-set -e
-set -v
-
-source release/src/main/groovy/python_release_automation_utils.sh
-
-#######################################
-# Remove temp directory when complete.
-# Globals:
-# TMPDIR
-# Arguments:
-# None
-#######################################
-function complete() {
- print_separator "Validation $1"
- rm -rf $TMPDIR
-}
-
-#######################################
-# Verify results of user_score.
-# Globals:
-# BUCKET_NAME
-# Arguments:
-# $1: Runner - direct, dataflow
-#######################################
-function verify_user_score() {
- expected_output_file_name="$USERSCORE_OUTPUT_PREFIX-$1-runner.txt"
- actual_output_files=$(ls)
- if [[ $1 = *"dataflow"* ]]; then
- actual_output_files=$(gsutil ls gs://$BUCKET_NAME)
-
expected_output_file_name="gs://$BUCKET_NAME/$expected_output_file_name"
- fi
- echo $actual_output_files
- if [[ $actual_output_files != *$expected_output_file_name* ]]
- then
- echo "ERROR: The userscore example failed on $1-runner".
- complete "failed when running userscore example with $1-runner."
- exit 1
- fi
-
- if [[ $1 = *"dataflow"* ]]; then
- gsutil rm $expected_output_file_name*
- fi
- echo "SUCCEED: user_score successfully run on $1-runner."
-}
-
-#######################################
-# Verify results of hourly_team_score.
-# Globals:
-# DATASET
-# Arguments:
-# Runner - direct, dataflow
-#######################################
-function verify_hourly_team_score() {
- retry=3
- should_see='AntiqueBrassPlatypus'
- while(( $retry >= 0 )); do
- if [[ $retry > 0 ]]; then
- bq_pull_result=$(bq head -n 100
$DATASET.hourly_team_score_python_$1)
- if [[ $bq_pull_result = *"$should_see"* ]]; then
- echo "SUCCEED: hourly_team_score example successful run on
$1-runner"
- break
- else
- retry=$(($retry-1))
- echo "Did not find team scores, retry left: $retry"
- sleep 15
- fi
- else
- echo "FAILED: HourlyTeamScore example failed running on $1-runner.
\
- Did not found scores of team $should_see in
$DATASET.leader_board"
- complete "FAILED"
- exit 1
- fi
- done
-}
-
-
-print_separator "Start Mobile Gaming Examples"
-echo "SDK version: $VERSION"
-
-TMPDIR=$(mktemp -d)
-echo $TMPDIR
-pushd $TMPDIR
-
-#
-# 1. Download files from RC staging location, install python sdk
-#
-
-wget $CANDIDATE_URL$BEAM_PYTHON_SDK
-print_separator "Creating new virtualenv and installing the SDK"
-virtualenv temp_virtualenv
-. temp_virtualenv/bin/activate
-gcloud_version=$(gcloud --version | head -1 | awk '{print $4}')
-if [[ "$gcloud_version" < "189" ]]; then
- update_gcloud
-fi
-pip install $BEAM_PYTHON_SDK[gcp]
-
-
-#
-# 2. Run UserScore with DirectRunner
-#
-
-print_separator "Running userscore example with DirectRunner"
-output_file_name="$USERSCORE_OUTPUT_PREFIX-direct-runner.txt"
-python -m apache_beam.examples.complete.game.user_score \
---output=$output_file_name \
---project=$PROJECT_ID \
---dataset=$DATASET \
---input=gs://$BUCKET_NAME/5000_gaming_data.csv
-
-verify_user_score "direct"
-
-
-#
-# 3. Run UserScore with DataflowRunner
-#
-
-print_separator "Running userscore example with DataflowRunner"
-output_file_name="$USERSCORE_OUTPUT_PREFIX-dataflow-runner.txt"
-python -m apache_beam.examples.complete.game.user_score \
---project=$PROJECT_ID \
---runner=DataflowRunner \
---temp_location=gs://$BUCKET_NAME/temp/ \
---sdk_location=$BEAM_PYTHON_SDK \
---input=gs://$BUCKET_NAME/5000_gaming_data.csv \
---output=gs://$BUCKET_NAME/$output_file_name
-
-verify_user_score "dataflow"
-
-
-#
-# 4. Run HourlyTeamScore with DirectRunner
-#
-
-print_separator "Running HourlyTeamScore example with DirectRunner"
-python -m apache_beam.examples.complete.game.hourly_team_score \
---project=$PROJECT_ID \
---dataset=$DATASET \
---input=gs://$BUCKET_NAME/5000_gaming_data.csv \
---table="hourly_team_score_python_direct"
-
-verify_hourly_team_score "direct"
-
-
-#
-# 5. Run HourlyTeamScore with DataflowRunner
-#
-
-print_separator "Running HourlyTeamScore example with DataflowRunner"
-python -m apache_beam.examples.complete.game.hourly_team_score \
---project=$PROJECT_ID \
---dataset=$DATASET \
---runner=DataflowRunner \
---temp_location=gs://$BUCKET_NAME/temp/ \
---sdk_location $BEAM_PYTHON_SDK \
---input=gs://$BUCKET_NAME/5000_gaming_data.csv \
---table="hourly_team_score_python_dataflow"
-
-verify_hourly_team_score "dataflow"
-
-complete "SUCCEED: Mobile Gaming Verification Complete"
diff --git a/release/src/main/groovy/run_release_candidate_python_quickstart.sh
b/release/src/main/groovy/run_release_candidate_python_quickstart.sh
deleted file mode 100755
index 914c2f24a31..00000000000
--- a/release/src/main/groovy/run_release_candidate_python_quickstart.sh
+++ /dev/null
@@ -1,231 +0,0 @@
-#!/bin/bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This file will verify Apache/Beam release candidate python by following
steps:
-#
-# 1. Download files from RC staging location
-# 2. Verify hashes
-# 3. Create a new virtualenv and install the SDK
-# 4. Run Wordcount examples with DirectRunner
-# 5. Run Wordcount examples with DataflowRunner
-# 6. Run streaming wordcount on DirectRunner
-# 7. Run streaming wordcount on DataflowRunner
-#
-
-set -e
-set -v
-
-source release/src/main/groovy/python_release_automation_utils.sh
-
-#######################################
-# Remove temp directory when complete.
-# Globals:
-# TMPDIR
-# Arguments:
-# None
-#######################################
-function complete() {
- print_separator "Validation $1"
- rm -rf $TMPDIR
-}
-
-#######################################
-# Verify results of hourly_team_score.
-# Globals:
-# DATASET
-# Arguments:
-# $1 - runner type: DirectRunner, DataflowRunner
-# $2 - pid: the pid of running pipeline
-# $3 - running_job (DataflowRunner only): the job id of streaming pipeline
running on DataflowRunner
-#######################################
-function verify_steaming_result() {
- retry=3
- should_see="Python: "
- while(( $retry > 0 )); do
- pull_result=$(run_pubsub_pull)
- if [[ $pull_result = *"$should_see"* ]]; then
- echo "SUCCEED: The streaming wordcount example running
successfully on $1."
- break
- else
- if [[ $retry > 0 ]]; then
- retry=$(($retry-1))
- echo "retry left: $retry"
- sleep 15
- else
- echo "ERROR: The streaming wordcount example failed on $1."
- cleanup_pubsub
- kill -9 $2
- if [[ $1 = "DataflowRunner" ]]; then
- gcloud dataflow jobs cancel $3
- fi
- complete "failed when running streaming wordcount example with
$1."
- exit 1
- fi
- fi
- done
-}
-
-print_separator "Start Quickstarts Examples"
-echo "SDK version: $VERSION"
-
-TMPDIR=$(mktemp -d)
-echo $TMPDIR
-pushd $TMPDIR
-
-#
-# 1. Download files from RC staging location
-#
-
-wget $CANDIDATE_URL$SHA1_FILE_NAME
-wget $CANDIDATE_URL$ASC_FILE_NAME
-wget $CANDIDATE_URL$BEAM_PYTHON_SDK
-
-#
-# 2. Verify sha1, md5 hashes and gpg signature
-#
-
-print_separator "Checking sha1 and md5 hashes"
-hash_check=$(sha1sum -c $SHA1_FILE_NAME | head -1 |awk '{print $2}')
-if [[ "$hash_check" != "OK" ]]
-then
- echo "ERROR: The sha1 hash doesn't match."
- complete "The sha1 hash doesn't match."
- exit 1
-fi
-echo "SUCCEED: Hashes verification completed."
-
-wget https://dist.apache.org/repos/dist/dev/beam/KEYS
-gpg --import KEYS
-gpg --verify $ASC_FILE_NAME $BEAM_PYTHON_SDK
-
-
-#
-# 3. create a new virtualenv and install the SDK
-#
-
-print_separator "Creating new virtualenv and installing the SDK"
-virtualenv temp_virtualenv
-. temp_virtualenv/bin/activate
-gcloud_version=$(gcloud --version | head -1 | awk '{print $4}')
-if [[ "$gcloud_version" < "189" ]]; then
- update_gcloud
-fi
-pip install $BEAM_PYTHON_SDK[gcp]
-
-
-#
-# 4. Run wordcount with DirectRunner
-#
-
-print_separator "Running wordcount example with DirectRunner"
-python -m apache_beam.examples.wordcount --output wordcount_direct.txt
-if ls wordcount_direct.txt* 1> /dev/null 2>&1; then
- echo "Found output file(s):"
- ls wordcount_direct.txt*
-else
- echo "ERROR: output file not found."
- complete "failed when running wordcount example with DirectRunner."
- exit 1
-fi
-echo "SUCCEED: wordcount successfully run on DirectRunner."
-
-
-#
-# 5. Run wordcount with DataflowRunner
-#
-
-print_separator "Running wordcount example with DataflowRunner "
-python -m apache_beam.examples.wordcount \
- --output gs://$BUCKET_NAME/$WORDCOUNT_OUTPUT \
- --staging_location gs://$BUCKET_NAME$TEMP_DIR \
- --temp_location gs://$BUCKET_NAME$TEMP_DIR \
- --runner DataflowRunner \
- --job_name wordcount \
- --project $PROJECT_ID \
- --num_workers $NUM_WORKERS \
- --sdk_location $BEAM_PYTHON_SDK
-
-# verify results.
-wordcount_output_in_gcs="gs://$BUCKET_NAME/$WORDCOUNT_OUTPUT"
-gcs_pull_result=$(gsutil ls gs://$BUCKET_NAME)
-if [[ $gcs_pull_result != *$wordcount_output_in_gcs* ]]; then
- echo "ERROR: The wordcount example failed on DataflowRunner".
- complete "failed when running wordcount example with DataflowRunner."
- exit 1
-fi
-
-# clean output files from GCS
-gsutil rm gs://$BUCKET_NAME/$WORDCOUNT_OUTPUT-*
-echo "SUCCEED: wordcount successfully run on DataflowRunner."
-
-
-#
-# 6. Run Streaming wordcount with DirectRunner
-#
-
-create_pubsub
-print_separator "Running Streaming wordcount example with DirectRunner"
-python -m apache_beam.examples.streaming_wordcount \
---input_topic projects/$PROJECT_ID/topics/$PUBSUB_TOPIC1 \
---output_topic projects/$PROJECT_ID/topics/$PUBSUB_TOPIC2 \
---streaming &
-pid=$!
-sleep 15
-
-# verify result
-run_pubsub_publish
-verify_steaming_result "DirectRunner" $pid
-
-# Delete the pubsub topics and subscription before running the second job.
Will recreate them in the second job.
-cleanup_pubsub
-kill -9 $pid
-sleep 10
-
-
-#
-# 7. Run Streaming Wordcount with DataflowRunner
-#
-
-create_pubsub
-print_separator "Running Streaming wordcount example with DataflowRunner "
-python -m apache_beam.examples.streaming_wordcount \
- --streaming \
- --job_name pyflow-wordstream-candidate \
- --project $PROJECT_ID \
- --runner DataflowRunner \
- --input_topic projects/$PROJECT_ID/topics/$PUBSUB_TOPIC1 \
- --output_topic projects/$PROJECT_ID/topics/$PUBSUB_TOPIC2 \
- --staging_location gs://$BUCKET_NAME$TEMP_DIR \
- --temp_location gs://$BUCKET_NAME$TEMP_DIR \
- --num_workers $NUM_WORKERS \
- --sdk_location $BEAM_PYTHON_SDK &
-
-pid=$!
-sleep 60
-running_job=$(gcloud dataflow jobs list | grep pyflow-wordstream-candidate |
grep Running | cut -d' ' -f1)
-
-# verify result
-run_pubsub_publish
-sleep 420
-verify_steaming_result "DataflowRunner" $pid $running_job
-
-kill -9 $pid
-gcloud dataflow jobs cancel $running_job
-cleanup_pubsub
-
-complete "SUCCEED: Quickstart Verification Complete"
diff --git a/release/src/main/python-release/python_release_automation.sh
b/release/src/main/python-release/python_release_automation.sh
new file mode 100644
index 00000000000..bc81cf54b88
--- /dev/null
+++ b/release/src/main/python-release/python_release_automation.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+source
release/src/main/python-release/run_release_candidate_python_quickstart.sh
+source
release/src/main/python-release/run_release_candidate_python_mobile_gaming.sh
+
+run_release_candidate_python_quickstart "tar"
+run_release_candidate_python_mobile_gaming "tar"
+run_release_candidate_python_quickstart "wheel"
+run_release_candidate_python_mobile_gaming "wheel"
diff --git a/release/src/main/python-release/python_release_automation_utils.sh
b/release/src/main/python-release/python_release_automation_utils.sh
new file mode 100644
index 00000000000..91d5977839b
--- /dev/null
+++ b/release/src/main/python-release/python_release_automation_utils.sh
@@ -0,0 +1,297 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+set -e
+set -v
+
+#######################################
+# Print Separators.
+# Arguments:
+# Info to be printed.
+# Outputs:
+# Writes info to stdout.
+#######################################
+function print_separator() {
+ echo
"############################################################################"
+ echo $1
+ echo
"############################################################################"
+}
+
+
+#######################################
+# Update gcloud version.
+# Arguments:
+# None
+#######################################
+function update_gcloud() {
+ curl
https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-sdk-189.0.0-linux-x86_64.tar.gz
\
+ --output gcloud.tar.gz
+ tar xf gcloud.tar.gz
+ ./google-cloud-sdk/install.sh --quiet
+ . ./google-cloud-sdk/path.bash.inc
+ gcloud components update --quiet || echo 'gcloud components update failed'
+ gcloud -v
+}
+
+
+#######################################
+# Get Python SDK version from sdk/python/apache_beam/version.py.
+# Arguments:
+# None
+# Outputs:
+# Writes releasing version to stdout.
+# e.g. __version__ = '2.5.0' => 2.5.0
+# e.g. __version__ = '2.6.0.dev' => 2.5.0
+#######################################
+function get_version() {
+ version=$(awk '/__version__/{print $3}' sdks/python/apache_beam/version.py)
+ version=$(echo $version | cut -c 2- | rev | cut -c 2- | rev)
+ if [[ $version = *".dev"* ]]; then
+ version=$(echo $version | rev | cut -d'.' -f2- | rev)
+ IFS='.' read -r -a array <<< "$version"
+ minor_version=$((${array[1]}-1))
+ version="${array[0]}.$minor_version.${array[2]}"
+ fi
+ echo $version
+}
+
+
+#######################################
+# Download files including SDK, SHA512 and ASC.
+# Globals:
+# BEAM_PYTHON_SDK*
+# Arguments:
+# $1 - SDK type: tar, wheel
+#######################################
+function download_files() {
+ if [[ $1 = *"wheel"* ]]; then
+ wget -r -l2 --no-parent -nd -A "$BEAM_PYTHON_SDK_WHL*" $RC_STAGING_URL
+ else
+ wget -r -l2 --no-parent -nd -A "$BEAM_PYTHON_SDK_ZIP*" $RC_STAGING_URL
+ fi
+}
+
+
+#######################################
+# Stdout python sdk name.
+# Globals:
+# BEAM_PYTHON_SDK_ZIP
+# Arguments:
+# $1 - SDK type: tar, wheel
+#######################################
+function get_sdk_name() {
+ sdk_name=$BEAM_PYTHON_SDK_ZIP
+ if [[ $1 = *"wheel"* ]]; then
+ sdk_name=$(ls | grep "/*.whl$")
+ fi
+ echo $sdk_name
+}
+
+
+#######################################
+# Stdout sha512 file name.
+# Arguments:
+# $1 - SDK type: tar, wheel
+#######################################
+function get_sha512_name() {
+ if [[ $1 = *"wheel"* ]]; then
+ echo $(ls | grep "/*.whl.sha512$")
+ else
+ echo $(ls | grep "/*.zip.sha512$")
+ fi
+}
+
+
+#######################################
+# Stdout ASC file name.
+# Arguments:
+# $1 - SDK type: tar, wheel
+#######################################
+function get_asc_name() {
+ if [[ $1 = *"wheel"* ]]; then
+ echo $(ls | grep "/*.whl.asc$")
+ else
+ echo $(ls | grep "/*.zip.asc$")
+ fi
+}
+
+
+#######################################
+# Publish data to Pubsub topic for streaming wordcount examples.
+# Arguments:
+# None
+#######################################
+function run_pubsub_publish(){
+ words=("hello world!", "I like cats!", "Python", "hello Python", "hello
Python")
+ for word in ${words[@]}; do
+ gcloud pubsub topics publish $PUBSUB_TOPIC1 --message "$word"
+ done
+ sleep 10
+}
+
+#######################################
+# Pull data from Pubsub.
+# Arguments:
+# None
+#######################################
+function run_pubsub_pull() {
+ gcloud pubsub subscriptions pull --project=$PROJECT_ID $PUBSUB_SUBSCRIPTION
--limit=100 --auto-ack
+}
+
+
+#######################################
+# Create Pubsub topics and subscription.
+# Arguments:
+# None
+#######################################
+function create_pubsub() {
+ gcloud pubsub topics create --project=$PROJECT_ID $PUBSUB_TOPIC1
+ gcloud pubsub topics create --project=$PROJECT_ID $PUBSUB_TOPIC2
+ gcloud pubsub subscriptions create --project=$PROJECT_ID
$PUBSUB_SUBSCRIPTION --topic $PUBSUB_TOPIC2
+}
+
+
+#######################################
+# Remove Pubsub topics and subscription.
+# Arguments:
+# None
+#######################################
+function cleanup_pubsub() {
+ gcloud pubsub topics delete --project=$PROJECT_ID $PUBSUB_TOPIC1
+ gcloud pubsub topics delete --project=$PROJECT_ID $PUBSUB_TOPIC2
+ gcloud pubsub subscriptions delete --project=$PROJECT_ID $PUBSUB_SUBSCRIPTION
+}
+
+
+#######################################
+# Verify results of streaming_wordcount.
+# Arguments:
+# $1 - runner type: DirectRunner, DataflowRunner
+# $2 - pid: the pid of running pipeline
+# $3 - running_job (DataflowRunner only): the job id of streaming pipeline
running on DataflowRunner
+#######################################
+function verify_steaming_result() {
+ retry=3
+ should_see="Python: "
+ while(( $retry > 0 )); do
+ pull_result=$(run_pubsub_pull)
+ if [[ $pull_result = *"$should_see"* ]]; then
+ echo "SUCCEED: The streaming wordcount example running successfully on
$1."
+ break
+ else
+ if [[ $retry > 0 ]]; then
+ retry=$(($retry-1))
+ echo "retry left: $retry"
+ sleep 15
+ else
+ echo "ERROR: The streaming wordcount example failed on $1."
+ cleanup_pubsub
+ kill -9 $2
+ if [[ $1 = "DataflowRunner" ]]; then
+ gcloud dataflow jobs cancel $3
+ fi
+ complete "failed when running streaming wordcount example with $1."
+ exit 1
+ fi
+ fi
+ done
+}
+
+
+#######################################
+# Verify results of user_score.
+# Globals:
+# BUCKET_NAME
+# Arguments:
+# $1: Runner - direct, dataflow
+#######################################
+function verify_user_score() {
+ expected_output_file_name="$USERSCORE_OUTPUT_PREFIX-$1-runner.txt"
+ actual_output_files=$(ls)
+ if [[ $1 = *"dataflow"* ]]; then
+ actual_output_files=$(gsutil ls gs://$BUCKET_NAME)
+ expected_output_file_name="gs://$BUCKET_NAME/$expected_output_file_name"
+ fi
+ echo $actual_output_files
+ if [[ $actual_output_files != *$expected_output_file_name* ]]
+ then
+ echo "ERROR: The userscore example failed on $1-runner".
+ complete "failed when running userscore example with $1-runner."
+ exit 1
+ fi
+
+ if [[ $1 = *"dataflow"* ]]; then
+ gsutil rm $expected_output_file_name*
+ fi
+ echo "SUCCEED: user_score successfully run on $1-runner."
+}
+
+
+#######################################
+# Verify results of hourly_team_score.
+# Globals:
+# DATASET
+# Arguments:
+# Runner - direct, dataflow
+#######################################
+function verify_hourly_team_score() {
+ retry=3
+ should_see='AntiqueBrassPlatypus'
+ while(( $retry >= 0 )); do
+ if [[ $retry > 0 ]]; then
+ bq_pull_result=$(bq head -n 500 $DATASET.hourly_team_score_python_$1)
+ if [[ $bq_pull_result = *"$should_see"* ]]; then
+ echo "SUCCEED: hourly_team_score example successful run on $1-runner"
+ break
+ else
+ retry=$(($retry-1))
+ echo "Did not find team scores, retry left: $retry"
+ sleep 15
+ fi
+ else
+ echo "FAILED: HourlyTeamScore example failed running on $1-runner. \
+ Did not found scores of team $should_see in $DATASET.leader_board"
+ complete "FAILED"
+ exit 1
+ fi
+ done
+}
+
+
+# Python RC configurations
+VERSION=$(get_version)
+RC_STAGING_URL="https://dist.apache.org/repos/dist/dev/beam/$VERSION/"
+BEAM_PYTHON_SDK_ZIP="apache-beam-$VERSION.zip"
+BEAM_PYTHON_SDK_WHL="apache_beam-$VERSION*-cp27-cp27mu-manylinux1_x86_64.whl"
+
+# Cloud Configurations
+PROJECT_ID='apache-beam-testing'
+BUCKET_NAME='temp-storage-for-release-validation-tests/nightly-snapshot-validation'
+TEMP_DIR='/tmp'
+DATASET='beam_postrelease_mobile_gaming'
+NUM_WORKERS=1
+
+WORDCOUNT_OUTPUT='wordcount_direct.txt'
+PUBSUB_TOPIC1='wordstream-python-topic-1'
+PUBSUB_TOPIC2='wordstream-python-topic-2'
+PUBSUB_SUBSCRIPTION='wordstream-python-sub2'
+
+# Mobile Gaming Configurations
+DATASET='beam_postrelease_mobile_gaming'
+USERSCORE_OUTPUT_PREFIX='python-userscore_result'
diff --git
a/release/src/main/python-release/run_release_candidate_python_mobile_gaming.sh
b/release/src/main/python-release/run_release_candidate_python_mobile_gaming.sh
new file mode 100755
index 00000000000..e9a8566415d
--- /dev/null
+++
b/release/src/main/python-release/run_release_candidate_python_mobile_gaming.sh
@@ -0,0 +1,187 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This file will verify the Apache Beam Python release candidate by following these steps:
+#
+# 1. Create a new virtualenv and install the SDK
+# 2. Run UserScore examples with DirectRunner
+# 3. Run UserScore examples with DataflowRunner
+# 4. Run HourlyTeamScore on DirectRunner
+# 5. Run HourlyTeamScore on DataflowRunner
+#
+
+set -e
+set -v
+
+source release/src/main/python-release/python_release_automation_utils.sh
+
# Default to the source-zip SDK name; overwritten later with the exact
# downloaded artifact name inside run_release_candidate_python_mobile_gaming.
BEAM_PYTHON_SDK="${BEAM_PYTHON_SDK_ZIP}"
+
+
#######################################
# Remove temp directory when complete.
# Globals:
#   TMPDIR
# Arguments:
#   $1 - status message to print in the separator
#######################################
function complete() {
  print_separator "Validation $1"
  # Quote and use '--' so a TMPDIR containing spaces or starting with '-'
  # cannot expand into unintended 'rm -rf' targets.
  rm -rf -- "$TMPDIR"
}
+
+
#######################################
# Download files from RC staging location, install python sdk.
# Updates gcloud first when the installed version is older than 189.
# Globals:
#   BEAM_PYTHON_SDK
# Arguments:
#   None
#######################################
function install_sdk() {
  print_separator "Creating new virtualenv and installing the SDK"
  virtualenv temp_virtualenv
  . temp_virtualenv/bin/activate
  # Fourth field of e.g. "Google Cloud SDK 189.0.0" is the version string.
  gcloud_version=$(gcloud --version | head -1 | awk '{print $4}')
  # Compare the major version numerically; the original lexicographic string
  # comparison ("90.0.0" > "189") would skip required updates. An empty
  # version (gcloud probe failed) also triggers the update, as before.
  if [[ -z "$gcloud_version" ]] || (( ${gcloud_version%%.*} < 189 )); then
    update_gcloud
  fi
  pip install google-compute-engine
  # Quote so the '[gcp]' extras suffix is not treated as a shell glob.
  pip install "$BEAM_PYTHON_SDK[gcp]"
}
+
+
#######################################
# Run UserScore with DirectRunner and verify its output.
# Globals:
#   USERSCORE_OUTPUT_PREFIX, PROJECT_ID, DATASET, BUCKET_NAME
# Arguments:
#   None
#######################################
function verify_userscore_direct() {
  print_separator "Running userscore example with DirectRunner"
  output_file_name="$USERSCORE_OUTPUT_PREFIX-direct-runner.txt"
  # Quote all expansions so values with spaces/globs cannot word-split.
  python -m apache_beam.examples.complete.game.user_score \
    --output="$output_file_name" \
    --project="$PROJECT_ID" \
    --dataset="$DATASET" \
    --input="gs://$BUCKET_NAME/5000_gaming_data.csv"

  verify_user_score "direct"
}
+
+
#######################################
# Run UserScore with DataflowRunner and verify its output.
# Globals:
#   USERSCORE_OUTPUT_PREFIX, PROJECT_ID,
#   BEAM_PYTHON_SDK, BUCKET_NAME
# Arguments:
#   None
#######################################
function verify_userscore_dataflow() {
  print_separator "Running userscore example with DataflowRunner"
  output_file_name="$USERSCORE_OUTPUT_PREFIX-dataflow-runner.txt"
  # Quote all expansions so values with spaces/globs cannot word-split.
  python -m apache_beam.examples.complete.game.user_score \
    --project="$PROJECT_ID" \
    --runner=DataflowRunner \
    --temp_location="gs://$BUCKET_NAME/temp/" \
    --sdk_location="$BEAM_PYTHON_SDK" \
    --input="gs://$BUCKET_NAME/5000_gaming_data.csv" \
    --output="gs://$BUCKET_NAME/$output_file_name"

  verify_user_score "dataflow"
}
+
+
#######################################
# Run HourlyTeamScore with DirectRunner and verify its output.
# Globals:
#   PROJECT_ID, DATASET, BUCKET_NAME
# Arguments:
#   None
#######################################
function verify_hourlyteamscore_direct() {
  print_separator "Running HourlyTeamScore example with DirectRunner"
  # Quote all expansions so values with spaces/globs cannot word-split.
  python -m apache_beam.examples.complete.game.hourly_team_score \
    --project="$PROJECT_ID" \
    --dataset="$DATASET" \
    --input="gs://$BUCKET_NAME/5000_gaming_data.csv" \
    --table="hourly_team_score_python_direct"

  verify_hourly_team_score "direct"
}
+
+
#######################################
# Run HourlyTeamScore with DataflowRunner and verify its output.
# Globals:
#   PROJECT_ID, DATASET
#   BEAM_PYTHON_SDK, BUCKET_NAME
# Arguments:
#   None
#######################################
function verify_hourlyteamscore_dataflow() {
  print_separator "Running HourlyTeamScore example with DataflowRunner"
  # Quote all expansions; --sdk_location now uses '=' like the sibling flags
  # (it was the lone space-separated flag in this command).
  python -m apache_beam.examples.complete.game.hourly_team_score \
    --project="$PROJECT_ID" \
    --dataset="$DATASET" \
    --runner=DataflowRunner \
    --temp_location="gs://$BUCKET_NAME/temp/" \
    --sdk_location="$BEAM_PYTHON_SDK" \
    --input="gs://$BUCKET_NAME/5000_gaming_data.csv" \
    --table="hourly_team_score_python_dataflow"

  verify_hourly_team_score "dataflow"
}
+
+
#######################################
# Main function.
# This function validates Python RC MobileGaming in following steps:
#   1. Create a new virtualenv and install the SDK
#   2. Run UserScore examples with DirectRunner
#   3. Run UserScore examples with DataflowRunner
#   4. Run HourlyTeamScore on DirectRunner
#   5. Run HourlyTeamScore on DataflowRunner
# Globals:
#   VERSION, BEAM_PYTHON_SDK
# Arguments:
#   $1 - sdk types: [tar, wheel]
#######################################
function run_release_candidate_python_mobile_gaming() {
  print_separator "Start Mobile Gaming Examples"
  echo "SDK version: $VERSION"

  TMPDIR=$(mktemp -d)
  echo "$TMPDIR"
  pushd "$TMPDIR"

  download_files "$1"
  # get exact names of sdk and other files
  BEAM_PYTHON_SDK=$(get_sdk_name "$1")

  install_sdk
  verify_userscore_direct
  verify_userscore_dataflow
  verify_hourlyteamscore_direct
  verify_hourlyteamscore_dataflow

  # Leave the temp directory before complete() deletes it, so the shell
  # is not left sitting in a removed working directory.
  popd
  complete "SUCCEED: Mobile Gaming Verification Complete"
}
diff --git
a/release/src/main/python-release/run_release_candidate_python_quickstart.sh
b/release/src/main/python-release/run_release_candidate_python_quickstart.sh
new file mode 100755
index 00000000000..9656a140e8e
--- /dev/null
+++ b/release/src/main/python-release/run_release_candidate_python_quickstart.sh
@@ -0,0 +1,259 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This file will verify the Apache Beam Python release candidate by following these steps:
+#
+# 1. Download files from RC staging location
+# 2. Verify hashes
+# 3. Create a new virtualenv and install the SDK
+# 4. Run Wordcount examples with DirectRunner
+# 5. Run Wordcount examples with DataflowRunner
+# 6. Run streaming wordcount on DirectRunner
+# 7. Run streaming wordcount on DataflowRunner
+#
+
+set -e
+set -v
+
+source release/src/main/python-release/python_release_automation_utils.sh
+
# Assign default values; these are replaced with the exact artifact names
# once the files have been downloaded.
BEAM_PYTHON_SDK="${BEAM_PYTHON_SDK_ZIP}"
ASC_FILE_NAME="${BEAM_PYTHON_SDK_ZIP}.asc"
SHA512_FILE_NAME="${BEAM_PYTHON_SDK_ZIP}.sha512"
+
+
#######################################
# Remove temp directory when complete.
# Globals:
#   TMPDIR
# Arguments:
#   $1 - status message to print in the separator
#######################################
function complete() {
  print_separator "Validation $1"
  # Quote and use '--' so a TMPDIR containing spaces or starting with '-'
  # cannot expand into unintended 'rm -rf' targets.
  rm -rf -- "$TMPDIR"
}
+
+
#######################################
# Verify sha512 hash and gpg signature of the downloaded SDK.
# Exits the script when the hash does not match; gpg --verify failing
# also aborts via 'set -e'.
# Globals:
#   ASC_FILE_NAME, SHA512_FILE_NAME, BEAM_PYTHON_SDK
# Arguments:
#   None
#######################################
function verify_hash() {
  print_separator "Checking sha512 hash and gpg signature"
  # sha512sum -c prints "<file>: OK" on success; extract the verdict word.
  hash_check=$(sha512sum -c "$SHA512_FILE_NAME" | head -1 | awk '{print $2}')
  if [[ "$hash_check" != "OK" ]]
  then
    echo "ERROR: The sha512 hash doesn't match."
    complete "The sha512 hash doesn't match."
    exit 1
  fi
  echo "SUCCEED: Hashes verification completed."

  wget https://dist.apache.org/repos/dist/dev/beam/KEYS
  gpg --import KEYS
  gpg --verify "$ASC_FILE_NAME" "$BEAM_PYTHON_SDK"
  # Leftover debug lines ('echo "test place 1"', 'gsutil version -l') removed.
}
+
+
#######################################
# Create a new virtualenv and install the SDK.
# Updates gcloud first when the installed version is older than 300.
# Globals:
#   BEAM_PYTHON_SDK
# Arguments:
#   None
#######################################
function install_sdk() {
  print_separator "Creating new virtualenv and installing the SDK"
  # Leftover debug lines ('echo "test place 2"', 'gsutil version -l') removed.
  virtualenv temp_virtualenv
  . temp_virtualenv/bin/activate
  # Fourth field of e.g. "Google Cloud SDK 189.0.0" is the version string.
  gcloud_version=$(gcloud --version | head -1 | awk '{print $4}')
  # Compare the major version numerically; the original lexicographic string
  # comparison ("90.0.0" > "300") would skip required updates. An empty
  # version (gcloud probe failed) also triggers the update, as before.
  if [[ -z "$gcloud_version" ]] || (( ${gcloud_version%%.*} < 300 )); then
    update_gcloud
  fi
  pip install google-compute-engine
  # Quote so the '[gcp]' extras suffix is not treated as a shell glob.
  pip install "$BEAM_PYTHON_SDK[gcp]"
}
+
+
#######################################
# Run wordcount with DirectRunner and check that output files were written.
# Arguments:
#   None
#######################################
function verify_wordcount_direct() {
  print_separator "Running wordcount example with DirectRunner"
  python -m apache_beam.examples.wordcount --output wordcount_direct.txt
  # The example shards its output, so look for any file with the prefix;
  # bail out early when none exists.
  if ! compgen -G 'wordcount_direct.txt*' > /dev/null; then
    echo "ERROR: output file not found."
    complete "failed when running wordcount example with DirectRunner."
    exit 1
  fi
  echo "Found output file(s):"
  ls wordcount_direct.txt*
  echo "SUCCEED: wordcount successfully run on DirectRunner."
}
+
+
#######################################
# Run wordcount with DataflowRunner and verify the GCS output.
# Globals:
#   BUCKET_NAME, WORDCOUNT_OUTPUT, TEMP_DIR
#   PROJECT_ID, NUM_WORKERS, BEAM_PYTHON_SDK
# Arguments:
#   None
#######################################
function verify_wordcount_dataflow() {
  print_separator "Running wordcount example with DataflowRunner "
  # Quote all expansions so values with spaces/globs cannot word-split.
  python -m apache_beam.examples.wordcount \
    --output "gs://$BUCKET_NAME/$WORDCOUNT_OUTPUT" \
    --staging_location "gs://$BUCKET_NAME$TEMP_DIR" \
    --temp_location "gs://$BUCKET_NAME$TEMP_DIR" \
    --runner DataflowRunner \
    --job_name wordcount \
    --project "$PROJECT_ID" \
    --num_workers "$NUM_WORKERS" \
    --sdk_location "$BEAM_PYTHON_SDK"

  # verify results.
  wordcount_output_in_gcs="gs://$BUCKET_NAME/$WORDCOUNT_OUTPUT"
  gcs_pull_result=$(gsutil ls "gs://$BUCKET_NAME")
  # Quote the pattern interior so the path is matched literally.
  if [[ $gcs_pull_result != *"$wordcount_output_in_gcs"* ]]; then
    # Period moved inside the quotes (it was outside: ...DataflowRunner".).
    echo "ERROR: The wordcount example failed on DataflowRunner."
    complete "failed when running wordcount example with DataflowRunner."
    exit 1
  fi

  # clean output files from GCS; quote so gsutil (not the local shell,
  # where it would never match) expands the wildcard.
  gsutil rm "gs://$BUCKET_NAME/$WORDCOUNT_OUTPUT-*"
  echo "SUCCEED: wordcount successfully run on DataflowRunner."
}
+
+
#######################################
# Run Streaming wordcount with DirectRunner.
# Starts the pipeline in the background, publishes test messages, verifies
# the result, then tears the pipeline and Pub/Sub resources down.
# Globals:
#   PROJECT_ID, PUBSUB_TOPIC1, PUBSUB_TOPIC2
# Arguments:
#   None
#######################################
function verify_streaming_wordcount_direct() {
  create_pubsub
  print_separator "Running Streaming wordcount example with DirectRunner"
  python -m apache_beam.examples.streaming_wordcount \
    --input_topic "projects/$PROJECT_ID/topics/$PUBSUB_TOPIC1" \
    --output_topic "projects/$PROJECT_ID/topics/$PUBSUB_TOPIC2" \
    --streaming &
  pid=$!
  # Give the streaming pipeline time to start before publishing.
  sleep 15

  # verify result
  run_pubsub_publish
  # NOTE(review): "steaming" matches the helper's actual name in the
  # sourced utils file.
  verify_steaming_result "DirectRunner" "$pid"

  # Delete the pubsub topics and subscription before running the second
  # job; they will be recreated in the second job.
  cleanup_pubsub
  kill -9 "$pid"
  sleep 10
}
+
+
#######################################
# Run Streaming Wordcount with DataflowRunner.
# Starts the pipeline in the background, locates the running Dataflow job,
# publishes test messages, verifies the result, then cancels the job and
# cleans up Pub/Sub resources.
# Globals:
#   PROJECT_ID, PUBSUB_TOPIC1, PUBSUB_TOPIC2
#   BUCKET_NAME, TEMP_DIR, NUM_WORKERS, BEAM_PYTHON_SDK
# Arguments:
#   None
#######################################
function verify_streaming_wordcount_dataflow() {
  create_pubsub
  print_separator "Running Streaming wordcount example with DataflowRunner "
  python -m apache_beam.examples.streaming_wordcount \
    --streaming \
    --job_name pyflow-wordstream-candidate \
    --project "$PROJECT_ID" \
    --runner DataflowRunner \
    --input_topic "projects/$PROJECT_ID/topics/$PUBSUB_TOPIC1" \
    --output_topic "projects/$PROJECT_ID/topics/$PUBSUB_TOPIC2" \
    --staging_location "gs://$BUCKET_NAME$TEMP_DIR" \
    --temp_location "gs://$BUCKET_NAME$TEMP_DIR" \
    --num_workers "$NUM_WORKERS" \
    --sdk_location "$BEAM_PYTHON_SDK" &

  pid=$!
  # Allow the Dataflow job time to appear in the job list.
  sleep 60
  running_job=$(gcloud dataflow jobs list \
    | grep pyflow-wordstream-candidate \
    | grep Running \
    | cut -d' ' -f1)

  # verify result
  run_pubsub_publish
  # Give the streaming pipeline time to process the published messages.
  sleep 420
  # NOTE(review): "steaming" matches the helper's actual name in the
  # sourced utils file.
  verify_steaming_result "DataflowRunner" "$pid" "$running_job"

  kill -9 "$pid"
  gcloud dataflow jobs cancel "$running_job"
  cleanup_pubsub
}
+
+
#######################################
# Main function.
# This function validates Python RC Quickstart in following steps:
#   1. Download files from RC staging location
#   2. Verify hashes
#   3. Create a new virtualenv and install the SDK
#   4. Run Wordcount examples with DirectRunner
#   5. Run Wordcount examples with DataflowRunner
#   6. Run streaming wordcount on DirectRunner
#   7. Run streaming wordcount on DataflowRunner
# Globals:
#   VERSION, BEAM_PYTHON_SDK, ASC_FILE_NAME, SHA512_FILE_NAME
# Arguments:
#   $1 - sdk types: [tar, wheel]
#######################################
function run_release_candidate_python_quickstart() {
  print_separator "Start Quickstarts Examples"
  echo "SDK version: $VERSION"
  TMPDIR=$(mktemp -d)
  echo "$TMPDIR"
  pushd "$TMPDIR"

  download_files "$1"
  # get exact names of sdk and other files
  BEAM_PYTHON_SDK=$(get_sdk_name "$1")
  ASC_FILE_NAME=$(get_asc_name "$1")
  SHA512_FILE_NAME=$(get_sha512_name "$1")

  verify_hash
  install_sdk
  verify_wordcount_direct
  verify_wordcount_dataflow
  verify_streaming_wordcount_direct
  verify_streaming_wordcount_dataflow

  # Leave the temp directory before complete() deletes it, so the shell
  # is not left sitting in a removed working directory.
  popd
  complete "SUCCEED: Quickstart Verification Complete"
}
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
Issue Time Tracking
-------------------
Worklog Id: (was: 129915)
Time Spent: 35h 40m (was: 35.5h)
> Get Python Wheel Validation Automated
> -------------------------------------
>
> Key: BEAM-3906
> URL: https://issues.apache.org/jira/browse/BEAM-3906
> Project: Beam
> Issue Type: Sub-task
> Components: examples-python, testing
> Reporter: yifan zou
> Assignee: yifan zou
> Priority: Major
> Time Spent: 35h 40m
> Remaining Estimate: 0h
>
--
This message was sent by Atlassian JIRA
(v7.6.3#76005)