[FLINK-7973] Add shaded S3 FileSystem end-to-end tests

Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/e9e7c337
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/e9e7c337
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/e9e7c337

Branch: refs/heads/master
Commit: e9e7c3372189db7e933ff59114b9ec6245838eda
Parents: 0e5fb0b
Author: Aljoscha Krettek <aljoscha.kret...@gmail.com>
Authored: Wed Nov 8 17:49:17 2017 +0100
Committer: Aljoscha Krettek <aljoscha.kret...@gmail.com>
Committed: Mon Nov 13 16:37:51 2017 +0100

----------------------------------------------------------------------
 test-infra/end-to-end-test/common.sh            | 53 +++++++++++++---
 .../end-to-end-test/test_shaded_hadoop_s3a.sh   | 64 ++++++++++++++++++++
 .../end-to-end-test/test_shaded_presto_s3.sh    | 64 ++++++++++++++++++++
 tools/travis_mvn_watchdog.sh                    | 12 ++++
 4 files changed, 185 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/flink/blob/e9e7c337/test-infra/end-to-end-test/common.sh
----------------------------------------------------------------------
diff --git a/test-infra/end-to-end-test/common.sh 
b/test-infra/end-to-end-test/common.sh
index 9a1c79c..cc31c90 100644
--- a/test-infra/end-to-end-test/common.sh
+++ b/test-infra/end-to-end-test/common.sh
@@ -92,14 +92,14 @@ function stop_cluster {
     PASS=""
   fi
 
-  for f in `ls $FLINK_DIR/log/*.out`
-  do
-    if [[ -s $f ]]; then
-      echo "Found non-empty file $f"
-      cat $f
-      PASS=""
-    fi
-  done
+  if grep -rv "NativeCodeLoader" $FLINK_DIR/log/*.out \
+      | grep -v  "Unable to load native-hadoop" \
+      | grep -v  "amazonaws" \
+      | grep -i "."; then
+    echo "Found non-empty .out files:"
+    cat $FLINK_DIR/log/*.out
+    PASS=""
+  fi
 
   rm $FLINK_DIR/log/*
 }
@@ -135,3 +135,40 @@ function check_all_pass {
 function clean_data_dir {
   rm -r $TEST_DATA_DIR
 }
+
+function s3_put {
+  local_file=$1
+  bucket=$2
+  s3_file=$3
+  resource="/${bucket}/${s3_file}"
+  contentType="application/octet-stream"
+  dateValue=`date -R`
+  stringToSign="PUT\n\n${contentType}\n${dateValue}\n${resource}"
+  s3Key=$ARTIFACTS_AWS_ACCESS_KEY
+  s3Secret=$ARTIFACTS_AWS_SECRET_KEY
+  signature=`echo -en ${stringToSign} | openssl sha1 -hmac ${s3Secret} -binary 
| base64`
+  curl -X PUT -T "${local_file}" \
+    -H "Host: ${bucket}.s3.amazonaws.com" \
+    -H "Date: ${dateValue}" \
+    -H "Content-Type: ${contentType}" \
+    -H "Authorization: AWS ${s3Key}:${signature}" \
+    https://${bucket}.s3.amazonaws.com/${s3_file}
+}
+
+function s3_delete {
+  bucket=$1
+  s3_file=$2
+  resource="/${bucket}/${s3_file}"
+  contentType="application/octet-stream"
+  dateValue=`date -R`
+  stringToSign="DELETE\n\n${contentType}\n${dateValue}\n${resource}"
+  s3Key=$ARTIFACTS_AWS_ACCESS_KEY
+  s3Secret=$ARTIFACTS_AWS_SECRET_KEY
+  signature=`echo -en ${stringToSign} | openssl sha1 -hmac ${s3Secret} -binary 
| base64`
+  curl -X DELETE \
+    -H "Host: ${bucket}.s3.amazonaws.com" \
+    -H "Date: ${dateValue}" \
+    -H "Content-Type: ${contentType}" \
+    -H "Authorization: AWS ${s3Key}:${signature}" \
+    https://${bucket}.s3.amazonaws.com/${s3_file}
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/e9e7c337/test-infra/end-to-end-test/test_shaded_hadoop_s3a.sh
----------------------------------------------------------------------
diff --git a/test-infra/end-to-end-test/test_shaded_hadoop_s3a.sh 
b/test-infra/end-to-end-test/test_shaded_hadoop_s3a.sh
new file mode 100755
index 0000000..90bf73b
--- /dev/null
+++ b/test-infra/end-to-end-test/test_shaded_hadoop_s3a.sh
@@ -0,0 +1,64 @@
+#!/usr/bin/env bash
+################################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+################################################################################
+
+if [[ -z "$ARTIFACTS_AWS_BUCKET" ]]; then
+    echo "Did not find AWS environment variables, NOT running Shaded Hadoop 
S3A e2e tests."
+    exit 0
+else
+    echo "Found AWS bucket $ARTIFACTS_AWS_BUCKET, running Shaded Hadoop S3A 
e2e tests."
+fi
+
+# Tests for our shaded/bundled Hadoop S3A file system.
+
+set -e
+set -o pipefail
+
+# Convert relative path to absolute path
+TEST_ROOT=`pwd`
+TEST_INFRA_DIR="$0"
+TEST_INFRA_DIR=`dirname "$TEST_INFRA_DIR"`
+cd $TEST_INFRA_DIR
+TEST_INFRA_DIR=`pwd`
+cd $TEST_ROOT
+
+. "$TEST_INFRA_DIR"/common.sh
+
+s3_put $TEST_INFRA_DIR/test-data/words $ARTIFACTS_AWS_BUCKET 
flink-end-to-end-test-shaded-s3a
+
+cp $FLINK_DIR/opt/flink-s3-fs-hadoop-*.jar $FLINK_DIR/lib/
+echo "s3.access-key: $ARTIFACTS_AWS_ACCESS_KEY" >> 
"$FLINK_DIR/conf/flink-conf.yaml"
+echo "s3.secret-key: $ARTIFACTS_AWS_SECRET_KEY" >> 
"$FLINK_DIR/conf/flink-conf.yaml"
+
+start_cluster
+
+$FLINK_DIR/bin/flink run -p 1 $FLINK_DIR/examples/batch/WordCount.jar --input 
s3:/$resource --output $TEST_DATA_DIR/out/wc_out
+
+check_result_hash "WordCountWithShadedS3A" $TEST_DATA_DIR/out/wc_out 
"72a690412be8928ba239c2da967328a5"
+
+# remove any leftover settings
+sed -i -e 's/s3.access-key: .*//' "$FLINK_DIR/conf/flink-conf.yaml"
+sed -i -e 's/s3.secret-key: .*//' "$FLINK_DIR/conf/flink-conf.yaml"
+
+rm $FLINK_DIR/lib/flink-s3-fs*.jar
+
+s3_delete $ARTIFACTS_AWS_BUCKET flink-end-to-end-test-shaded-s3a
+
+stop_cluster
+clean_data_dir
+check_all_pass

http://git-wip-us.apache.org/repos/asf/flink/blob/e9e7c337/test-infra/end-to-end-test/test_shaded_presto_s3.sh
----------------------------------------------------------------------
diff --git a/test-infra/end-to-end-test/test_shaded_presto_s3.sh 
b/test-infra/end-to-end-test/test_shaded_presto_s3.sh
new file mode 100755
index 0000000..6ded115
--- /dev/null
+++ b/test-infra/end-to-end-test/test_shaded_presto_s3.sh
@@ -0,0 +1,64 @@
+#!/usr/bin/env bash
+################################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+################################################################################
+
+if [[ -z "$ARTIFACTS_AWS_BUCKET" ]]; then
+    echo "Did not find AWS environment variables, NOT running Shaded Presto S3 
e2e tests."
+    exit 0
+else
+    echo "Found AWS bucket $ARTIFACTS_AWS_BUCKET, running Shaded Presto S3 e2e 
tests."
+fi
+
+# Tests for our shaded/bundled Hadoop S3A file system.
+
+set -e
+set -o pipefail
+
+# Convert relative path to absolute path
+TEST_ROOT=`pwd`
+TEST_INFRA_DIR="$0"
+TEST_INFRA_DIR=`dirname "$TEST_INFRA_DIR"`
+cd $TEST_INFRA_DIR
+TEST_INFRA_DIR=`pwd`
+cd $TEST_ROOT
+
+. "$TEST_INFRA_DIR"/common.sh
+
+s3_put $TEST_INFRA_DIR/test-data/words $ARTIFACTS_AWS_BUCKET 
flink-end-to-end-test-shaded-presto-s3
+
+cp $FLINK_DIR/opt/flink-s3-fs-presto-*.jar $FLINK_DIR/lib/
+echo "s3.access-key: $ARTIFACTS_AWS_ACCESS_KEY" >> 
"$FLINK_DIR/conf/flink-conf.yaml"
+echo "s3.secret-key: $ARTIFACTS_AWS_SECRET_KEY" >> 
"$FLINK_DIR/conf/flink-conf.yaml"
+
+start_cluster
+
+$FLINK_DIR/bin/flink run -p 1 $FLINK_DIR/examples/batch/WordCount.jar --input 
s3:/$resource --output $TEST_DATA_DIR/out/wc_out
+
+check_result_hash "WordCountWithShadedPrestoS3" $TEST_DATA_DIR/out/wc_out 
"72a690412be8928ba239c2da967328a5"
+
+# remove any leftover settings
+sed -i -e 's/s3.access-key: .*//' "$FLINK_DIR/conf/flink-conf.yaml"
+sed -i -e 's/s3.secret-key: .*//' "$FLINK_DIR/conf/flink-conf.yaml"
+
+rm $FLINK_DIR/lib/flink-s3-fs*.jar
+
+s3_delete $ARTIFACTS_AWS_BUCKET flink-end-to-end-test-shaded-presto-s3
+
+stop_cluster
+clean_data_dir
+check_all_pass

http://git-wip-us.apache.org/repos/asf/flink/blob/e9e7c337/tools/travis_mvn_watchdog.sh
----------------------------------------------------------------------
diff --git a/tools/travis_mvn_watchdog.sh b/tools/travis_mvn_watchdog.sh
index 9e2c27a..0417cd3 100755
--- a/tools/travis_mvn_watchdog.sh
+++ b/tools/travis_mvn_watchdog.sh
@@ -544,6 +544,18 @@ case $TEST in
                        printf 
"==============================================================================\n"
                        
test-infra/end-to-end-test/test_streaming_classloader.sh build-target cluster
                        EXIT_CODE=$(($EXIT_CODE+$?))
+
+                       printf 
"\n==============================================================================\n"
+                       printf "Running Shaded Hadoop S3A end-to-end test\n"
+                       printf 
"==============================================================================\n"
+                       test-infra/end-to-end-test/test_shaded_hadoop_s3a.sh 
build-target cluster
+                       EXIT_CODE=$(($EXIT_CODE+$?))
+
+                       printf 
"\n==============================================================================\n"
+                       printf "Running Shaded Presto S3 end-to-end test\n"
+                       printf 
"==============================================================================\n"
+                       test-infra/end-to-end-test/test_shaded_presto_s3.sh 
build-target cluster
+                       EXIT_CODE=$(($EXIT_CODE+$?))
                else
                        printf 
"\n==============================================================================\n"
                        printf "Previous build failure detected, skipping 
end-to-end tests.\n"

Reply via email to