This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a commit to branch FLINK-34324
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 1f7ff2fdcac221998dba93bc76bbad3a82037c7f
Author: Matthias Pohl <[email protected]>
AuthorDate: Wed Feb 7 11:46:45 2024 +0100

    [hotfix][ci] Adds Minio-support to the FileSink e2e tests
---
 flink-end-to-end-tests/run-nightly-tests.sh           |  2 ++
 flink-end-to-end-tests/test-scripts/test_file_sink.sh | 14 +++++++-------
 2 files changed, 9 insertions(+), 7 deletions(-)

diff --git a/flink-end-to-end-tests/run-nightly-tests.sh 
b/flink-end-to-end-tests/run-nightly-tests.sh
index 57f4fa22f7b..6d9f20366a1 100755
--- a/flink-end-to-end-tests/run-nightly-tests.sh
+++ b/flink-end-to-end-tests/run-nightly-tests.sh
@@ -191,8 +191,10 @@ function run_group_2 {
 
     if [[ ${PROFILE} != *"enable-adaptive-scheduler"* ]]; then # FLINK-21400
       run_test "Streaming File Sink end-to-end test" 
"$END_TO_END_DIR/test-scripts/test_file_sink.sh local StreamingFileSink" 
"skip_check_exceptions"
+      run_test "Streaming File Sink (Minio) end-to-end test" 
"$END_TO_END_DIR/test-scripts/test_file_sink.sh minio StreamingFileSink" 
"skip_check_exceptions"
       run_test "Streaming File Sink s3 end-to-end test" 
"$END_TO_END_DIR/test-scripts/test_file_sink.sh s3 StreamingFileSink" 
"skip_check_exceptions"
       run_test "New File Sink end-to-end test" 
"$END_TO_END_DIR/test-scripts/test_file_sink.sh local FileSink" 
"skip_check_exceptions"
+      run_test "New File Sink (Minio) end-to-end test" 
"$END_TO_END_DIR/test-scripts/test_file_sink.sh minio FileSink" 
"skip_check_exceptions"
       run_test "New File Sink s3 end-to-end test" 
"$END_TO_END_DIR/test-scripts/test_file_sink.sh s3 FileSink" 
"skip_check_exceptions"
 
       run_test "Stateful stream job upgrade end-to-end test" 
"$END_TO_END_DIR/test-scripts/test_stateful_stream_job_upgrade.sh 2 4"
diff --git a/flink-end-to-end-tests/test-scripts/test_file_sink.sh 
b/flink-end-to-end-tests/test-scripts/test_file_sink.sh
index 5ed1fda2c68..79b97495b7e 100755
--- a/flink-end-to-end-tests/test-scripts/test_file_sink.sh
+++ b/flink-end-to-end-tests/test-scripts/test_file_sink.sh
@@ -61,9 +61,13 @@ function get_total_number_of_valid_lines {
 
 if [ "${OUT_TYPE}" == "local" ]; then
  echo "[INFO] Test run in local environment: no S3 environment is loaded."
-elif [ "${OUT_TYPE}" == "s3" ]; then
-  # the s3 context requires additional
-  source "$(dirname "$0")"/common_s3.sh
+elif [ "${OUT_TYPE}" == "s3" ] || [ "${OUT_TYPE}" == "minio" ]; then
+  if [ "${OUT_TYPE}" == "s3" ]; then
+    source "$(dirname "$0")"/common_s3.sh
+  else
+    source "$(dirname "$0")"/common_s3_minio.sh
+  fi
+
   s3_setup hadoop
 
   # overwrites JOB_OUTPUT_PATH to point to S3
@@ -90,7 +94,6 @@ elif [ "${OUT_TYPE}" == "s3" ]; then
   function out_cleanup {
     s3_delete_by_full_path_prefix "${S3_DATA_PREFIX}"
     s3_delete_by_full_path_prefix "${S3_CHECKPOINT_PREFIX}"
-    rollback_openssl_lib
   }
 
   on_exit out_cleanup
@@ -100,10 +103,7 @@ else
 fi
 
 # randomly set up openSSL with dynamically/statically linked libraries
-OPENSSL_LINKAGE=$(if (( RANDOM % 2 )) ; then echo "dynamic"; else echo 
"static"; fi)
-echo "Executing test with ${OPENSSL_LINKAGE} openSSL linkage (random selection 
between 'dynamic' and 'static')"
 
-set_conf_ssl "mutual" "OPENSSL" "${OPENSSL_LINKAGE}"
 set_config_key "metrics.fetcher.update-interval" "2000"
 # this test relies on global failovers
 set_config_key "jobmanager.execution.failover-strategy" "full"

Reply via email to