This is an automated email from the ASF dual-hosted git repository.
capistrant pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/druid.git
The following commit(s) were added to refs/heads/master by this push:
new 3d9e3dbad9 Fix hadoop library location for integration tests (#12497)
3d9e3dbad9 is described below
commit 3d9e3dbad9d38b6be42ea29b4cf7bd9d49ce5d77
Author: Jihoon Son <[email protected]>
AuthorDate: Thu Jun 23 08:39:54 2022 -0700
Fix hadoop library location for integration tests (#12497)
---
integration-tests/script/copy_resources_template.sh | 18 ++++++++++++++----
1 file changed, 14 insertions(+), 4 deletions(-)
diff --git a/integration-tests/script/copy_resources_template.sh
b/integration-tests/script/copy_resources_template.sh
index bd074fe088..7dab593c75 100755
--- a/integration-tests/script/copy_resources_template.sh
+++ b/integration-tests/script/copy_resources_template.sh
@@ -40,7 +40,6 @@ mkdir -p $SHARED_DIR/hadoop_xml
mkdir -p $SHARED_DIR/hadoop-dependencies
mkdir -p $SHARED_DIR/logs
mkdir -p $SHARED_DIR/tasklogs
-mkdir -p $SHARED_DIR/docker/extensions
mkdir -p $SHARED_DIR/docker/credentials
# install logging config
@@ -52,16 +51,27 @@ cp src/main/resources/log4j2.xml
$SHARED_DIR/docker/lib/log4j2.xml
# Pull Hadoop dependency if needed
if [ -n "$DRUID_INTEGRATION_TEST_START_HADOOP_DOCKER" ] && [
"$DRUID_INTEGRATION_TEST_START_HADOOP_DOCKER" == true ]
then
+ # HdfsStorageDruidModule loads all implementations of
org.apache.hadoop.fs.FileSystem using an extension class loader.
+ # This requires all FileSystem implementations to be installed in
druid-hdfs-storage.
+ DRUID_HDFS_EXT=$SHARED_DIR/docker/extensions/druid-hdfs-storage
+
HADOOP_AWS_DIR=$SHARED_DIR/hadoop-dependencies/hadoop-aws/${hadoop.compile.version}
+
HADOOP_GCS_DIR=$SHARED_DIR/hadoop-dependencies/hadoop-gcs/${hadoop.compile.version}
+
HADOOP_AZURE_DIR=$SHARED_DIR/hadoop-dependencies/hadoop-azure/${hadoop.compile.version}
+ mkdir -p $DRUID_HDFS_EXT
+ mkdir -p $HADOOP_GCS_DIR
## We put the same version in both commands, but since we have an if, the correct code
path will always be executed, as this is a generated script.
## <TODO> Remove if
- mkdir -p $SHARED_DIR/hadoop-dependencies/hadoop-gcs
if [ -n "${HADOOP_VERSION}" ] && [ "${HADOOP_VERSION:0:1}" == "3" ]; then
java -cp "$SHARED_DIR/docker/lib/*"
-Ddruid.extensions.hadoopDependenciesDir="$SHARED_DIR/hadoop-dependencies"
org.apache.druid.cli.Main tools pull-deps -h
org.apache.hadoop:hadoop-client-api:${hadoop.compile.version} -h
org.apache.hadoop:hadoop-client-runtime:${hadoop.compile.version} -h
org.apache.hadoop:hadoop-aws:${hadoop.compile.version} -h
org.apache.hadoop:hadoop-azure:${hadoop.compile.version}
- curl
https://storage.googleapis.com/hadoop-lib/gcs/gcs-connector-hadoop3-latest.jar
--output
$SHARED_DIR/hadoop-dependencies/hadoop-gcs/gcs-connector-hadoop3-latest.jar
+ curl
https://storage.googleapis.com/hadoop-lib/gcs/gcs-connector-hadoop3-latest.jar
--output $HADOOP_GCS_DIR/gcs-connector-hadoop3-latest.jar
+ cp $HADOOP_GCS_DIR/gcs-connector-hadoop3-latest.jar $DRUID_HDFS_EXT
else
java -cp "$SHARED_DIR/docker/lib/*"
-Ddruid.extensions.hadoopDependenciesDir="$SHARED_DIR/hadoop-dependencies"
org.apache.druid.cli.Main tools pull-deps -h
org.apache.hadoop:hadoop-client:${hadoop.compile.version} -h
org.apache.hadoop:hadoop-aws:${hadoop.compile.version} -h
org.apache.hadoop:hadoop-azure:${hadoop.compile.version}
- curl
https://storage.googleapis.com/hadoop-lib/gcs/gcs-connector-hadoop2-latest.jar
--output
$SHARED_DIR/hadoop-dependencies/hadoop-gcs/gcs-connector-hadoop2-latest.jar
+ curl
https://storage.googleapis.com/hadoop-lib/gcs/gcs-connector-hadoop2-latest.jar
--output $HADOOP_GCS_DIR/gcs-connector-hadoop2-latest.jar
+ cp $HADOOP_GCS_DIR/gcs-connector-hadoop2-latest.jar $DRUID_HDFS_EXT
fi
+ cp $HADOOP_AWS_DIR/hadoop-aws-${hadoop.compile.version}.jar $DRUID_HDFS_EXT
+ cp $HADOOP_AZURE_DIR/hadoop-azure-${hadoop.compile.version}.jar
$DRUID_HDFS_EXT
fi
# one of the integration tests needs the wikiticker sample data
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]