This is an automated email from the ASF dual-hosted git repository.
cwylie pushed a commit to branch 0.18.0
in repository https://gitbox.apache.org/repos/asf/druid.git
The following commit(s) were added to refs/heads/0.18.0 by this push:
new ef124dc S3 ingestion spec should not use the default credentials
provider chain when the environment variable password provider is misconfigured.
(#9552) (#9558)
ef124dc is described below
commit ef124dcace15de7715f70887b6e7a708979549bc
Author: Maytas Monsereenusorn <[email protected]>
AuthorDate: Tue Mar 24 21:12:16 2020 -0700
S3 ingestion spec should not use the default credentials provider chain
when the environment variable password provider is misconfigured. (#9552) (#9558)
* fix s3 optional cred
* S3 ingestion spec uses the default credentials provider chain when the
environment variable password provider is misconfigured.
* fix failing test
---
.travis.yml | 4 +-
.../druid/data/input/s3/S3InputSourceConfig.java | 4 +-
integration-tests/pom.xml | 6 +
.../clients/OverlordResourceTestClient.java | 35 ++-
.../java/org/apache/druid/tests/TestNGGroup.java | 5 +-
...AbstractAzureInputSourceParallelIndexTest.java} | 2 +-
...> AbstractGcsInputSourceParallelIndexTest.java} | 2 +-
...=> AbstractS3InputSourceParallelIndexTest.java} | 2 +-
.../indexer/ITAzureToAzureParallelIndexTest.java | 2 +-
.../indexer/ITAzureToHdfsParallelIndexTest.java | 2 +-
.../tests/indexer/ITGcsToGcsParallelIndexTest.java | 2 +-
.../indexer/ITGcsToHdfsParallelIndexTest.java | 2 +-
.../indexer/ITS3OverrideCredentialsIndexTest.java | 289 +++++++++++++++++++++
.../tests/indexer/ITS3ToHdfsParallelIndexTest.java | 2 +-
.../tests/indexer/ITS3ToS3ParallelIndexTest.java | 2 +-
.../wikipedia_override_credentials_index_task.json | 85 ++++++
16 files changed, 428 insertions(+), 18 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index dc80ab0..e7582d5 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -344,7 +344,7 @@ jobs:
name: "(Compile=openjdk8, Run=openjdk8) other integration test"
jdk: openjdk8
services: *integration_test_services
- env:
TESTNG_GROUPS='-DexcludedGroups=batch-index,perfect-rollup-parallel-batch-index,kafka-index,query,realtime-index,security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage'
JVM_RUNTIME='-Djvm.runtime=8'
+ env:
TESTNG_GROUPS='-DexcludedGroups=batch-index,perfect-rollup-parallel-batch-index,kafka-index,query,realtime-index,security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion'
JVM_RUNTIME='-Djvm.runtime=8'
script: *run_integration_test
after_failure: *integration_test_diags
# END - Integration tests for Compile with Java 8 and Run with Java 8
@@ -383,7 +383,7 @@ jobs:
- <<: *integration_tests
name: "(Compile=openjdk8, Run=openjdk11) other integration test"
jdk: openjdk8
- env:
TESTNG_GROUPS='-DexcludedGroups=batch-index,perfect-rollup-parallel-batch-index,kafka-index,query,realtime-index,security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage'
JVM_RUNTIME='-Djvm.runtime=11'
+ env:
TESTNG_GROUPS='-DexcludedGroups=batch-index,perfect-rollup-parallel-batch-index,kafka-index,query,realtime-index,security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion'
JVM_RUNTIME='-Djvm.runtime=11'
# END - Integration tests for Compile with Java 8 and Run with Java 11
- name: "security vulnerabilities"
diff --git
a/extensions-core/s3-extensions/src/main/java/org/apache/druid/data/input/s3/S3InputSourceConfig.java
b/extensions-core/s3-extensions/src/main/java/org/apache/druid/data/input/s3/S3InputSourceConfig.java
index fd27b99..4543449 100644
---
a/extensions-core/s3-extensions/src/main/java/org/apache/druid/data/input/s3/S3InputSourceConfig.java
+++
b/extensions-core/s3-extensions/src/main/java/org/apache/druid/data/input/s3/S3InputSourceConfig.java
@@ -67,9 +67,7 @@ public class S3InputSourceConfig
public boolean isCredentialsConfigured()
{
return accessKeyId != null &&
- accessKeyId.getPassword() != null &&
- secretAccessKey != null &&
- secretAccessKey.getPassword() != null;
+ secretAccessKey != null;
}
@Override
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml
index 7aa13e0..d9cc62d 100644
--- a/integration-tests/pom.xml
+++ b/integration-tests/pom.xml
@@ -38,6 +38,12 @@
<version>${project.parent.version}</version>
</dependency>
<dependency>
+ <groupId>com.amazonaws</groupId>
+ <artifactId>aws-java-sdk-s3</artifactId>
+ <version>${aws.sdk.version}</version>
+ <scope>runtime</scope>
+ </dependency>
+ <dependency>
<groupId>org.apache.druid.extensions</groupId>
<artifactId>druid-s3-extensions</artifactId>
<version>${project.parent.version}</version>
diff --git
a/integration-tests/src/main/java/org/apache/druid/testing/clients/OverlordResourceTestClient.java
b/integration-tests/src/main/java/org/apache/druid/testing/clients/OverlordResourceTestClient.java
index 2143302..6ad6411 100644
---
a/integration-tests/src/main/java/org/apache/druid/testing/clients/OverlordResourceTestClient.java
+++
b/integration-tests/src/main/java/org/apache/druid/testing/clients/OverlordResourceTestClient.java
@@ -25,6 +25,7 @@ import com.google.common.base.Predicates;
import com.google.inject.Inject;
import org.apache.druid.client.indexing.TaskStatusResponse;
import org.apache.druid.indexer.TaskState;
+import org.apache.druid.indexer.TaskStatusPlus;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.RetryUtils;
import org.apache.druid.java.util.common.StringUtils;
@@ -108,7 +109,7 @@ public class OverlordResourceTestClient
}
}
- public TaskState getTaskStatus(String taskID)
+ public TaskStatusPlus getTaskStatus(String taskID)
{
try {
StatusResponseHolder response = makeRequest(
@@ -127,7 +128,7 @@ public class OverlordResourceTestClient
{
}
);
- return taskStatusResponse.getStatus().getStatusCode();
+ return taskStatusResponse.getStatus();
}
catch (Exception e) {
throw new RuntimeException(e);
@@ -186,7 +187,7 @@ public class OverlordResourceTestClient
@Override
public Boolean call()
{
- TaskState status = getTaskStatus(taskID);
+ TaskState status = getTaskStatus(taskID).getStatusCode();
if (status == TaskState.FAILED) {
throw new ISE("Indexer task FAILED");
}
@@ -200,6 +201,34 @@ public class OverlordResourceTestClient
);
}
+ public void waitUntilTaskFails(final String taskID)
+ {
+ waitUntilTaskFails(taskID, 10000, 60);
+ }
+
+
+ public void waitUntilTaskFails(final String taskID, final int millisEach,
final int numTimes)
+ {
+ ITRetryUtil.retryUntil(
+ new Callable<Boolean>()
+ {
+ @Override
+ public Boolean call()
+ {
+ TaskState status = getTaskStatus(taskID).getStatusCode();
+ if (status == TaskState.SUCCESS) {
+ throw new ISE("Indexer task SUCCEED");
+ }
+ return status == TaskState.FAILED;
+ }
+ },
+ true,
+ millisEach,
+ numTimes,
+ taskID
+ );
+ }
+
public String submitSupervisor(String spec)
{
try {
diff --git
a/integration-tests/src/test/java/org/apache/druid/tests/TestNGGroup.java
b/integration-tests/src/test/java/org/apache/druid/tests/TestNGGroup.java
index 873cfcb..79be37b 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/TestNGGroup.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/TestNGGroup.java
@@ -54,5 +54,8 @@ public class TestNGGroup
// See integration-tests/docker/environment-configs/override-examples/hdfs
for env vars to provide.
// Additionally, hadoop docker must be started by passing
-Dstart.hadoop.docker=true to mvn.
public static final String HDFS_DEEP_STORAGE = "hdfs-deep-storage";
-
+ // This group is not part of CI. To run this group, s3 configs/credentials
for your s3 must be provided in a file.
+ // The path of the file must then be pass to mvn with
-Doverride.config.path=<PATH_TO_FILE>
+ // See integration-tests/docker/environment-configs/override-examples/s3 for
env vars to provide.
+ public static final String S3_INGESTION = "s3-ingestion";
}
diff --git
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractAzureInputSourceSimpleIndexTest.java
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractAzureInputSourceParallelIndexTest.java
similarity index 97%
rename from
integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractAzureInputSourceSimpleIndexTest.java
rename to
integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractAzureInputSourceParallelIndexTest.java
index 49049f3..1bc4801 100644
---
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractAzureInputSourceSimpleIndexTest.java
+++
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractAzureInputSourceParallelIndexTest.java
@@ -31,7 +31,7 @@ import java.util.List;
import java.util.UUID;
import java.util.function.Function;
-public abstract class AbstractAzureInputSourceSimpleIndexTest extends
AbstractITBatchIndexTest
+public abstract class AbstractAzureInputSourceParallelIndexTest extends
AbstractITBatchIndexTest
{
private static final String INDEX_TASK =
"/indexer/wikipedia_cloud_index_task.json";
private static final String INDEX_QUERIES_RESOURCE =
"/indexer/wikipedia_index_queries.json";
diff --git
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractGcsInputSourceSimpleIndexTest.java
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractGcsInputSourceParallelIndexTest.java
similarity index 97%
rename from
integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractGcsInputSourceSimpleIndexTest.java
rename to
integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractGcsInputSourceParallelIndexTest.java
index da5c1e0..f5c192d 100644
---
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractGcsInputSourceSimpleIndexTest.java
+++
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractGcsInputSourceParallelIndexTest.java
@@ -31,7 +31,7 @@ import java.util.List;
import java.util.UUID;
import java.util.function.Function;
-public abstract class AbstractGcsInputSourceSimpleIndexTest extends
AbstractITBatchIndexTest
+public abstract class AbstractGcsInputSourceParallelIndexTest extends
AbstractITBatchIndexTest
{
private static final String INDEX_TASK =
"/indexer/wikipedia_cloud_index_task.json";
private static final String INDEX_QUERIES_RESOURCE =
"/indexer/wikipedia_index_queries.json";
diff --git
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractS3InputSourceSimpleIndexTest.java
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractS3InputSourceParallelIndexTest.java
similarity index 97%
rename from
integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractS3InputSourceSimpleIndexTest.java
rename to
integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractS3InputSourceParallelIndexTest.java
index eba61c2..077eab4 100644
---
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractS3InputSourceSimpleIndexTest.java
+++
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractS3InputSourceParallelIndexTest.java
@@ -31,7 +31,7 @@ import java.util.List;
import java.util.UUID;
import java.util.function.Function;
-public abstract class AbstractS3InputSourceSimpleIndexTest extends
AbstractITBatchIndexTest
+public abstract class AbstractS3InputSourceParallelIndexTest extends
AbstractITBatchIndexTest
{
private static final String INDEX_TASK =
"/indexer/wikipedia_cloud_index_task.json";
private static final String INDEX_QUERIES_RESOURCE =
"/indexer/wikipedia_index_queries.json";
diff --git
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITAzureToAzureParallelIndexTest.java
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITAzureToAzureParallelIndexTest.java
index fe3a99f..e8f76ef 100644
---
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITAzureToAzureParallelIndexTest.java
+++
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITAzureToAzureParallelIndexTest.java
@@ -39,7 +39,7 @@ import java.util.List;
*/
@Test(groups = TestNGGroup.AZURE_DEEP_STORAGE)
@Guice(moduleFactory = DruidTestModuleFactory.class)
-public class ITAzureToAzureParallelIndexTest extends
AbstractAzureInputSourceSimpleIndexTest
+public class ITAzureToAzureParallelIndexTest extends
AbstractAzureInputSourceParallelIndexTest
{
@Test(dataProvider = "resources")
public void testAzureIndexData(Pair<String, List> azureInputSource) throws
Exception
diff --git
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITAzureToHdfsParallelIndexTest.java
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITAzureToHdfsParallelIndexTest.java
index c3b529e..91ccd61 100644
---
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITAzureToHdfsParallelIndexTest.java
+++
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITAzureToHdfsParallelIndexTest.java
@@ -41,7 +41,7 @@ import java.util.List;
*/
@Test(groups = TestNGGroup.HDFS_DEEP_STORAGE)
@Guice(moduleFactory = DruidTestModuleFactory.class)
-public class ITAzureToHdfsParallelIndexTest extends
AbstractAzureInputSourceSimpleIndexTest
+public class ITAzureToHdfsParallelIndexTest extends
AbstractAzureInputSourceParallelIndexTest
{
@Test(dataProvider = "resources")
public void testAzureIndexData(Pair<String, List> azureInputSource) throws
Exception
diff --git
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITGcsToGcsParallelIndexTest.java
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITGcsToGcsParallelIndexTest.java
index 30886d2..a41afb7 100644
---
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITGcsToGcsParallelIndexTest.java
+++
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITGcsToGcsParallelIndexTest.java
@@ -40,7 +40,7 @@ import java.util.List;
*/
@Test(groups = TestNGGroup.GCS_DEEP_STORAGE)
@Guice(moduleFactory = DruidTestModuleFactory.class)
-public class ITGcsToGcsParallelIndexTest extends
AbstractGcsInputSourceSimpleIndexTest
+public class ITGcsToGcsParallelIndexTest extends
AbstractGcsInputSourceParallelIndexTest
{
@Test(dataProvider = "resources")
public void testGcsIndexData(Pair<String, List> gcsInputSource) throws
Exception
diff --git
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITGcsToHdfsParallelIndexTest.java
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITGcsToHdfsParallelIndexTest.java
index 49b7efe..e3420d0 100644
---
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITGcsToHdfsParallelIndexTest.java
+++
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITGcsToHdfsParallelIndexTest.java
@@ -42,7 +42,7 @@ import java.util.List;
*/
@Test(groups = TestNGGroup.HDFS_DEEP_STORAGE)
@Guice(moduleFactory = DruidTestModuleFactory.class)
-public class ITGcsToHdfsParallelIndexTest extends
AbstractGcsInputSourceSimpleIndexTest
+public class ITGcsToHdfsParallelIndexTest extends
AbstractGcsInputSourceParallelIndexTest
{
@Test(dataProvider = "resources")
public void testGcsIndexData(Pair<String, List> gcsInputSource) throws
Exception
diff --git
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITS3OverrideCredentialsIndexTest.java
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITS3OverrideCredentialsIndexTest.java
new file mode 100644
index 0000000..ff18b95
--- /dev/null
+++
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITS3OverrideCredentialsIndexTest.java
@@ -0,0 +1,289 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.tests.indexer;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import org.apache.druid.indexer.TaskState;
+import org.apache.druid.indexer.TaskStatusPlus;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.testing.guice.DruidTestModuleFactory;
+import org.apache.druid.tests.TestNGGroup;
+import org.testng.Assert;
+import org.testng.annotations.Guice;
+import org.testng.annotations.Test;
+
+import java.io.Closeable;
+import java.util.UUID;
+import java.util.function.Function;
+
+/**
+ * IMPORTANT:
+ * To run this test, you must:
+ * 1) Set the bucket and path for your data. This can be done by setting
-Ddruid.test.config.cloudBucket and
+ * -Ddruid.test.config.cloudPath or setting "cloud_bucket" and "cloud_path"
in the config file.
+ * 2) Copy wikipedia_index_data1.json, wikipedia_index_data2.json, and
wikipedia_index_data3.json
+ * located in integration-tests/src/test/resources/data/batch_index to your
S3 at the location set in step 1.
+ * 3) Provide -Doverride.config.path=<PATH_TO_FILE> with s3
credentials/configs set. See
+ * integration-tests/docker/environment-configs/override-examples/s3 for
env vars to provide.
+ * Note that druid_s3_accessKey and druid_s3_secretKey should be unset or
set to credentials that does not have
+ * access to the bucket and path specified in #1. The credentials that does
have access to the bucket and path
+ * specified in #1 should be set to the env variable OVERRIDE_S3_ACCESS_KEY
and OVERRIDE_S3_SECRET_KEY
+ */
+@Test(groups = TestNGGroup.S3_INGESTION)
+@Guice(moduleFactory = DruidTestModuleFactory.class)
+public class ITS3OverrideCredentialsIndexTest extends AbstractITBatchIndexTest
+{
+ private static final String INDEX_TASK_WITH_OVERRIDE =
"/indexer/wikipedia_override_credentials_index_task.json";
+ private static final String INDEX_TASK_WITHOUT_OVERRIDE =
"/indexer/wikipedia_cloud_simple_index_task.json";
+ private static final String INDEX_QUERIES_RESOURCE =
"/indexer/wikipedia_index_queries.json";
+ private static final String INDEX_DATASOURCE = "wikipedia_index_test_" +
UUID.randomUUID();
+ private static final String INPUT_SOURCE_OBJECTS_KEY = "objects";
+ private static final String WIKIPEDIA_DATA_1 = "wikipedia_index_data1.json";
+ private static final String WIKIPEDIA_DATA_2 = "wikipedia_index_data2.json";
+ private static final String WIKIPEDIA_DATA_3 = "wikipedia_index_data3.json";
+ private static final ImmutableList INPUT_SOURCE_OBJECTS_VALUE =
ImmutableList.of
+ (
+ ImmutableMap.of("bucket", "%%BUCKET%%", "path", "%%PATH%%" +
WIKIPEDIA_DATA_1),
+ ImmutableMap.of("bucket", "%%BUCKET%%", "path", "%%PATH%%" +
WIKIPEDIA_DATA_2),
+ ImmutableMap.of("bucket", "%%BUCKET%%", "path", "%%PATH%%" +
WIKIPEDIA_DATA_3)
+ );
+
+ @Test
+ public void testS3WithValidOverrideCredentialsIndexDataShouldSucceed()
throws Exception
+ {
+ try (
+ final Closeable ignored1 = unloader(INDEX_DATASOURCE +
config.getExtraDatasourceNameSuffix());
+ ) {
+ final Function<String, String> s3PropsTransform = spec -> {
+ try {
+ String inputSourceValue =
jsonMapper.writeValueAsString(INPUT_SOURCE_OBJECTS_VALUE);
+ inputSourceValue = StringUtils.replace(
+ inputSourceValue,
+ "%%BUCKET%%",
+ config.getCloudBucket()
+ );
+ inputSourceValue = StringUtils.replace(
+ inputSourceValue,
+ "%%PATH%%",
+ config.getCloudPath()
+ );
+
+ spec = StringUtils.replace(
+ spec,
+ "%%ACCESS_KEY_PROPERTY_VALUE%%",
+ jsonMapper.writeValueAsString(
+ ImmutableMap.of("type", "environment", "variable",
"OVERRIDE_S3_ACCESS_KEY")
+ )
+ );
+ spec = StringUtils.replace(
+ spec,
+ "%%SECRET_KEY_PROPERTY_VALUE%%",
+ jsonMapper.writeValueAsString(
+ ImmutableMap.of("type", "environment", "variable",
"OVERRIDE_S3_SECRET_KEY")
+ )
+ );
+
+ spec = StringUtils.replace(
+ spec,
+ "%%INPUT_SOURCE_TYPE%%",
+ "s3"
+ );
+ spec = StringUtils.replace(
+ spec,
+ "%%INPUT_SOURCE_PROPERTY_KEY%%",
+ INPUT_SOURCE_OBJECTS_KEY
+ );
+ return StringUtils.replace(
+ spec,
+ "%%INPUT_SOURCE_PROPERTY_VALUE%%",
+ inputSourceValue
+ );
+ }
+ catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ };
+
+ doIndexTest(
+ INDEX_DATASOURCE,
+ INDEX_TASK_WITH_OVERRIDE,
+ s3PropsTransform,
+ INDEX_QUERIES_RESOURCE,
+ false,
+ true,
+ true
+ );
+ }
+ }
+
+ @Test
+ public void testS3WithoutOverrideCredentialsIndexDataShouldFailed() throws
Exception
+ {
+ try {
+ final Function<String, String> s3PropsTransform = spec -> {
+ try {
+ String inputSourceValue =
jsonMapper.writeValueAsString(INPUT_SOURCE_OBJECTS_VALUE);
+ inputSourceValue = StringUtils.replace(
+ inputSourceValue,
+ "%%BUCKET%%",
+ config.getCloudBucket()
+ );
+ inputSourceValue = StringUtils.replace(
+ inputSourceValue,
+ "%%PATH%%",
+ config.getCloudPath()
+ );
+
+ spec = StringUtils.replace(
+ spec,
+ "%%INPUT_SOURCE_TYPE%%",
+ "s3"
+ );
+ spec = StringUtils.replace(
+ spec,
+ "%%INPUT_SOURCE_PROPERTY_KEY%%",
+ INPUT_SOURCE_OBJECTS_KEY
+ );
+ return StringUtils.replace(
+ spec,
+ "%%INPUT_SOURCE_PROPERTY_VALUE%%",
+ inputSourceValue
+ );
+ }
+ catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ };
+ final String fullDatasourceName = INDEX_DATASOURCE +
config.getExtraDatasourceNameSuffix();
+ final String taskSpec = s3PropsTransform.apply(
+ StringUtils.replace(
+ getResourceAsString(INDEX_TASK_WITHOUT_OVERRIDE),
+ "%%DATASOURCE%%",
+ fullDatasourceName
+ )
+ );
+ final String taskID = indexer.submitTask(taskSpec);
+ indexer.waitUntilTaskFails(taskID);
+ TaskStatusPlus taskStatusPlus = indexer.getTaskStatus(taskID);
+ // Index task is expected to fail as the default S3 Credentials in
Druid's config (druid.s3.accessKey and
+ // druid.s3.secretKey should not have access to the bucket and path for
our data. (Refer to the setup instruction
+ // at the top of this test class.
+ Assert.assertEquals(taskStatusPlus.getStatusCode(), TaskState.FAILED);
+ Assert.assertNotNull(taskStatusPlus.getErrorMsg());
+ Assert.assertTrue(
+
taskStatusPlus.getErrorMsg().contains("com.amazonaws.services.s3.model.AmazonS3Exception"),
+ "Expect task to fail with AmazonS3Exception");
+ }
+ finally {
+ // If the test pass, then there is no datasource to unload
+ closeQuietly(unloader(INDEX_DATASOURCE +
config.getExtraDatasourceNameSuffix()));
+ }
+ }
+
+ @Test
+ public void testS3WithInvalidOverrideCredentialsIndexDataShouldFailed()
throws Exception
+ {
+ try {
+ final Function<String, String> s3PropsTransform = spec -> {
+ try {
+ String inputSourceValue =
jsonMapper.writeValueAsString(INPUT_SOURCE_OBJECTS_VALUE);
+ inputSourceValue = StringUtils.replace(
+ inputSourceValue,
+ "%%BUCKET%%",
+ config.getCloudBucket()
+ );
+ inputSourceValue = StringUtils.replace(
+ inputSourceValue,
+ "%%PATH%%",
+ config.getCloudPath()
+ );
+
+ spec = StringUtils.replace(
+ spec,
+ "%%ACCESS_KEY_PROPERTY_VALUE%%",
+ jsonMapper.writeValueAsString(
+ ImmutableMap.of("type", "environment", "variable",
"NON_EXISTENT_INVALID_ENV_VAR")
+ )
+ );
+ spec = StringUtils.replace(
+ spec,
+ "%%SECRET_KEY_PROPERTY_VALUE%%",
+ jsonMapper.writeValueAsString(
+ ImmutableMap.of("type", "environment", "variable",
"NON_EXISTENT_INVALID_ENV_VAR")
+ )
+ );
+
+ spec = StringUtils.replace(
+ spec,
+ "%%INPUT_SOURCE_TYPE%%",
+ "s3"
+ );
+ spec = StringUtils.replace(
+ spec,
+ "%%INPUT_SOURCE_PROPERTY_KEY%%",
+ INPUT_SOURCE_OBJECTS_KEY
+ );
+ return StringUtils.replace(
+ spec,
+ "%%INPUT_SOURCE_PROPERTY_VALUE%%",
+ inputSourceValue
+ );
+ }
+ catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ };
+
+ final String fullDatasourceName = INDEX_DATASOURCE +
config.getExtraDatasourceNameSuffix();
+ final String taskSpec = s3PropsTransform.apply(
+ StringUtils.replace(
+ getResourceAsString(INDEX_TASK_WITH_OVERRIDE),
+ "%%DATASOURCE%%",
+ fullDatasourceName
+ )
+ );
+ final String taskID = indexer.submitTask(taskSpec);
+ indexer.waitUntilTaskFails(taskID);
+ TaskStatusPlus taskStatusPlus = indexer.getTaskStatus(taskID);
+ // Index task is expected to fail as the overrided s3 access key and s3
secret key cannot be null
+ Assert.assertEquals(taskStatusPlus.getStatusCode(), TaskState.FAILED);
+ Assert.assertNotNull(taskStatusPlus.getErrorMsg());
+ Assert.assertTrue(
+ taskStatusPlus.getErrorMsg().contains("IllegalArgumentException:
Access key cannot be null"),
+ "Expect task to fail with IllegalArgumentException: Access key
cannot be null");
+ }
+ finally {
+ // If the test pass, then there is no datasource to unload
+ closeQuietly(unloader(INDEX_DATASOURCE +
config.getExtraDatasourceNameSuffix()));
+ }
+ }
+
+ private void closeQuietly(Closeable closeable)
+ {
+ try {
+ if (closeable != null) {
+ closeable.close();
+ }
+ }
+ catch (Exception var2) {
+ }
+ }
+}
diff --git
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITS3ToHdfsParallelIndexTest.java
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITS3ToHdfsParallelIndexTest.java
index 6fb07cd..86782f4 100644
---
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITS3ToHdfsParallelIndexTest.java
+++
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITS3ToHdfsParallelIndexTest.java
@@ -41,7 +41,7 @@ import java.util.List;
*/
@Test(groups = TestNGGroup.HDFS_DEEP_STORAGE)
@Guice(moduleFactory = DruidTestModuleFactory.class)
-public class ITS3ToHdfsParallelIndexTest extends
AbstractS3InputSourceSimpleIndexTest
+public class ITS3ToHdfsParallelIndexTest extends
AbstractS3InputSourceParallelIndexTest
{
@Test(dataProvider = "resources")
public void testS3IndexData(Pair<String, List> s3InputSource) throws
Exception
diff --git
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITS3ToS3ParallelIndexTest.java
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITS3ToS3ParallelIndexTest.java
index 6d20907..dd52c9a 100644
---
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITS3ToS3ParallelIndexTest.java
+++
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITS3ToS3ParallelIndexTest.java
@@ -39,7 +39,7 @@ import java.util.List;
*/
@Test(groups = TestNGGroup.S3_DEEP_STORAGE)
@Guice(moduleFactory = DruidTestModuleFactory.class)
-public class ITS3ToS3ParallelIndexTest extends
AbstractS3InputSourceSimpleIndexTest
+public class ITS3ToS3ParallelIndexTest extends
AbstractS3InputSourceParallelIndexTest
{
@Test(dataProvider = "resources")
public void testS3IndexData(Pair<String, List> s3InputSource) throws
Exception
diff --git
a/integration-tests/src/test/resources/indexer/wikipedia_override_credentials_index_task.json
b/integration-tests/src/test/resources/indexer/wikipedia_override_credentials_index_task.json
new file mode 100644
index 0000000..3db2dcc
--- /dev/null
+++
b/integration-tests/src/test/resources/indexer/wikipedia_override_credentials_index_task.json
@@ -0,0 +1,85 @@
+{
+ "type": "index",
+ "spec": {
+ "dataSchema": {
+ "dataSource": "%%DATASOURCE%%",
+ "timestampSpec": {
+ "column": "timestamp"
+ },
+ "dimensionsSpec": {
+ "dimensions": [
+ "page",
+ {"type": "string", "name": "language", "createBitmapIndex": false},
+ "user",
+ "unpatrolled",
+ "newPage",
+ "robot",
+ "anonymous",
+ "namespace",
+ "continent",
+ "country",
+ "region",
+ "city"
+ ]
+ },
+ "metricsSpec": [
+ {
+ "type": "count",
+ "name": "count"
+ },
+ {
+ "type": "doubleSum",
+ "name": "added",
+ "fieldName": "added"
+ },
+ {
+ "type": "doubleSum",
+ "name": "deleted",
+ "fieldName": "deleted"
+ },
+ {
+ "type": "doubleSum",
+ "name": "delta",
+ "fieldName": "delta"
+ },
+ {
+ "name": "thetaSketch",
+ "type": "thetaSketch",
+ "fieldName": "user"
+ },
+ {
+ "name": "quantilesDoublesSketch",
+ "type": "quantilesDoublesSketch",
+ "fieldName": "delta"
+ },
+ {
+ "name": "HLLSketchBuild",
+ "type": "HLLSketchBuild",
+ "fieldName": "user"
+ }
+ ],
+ "granularitySpec": {
+ "segmentGranularity": "DAY",
+ "queryGranularity": "second",
+ "intervals" : [ "2013-08-31/2013-09-02" ]
+ }
+ },
+ "ioConfig": {
+ "type": "index",
+ "inputSource": {
+ "type": "%%INPUT_SOURCE_TYPE%%",
+ "properties": {
+ "accessKeyId": %%ACCESS_KEY_PROPERTY_VALUE%%,
+ "secretAccessKey": %%SECRET_KEY_PROPERTY_VALUE%%
+ },
+ "%%INPUT_SOURCE_PROPERTY_KEY%%": %%INPUT_SOURCE_PROPERTY_VALUE%%
+ },
+ "inputFormat": {
+ "type": "json"
+ }
+ },
+ "tuningConfig": {
+ "type": "index"
+ }
+ }
+}
\ No newline at end of file
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]