This is an automated email from the ASF dual-hosted git repository.

kfaraz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/druid.git


The following commit(s) were added to refs/heads/master by this push:
     new 98312214dca Migrate oss integration test to embedded test framework 
(#19159)
98312214dca is described below

commit 98312214dcae724d1bb573eba4f8449d8c5f2fa2
Author: Clint Wylie <[email protected]>
AuthorDate: Sun Mar 15 20:40:57 2026 -0700

    Migrate oss integration test to embedded test framework (#19159)
    
    These tests are run against actual Alibaba Cloud OSS storage rather than a mock or a testcontainer.
    As such, they are disabled by default and can be run only when the required 
credentials are provided.
---
 embedded-tests/pom.xml                             |  18 +++
 .../AbstractOssInputSourceParallelIndexTest.java   | 114 +++++++++++++++++
 .../testing/embedded/oss/OssStorageResource.java   | 135 ++++++++++++++++++++
 .../druid/testing/embedded/oss/OssTestUtil.java    | 125 +++++++++++++++++++
 .../embedded/oss/OssToOssParallelIndexTest.java    |  44 +++++++
 .../java/org/apache/druid/tests/TestNGGroup.java   |   7 --
 .../AbstractOssInputSourceParallelIndexTest.java   | 136 ---------------------
 .../tests/indexer/ITOssToOssParallelIndexTest.java |  49 --------
 8 files changed, 436 insertions(+), 192 deletions(-)

diff --git a/embedded-tests/pom.xml b/embedded-tests/pom.xml
index 671d0097a21..dc5ca44937d 100644
--- a/embedded-tests/pom.xml
+++ b/embedded-tests/pom.xml
@@ -310,6 +310,12 @@
       <version>${project.parent.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.druid.extensions.contrib</groupId>
+      <artifactId>aliyun-oss-extensions</artifactId>
+      <version>${project.parent.version}</version>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.apache.druid.extensions</groupId>
       <artifactId>druid-parquet-extensions</artifactId>
@@ -620,6 +626,18 @@
       <version>0.13.0</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>com.aliyun.oss</groupId>
+      <artifactId>aliyun-sdk-oss</artifactId>
+      <version>3.11.3</version>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.xml.bind</groupId>
+          <artifactId>jaxb-api</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
   </dependencies>
 
   <build>
diff --git 
a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/oss/AbstractOssInputSourceParallelIndexTest.java
 
b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/oss/AbstractOssInputSourceParallelIndexTest.java
new file mode 100644
index 00000000000..e23ba61618b
--- /dev/null
+++ 
b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/oss/AbstractOssInputSourceParallelIndexTest.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testing.embedded.oss;
+
+import org.apache.druid.java.util.common.logger.Logger;
+import org.apache.druid.testing.embedded.EmbeddedDruidCluster;
+import 
org.apache.druid.testing.embedded.indexer.AbstractCloudInputSourceParallelIndexTest;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
+
+/**
+ * Abstract base for embedded OSS-to-OSS parallel index tests. These tests 
require real
+ * Alibaba Cloud OSS credentials and are skipped when the required system properties are not set.
+ *
+ * <p>Set the following runtime properties to enable the tests:
+ * <ul>
+ *   <li>{@code druid.testing.oss.access} – Aliyun access key ID</li>
+ *   <li>{@code druid.testing.oss.secret} – Aliyun secret access key</li>
+ *   <li>{@code druid.testing.oss.endpoint} – OSS endpoint (e.g. {@code 
oss-cn-hangzhou.aliyuncs.com})</li>
+ *   <li>{@code druid.testing.oss.bucket} – Bucket for deep storage and test 
data</li>
+ *   <li>{@code druid.testing.oss.path} – Optional key prefix for test input 
data (default: {@code path})</li>
+ * </ul>
+ */
+public abstract class AbstractOssInputSourceParallelIndexTest extends 
AbstractCloudInputSourceParallelIndexTest
+{
+  private static final Logger LOG = new 
Logger(AbstractOssInputSourceParallelIndexTest.class);
+
+  private final OssStorageResource ossStorageResource = new 
OssStorageResource();
+  private OssTestUtil oss;
+
+  @Override
+  protected void addResources(EmbeddedDruidCluster cluster)
+  {
+    cluster.addResource(ossStorageResource);
+  }
+
+  @Override
+  public String getCloudBucket(String inputSourceType)
+  {
+    return ossStorageResource.getBucket();
+  }
+
+  @Override
+  public String getCloudPath(String inputSourceType)
+  {
+    return ossStorageResource.getPath();
+  }
+
+  @BeforeAll
+  public void setupOss()
+  {
+    assumeTrue(
+        ossStorageResource.isConfigured(),
+        "Aliyun OSS credentials not configured. Set druid.testing.oss.access, 
druid.testing.oss.secret, "
+        + "druid.testing.oss.endpoint, and druid.testing.oss.bucket to run 
these tests."
+    );
+    final List<String> filesToUpload = new ArrayList<>();
+    final String localPath = "data/json/";
+    for (String file : fileList()) {
+      filesToUpload.add(localPath + file);
+    }
+    try {
+      oss = new OssTestUtil(
+          ossStorageResource.buildOssClient(),
+          ossStorageResource.getBucket(),
+          ossStorageResource.getPath()
+      );
+      oss.uploadDataFilesToOss(filesToUpload);
+    }
+    catch (Exception e) {
+      LOG.error(e, "Unable to upload files to OSS");
+      throw new RuntimeException(e);
+    }
+  }
+
+  @AfterEach
+  public void deleteSegmentsFromOss()
+  {
+    if (oss != null) {
+      oss.deleteFolderFromOss(dataSource);
+    }
+  }
+
+  @AfterAll
+  public void deleteDataFilesFromOss()
+  {
+    if (oss != null) {
+      oss.deleteFilesFromOss(fileList());
+    }
+  }
+}
diff --git 
a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/oss/OssStorageResource.java
 
b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/oss/OssStorageResource.java
new file mode 100644
index 00000000000..48357069d1b
--- /dev/null
+++ 
b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/oss/OssStorageResource.java
@@ -0,0 +1,135 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testing.embedded.oss;
+
+import com.aliyun.oss.OSS;
+import com.aliyun.oss.OSSClientBuilder;
+import org.apache.druid.common.config.Configs;
+import org.apache.druid.data.input.aliyun.OssInputSourceDruidModule;
+import org.apache.druid.storage.aliyun.OssStorageDruidModule;
+import org.apache.druid.testing.embedded.EmbeddedDruidCluster;
+import org.apache.druid.testing.embedded.EmbeddedResource;
+
+import javax.annotation.Nullable;
+
+/**
+ * Configures the embedded cluster to use Alibaba Cloud OSS for deep storage 
of segments
+ * and task logs. Credentials and endpoint are read from system properties:
+ * <ul>
+ *   <li>{@code druid.testing.oss.access} – Aliyun access key ID</li>
+ *   <li>{@code druid.testing.oss.secret} – Aliyun secret access key</li>
+ *   <li>{@code druid.testing.oss.endpoint} – OSS endpoint (e.g. {@code 
oss-cn-hangzhou.aliyuncs.com})</li>
+ *   <li>{@code druid.testing.oss.bucket} – Bucket for deep storage and test 
data</li>
+ *   <li>{@code druid.testing.oss.path} – Optional key prefix for test input 
data (default: {@code path})</li>
+ * </ul>
+ * Tests using this resource skip themselves when the runtime properties are 
not set.
+ */
+public class OssStorageResource implements EmbeddedResource
+{
+  public static final String ACCESS_KEY_PROPERTY = "druid.testing.oss.access";
+  public static final String SECRET_KEY_PROPERTY = "druid.testing.oss.secret";
+  public static final String ENDPOINT_PROPERTY = "druid.testing.oss.endpoint";
+  public static final String BUCKET_PROPERTY = "druid.testing.oss.bucket";
+  public static final String PATH_PROPERTY = "druid.testing.oss.path";
+
+  @Override
+  public void start()
+  {
+    // No container to start; configuration comes from system properties.
+  }
+
+  @Override
+  public void stop()
+  {
+    // Nothing to tear down.
+  }
+
+  @Override
+  public void onStarted(EmbeddedDruidCluster cluster)
+  {
+    if (!isConfigured()) {
+      // Credentials not available; tests will be skipped via assumeTrue() in 
@BeforeAll.
+      // Do not configure OSS on the cluster to avoid startup failures from 
missing properties.
+      return;
+    }
+
+    cluster.addExtension(OssStorageDruidModule.class);
+    cluster.addExtension(OssInputSourceDruidModule.class);
+
+    // Deep storage
+    cluster.addCommonProperty("druid.storage.type", "oss");
+    cluster.addCommonProperty("druid.storage.oss.bucket", getBucket());
+    cluster.addCommonProperty("druid.storage.oss.prefix", "druid/segments");
+
+    // Indexer task logs
+    cluster.addCommonProperty("druid.indexer.logs.type", "oss");
+    cluster.addCommonProperty("druid.indexer.logs.oss.bucket", getBucket());
+    cluster.addCommonProperty("druid.indexer.logs.oss.prefix", 
"druid/indexing-logs");
+
+    // OSS client credentials
+    cluster.addCommonProperty("druid.oss.endpoint", getEndpoint());
+    cluster.addCommonProperty("druid.oss.accessKey", getAccessKey());
+    cluster.addCommonProperty("druid.oss.secretKey", getSecretKey());
+  }
+
+  public OSS buildOssClient()
+  {
+    return new OSSClientBuilder().build(getEndpoint(), getAccessKey(), 
getSecretKey());
+  }
+
+  @Nullable
+  public static String getProperty(String name)
+  {
+    return System.getProperty(name);
+  }
+
+  public boolean isConfigured()
+  {
+    return getAccessKey() != null
+           && getSecretKey() != null
+           && getEndpoint() != null
+           && getBucket() != null;
+  }
+
+  public String getAccessKey()
+  {
+    return getProperty(ACCESS_KEY_PROPERTY);
+  }
+
+  public String getSecretKey()
+  {
+    return getProperty(SECRET_KEY_PROPERTY);
+  }
+
+  public String getEndpoint()
+  {
+    return getProperty(ENDPOINT_PROPERTY);
+  }
+
+  public String getBucket()
+  {
+    return getProperty(BUCKET_PROPERTY);
+  }
+
+  public String getPath()
+  {
+    return Configs.valueOrDefault(getProperty(PATH_PROPERTY), "path");
+  }
+}
diff --git 
a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/oss/OssTestUtil.java
 
b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/oss/OssTestUtil.java
new file mode 100644
index 00000000000..6ea5652f1cc
--- /dev/null
+++ 
b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/oss/OssTestUtil.java
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testing.embedded.oss;
+
+import com.aliyun.oss.OSS;
+import com.aliyun.oss.model.DeleteObjectsRequest;
+import com.aliyun.oss.model.ListObjectsRequest;
+import com.aliyun.oss.model.OSSObjectSummary;
+import com.aliyun.oss.model.ObjectListing;
+import org.apache.druid.java.util.common.logger.Logger;
+import org.apache.druid.testing.embedded.indexing.Resources;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class OssTestUtil
+{
+  private static final Logger LOG = new Logger(OssTestUtil.class);
+
+  private final OSS ossClient;
+  private final String bucket;
+  private final String path;
+
+  public OssTestUtil(OSS ossClient, String bucket, String path)
+  {
+    this.ossClient = ossClient;
+    this.bucket = bucket;
+    this.path = path;
+  }
+
+  /**
+   * Uploads a list of local resource-relative files to OSS at the configured 
bucket and path prefix.
+   *
+   * @param localFiles resource-relative paths to upload (e.g. {@code 
"data/json/tiny_wiki_1.json"})
+   */
+  public void uploadDataFilesToOss(List<String> localFiles)
+  {
+    List<String> ossObjectPaths = new ArrayList<>();
+    for (String file : localFiles) {
+      final String ossKey = path + "/" + file.substring(file.lastIndexOf('/') 
+ 1);
+      ossObjectPaths.add(ossKey);
+      try {
+        ossClient.putObject(bucket, ossKey, 
Resources.getFileForResource(file));
+        LOG.info("Uploaded [%s] to oss://%s/%s", file, bucket, ossKey);
+      }
+      catch (Exception e) {
+        LOG.error(e, "Unable to upload file [%s] to OSS", file);
+        deleteFilesFromOss(ossObjectPaths);
+        throw e;
+      }
+    }
+  }
+
+  /**
+   * Deletes the given filenames from the configured bucket and path prefix.
+   *
+   * @param fileNames bare file names (without path) to delete
+   */
+  public void deleteFilesFromOss(List<String> fileNames)
+  {
+    try {
+      final List<String> keys = new ArrayList<>();
+      for (String fileName : fileNames) {
+        keys.add(path + "/" + fileName);
+      }
+      final DeleteObjectsRequest request = new DeleteObjectsRequest(bucket);
+      request.setKeys(keys);
+      ossClient.deleteObjects(request);
+    }
+    catch (Exception e) {
+      LOG.warn(e, "Unable to delete data files from OSS");
+    }
+  }
+
+  /**
+   * Deletes all objects under the configured path prefix that belong to the 
given datasource folder.
+   *
+   * @param datasource folder to delete from OSS (typically the test 
datasource name)
+   */
+  public void deleteFolderFromOss(String datasource)
+  {
+    try {
+      final ListObjectsRequest listRequest = new ListObjectsRequest(bucket);
+      listRequest.setPrefix(path + "/" + datasource + "/");
+      ObjectListing listing = ossClient.listObjects(listRequest);
+      while (true) {
+        final List<String> keys = new ArrayList<>();
+        for (OSSObjectSummary summary : listing.getObjectSummaries()) {
+          keys.add(summary.getKey());
+        }
+        if (!keys.isEmpty()) {
+          final DeleteObjectsRequest deleteRequest = new 
DeleteObjectsRequest(bucket);
+          deleteRequest.setKeys(keys);
+          ossClient.deleteObjects(deleteRequest);
+        }
+        if (listing.isTruncated()) {
+          listRequest.setMarker(listing.getNextMarker());
+          listing = ossClient.listObjects(listRequest);
+        } else {
+          break;
+        }
+      }
+    }
+    catch (Exception e) {
+      LOG.warn(e, "Unable to delete folder from OSS");
+    }
+  }
+}
diff --git 
a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/oss/OssToOssParallelIndexTest.java
 
b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/oss/OssToOssParallelIndexTest.java
new file mode 100644
index 00000000000..333f7fd19d9
--- /dev/null
+++ 
b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/oss/OssToOssParallelIndexTest.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testing.embedded.oss;
+
+import org.apache.druid.java.util.common.Pair;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
+
+import java.util.List;
+
+/**
+ * Embedded parallel index test that reads from and writes to Alibaba Cloud 
OSS.
+ * These tests are skipped unless the system properties described in
+ * {@link AbstractOssInputSourceParallelIndexTest} are set.
+ * <p>
+ * Disclaimer: this test has never been run - it was ported from the legacy 
integration-tests module to the 'embedded'
+ * Druid cluster test framework.
+ */
+public class OssToOssParallelIndexTest extends 
AbstractOssInputSourceParallelIndexTest
+{
+  @ParameterizedTest
+  @MethodSource("resources")
+  public void testOssIndexData(Pair<String, List<?>> ossInputSource) throws 
Exception
+  {
+    doTest(ossInputSource, new Pair<>(false, false), "oss", null);
+  }
+}
diff --git 
a/integration-tests/src/test/java/org/apache/druid/tests/TestNGGroup.java 
b/integration-tests/src/test/java/org/apache/druid/tests/TestNGGroup.java
index 178e55cb2e9..b9f52e3375d 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/TestNGGroup.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/TestNGGroup.java
@@ -49,13 +49,6 @@ public class TestNGGroup
    */
   public static final String AZURE_DEEP_STORAGE = "azure-deep-storage";
 
-  /**
-   * This group is not part of CI. To run this group, azure 
configs/credentials for your oss must be provided in a file.
-   * The path of the file must then be pass to mvn with 
-Doverride.config.path=<PATH_TO_FILE>
-   * See integration-tests/docker/environment-configs/override-examples/oss 
for env vars to provide.
-   */
-  public static final String ALIYUN_OSS_DEEP_STORAGE = 
"aliyun-oss-deep-storage";
-
   /**
    * This group is not part of CI. To run this group, hadoop configs must be 
provided in a file. The path of the file
    * must then be pass to mvn with -Doverride.config.path=<PATH_TO_FILE>
diff --git 
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractOssInputSourceParallelIndexTest.java
 
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractOssInputSourceParallelIndexTest.java
deleted file mode 100644
index 1d91d8e6b4f..00000000000
--- 
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractOssInputSourceParallelIndexTest.java
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.tests.indexer;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import org.apache.druid.indexer.partitions.DynamicPartitionsSpec;
-import org.apache.druid.java.util.common.Pair;
-import org.apache.druid.java.util.common.StringUtils;
-import org.testng.annotations.DataProvider;
-
-import java.io.Closeable;
-import java.util.List;
-import java.util.UUID;
-import java.util.function.Function;
-
-public abstract class AbstractOssInputSourceParallelIndexTest extends 
AbstractITBatchIndexTest
-{
-  private static final String INDEX_TASK = 
"/indexer/wikipedia_cloud_index_task.json";
-  private static final String INDEX_QUERIES_RESOURCE = 
"/indexer/wikipedia_index_queries.json";
-  private static final String INPUT_SOURCE_URIS_KEY = "uris";
-  private static final String INPUT_SOURCE_PREFIXES_KEY = "prefixes";
-  private static final String INPUT_SOURCE_OBJECTS_KEY = "objects";
-  private static final String WIKIPEDIA_DATA_1 = "wikipedia_index_data1.json";
-  private static final String WIKIPEDIA_DATA_2 = "wikipedia_index_data2.json";
-  private static final String WIKIPEDIA_DATA_3 = "wikipedia_index_data3.json";
-
-  @DataProvider
-  public static Object[][] resources()
-  {
-    return new Object[][]{
-        {new Pair<>(INPUT_SOURCE_URIS_KEY,
-                    ImmutableList.of(
-                        "oss://%%BUCKET%%/%%PATH%%" + WIKIPEDIA_DATA_1,
-                        "oss://%%BUCKET%%/%%PATH%%" + WIKIPEDIA_DATA_2,
-                        "oss://%%BUCKET%%/%%PATH%%" + WIKIPEDIA_DATA_3
-                    )
-        )},
-        {new Pair<>(INPUT_SOURCE_PREFIXES_KEY,
-                    ImmutableList.of(
-                        "oss://%%BUCKET%%/%%PATH%%"
-                    )
-        )},
-        {new Pair<>(INPUT_SOURCE_OBJECTS_KEY,
-                    ImmutableList.of(
-                        ImmutableMap.of("bucket", "%%BUCKET%%", "path", 
"%%PATH%%" + WIKIPEDIA_DATA_1),
-                        ImmutableMap.of("bucket", "%%BUCKET%%", "path", 
"%%PATH%%" + WIKIPEDIA_DATA_2),
-                        ImmutableMap.of("bucket", "%%BUCKET%%", "path", 
"%%PATH%%" + WIKIPEDIA_DATA_3)
-                    )
-        )}
-    };
-  }
-
-  void doTest(
-      Pair<String, List> inputSource,
-      Pair<Boolean, Boolean> segmentAvailabilityConfirmationPair
-  ) throws Exception
-  {
-    final String indexDatasource = "wikipedia_index_test_" + UUID.randomUUID();
-    try (
-        final Closeable ignored1 = unloader(indexDatasource + 
config.getExtraDatasourceNameSuffix());
-    ) {
-      final Function<String, String> propsTransform = spec -> {
-        try {
-          String inputSourceValue = 
jsonMapper.writeValueAsString(inputSource.rhs);
-          inputSourceValue = StringUtils.replace(
-              inputSourceValue,
-              "%%BUCKET%%",
-              config.getCloudBucket()
-          );
-          inputSourceValue = StringUtils.replace(
-              inputSourceValue,
-              "%%PATH%%",
-              config.getCloudPath()
-          );
-          spec = StringUtils.replace(
-              spec,
-              "%%INPUT_FORMAT_TYPE%%",
-              InputFormatDetails.JSON.getInputFormatType()
-          );
-          spec = StringUtils.replace(
-              spec,
-              "%%PARTITIONS_SPEC%%",
-              jsonMapper.writeValueAsString(new DynamicPartitionsSpec(null, 
null))
-          );
-          spec = StringUtils.replace(
-              spec,
-              "%%INPUT_SOURCE_TYPE%%",
-              "oss"
-          );
-          spec = StringUtils.replace(
-              spec,
-              "%%INPUT_SOURCE_PROPERTY_KEY%%",
-              inputSource.lhs
-          );
-          return StringUtils.replace(
-              spec,
-              "%%INPUT_SOURCE_PROPERTY_VALUE%%",
-              inputSourceValue
-          );
-        }
-        catch (Exception e) {
-          throw new RuntimeException(e);
-        }
-      };
-
-      doIndexTest(
-          indexDatasource,
-          INDEX_TASK,
-          propsTransform,
-          INDEX_QUERIES_RESOURCE,
-          false,
-          true,
-          true,
-          segmentAvailabilityConfirmationPair
-      );
-    }
-  }
-}
diff --git 
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITOssToOssParallelIndexTest.java
 
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITOssToOssParallelIndexTest.java
deleted file mode 100644
index 003b41452a5..00000000000
--- 
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITOssToOssParallelIndexTest.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.tests.indexer;
-
-import org.apache.druid.java.util.common.Pair;
-import org.apache.druid.testing.guice.DruidTestModuleFactory;
-import org.apache.druid.tests.TestNGGroup;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-import java.util.List;
-
-/**
- * IMPORTANT:
- * To run this test, you must:
- * 1) Set the bucket and path for your data. This can be done by setting 
-Ddruid.test.config.cloudBucket and
- *    -Ddruid.test.config.cloudPath or setting "cloud_bucket" and "cloud_path" 
in the config file.
- * 2) Copy wikipedia_index_data1.json, wikipedia_index_data2.json, and 
wikipedia_index_data3.json
- *    located in integration-tests/src/test/resources/data/batch_index/json to 
your Aliyun OSS at the location set in step 1.
- * 3) Provide -Doverride.config.path=<PATH_TO_FILE> with Aliyun OSS 
credentials/configs set. See
- *    integration-tests/docker/environment-configs/override-examples/oss for 
env vars to provide.
- */
-@Test(groups = TestNGGroup.ALIYUN_OSS_DEEP_STORAGE)
-@Guice(moduleFactory = DruidTestModuleFactory.class)
-public class ITOssToOssParallelIndexTest extends 
AbstractOssInputSourceParallelIndexTest
-{
-  @Test(dataProvider = "resources")
-  public void testAliyunOssIndexData(Pair<String, List> ossInputSource) throws 
Exception
-  {
-    doTest(ossInputSource, new Pair<>(false, false));
-  }
-}


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to