This is an automated email from the ASF dual-hosted git repository.
nehapawar pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-pinot.git
The following commit(s) were added to refs/heads/master by this push:
new febdcbc Use resourceAsStream to fetch resources in tests (#7181)
febdcbc is described below
commit febdcbc3552a6b6c3a4b02633676ebdc96ba1398
Author: Neha Pawar <[email protected]>
AuthorDate: Tue Jul 20 19:55:40 2021 -0700
Use resourceAsStream to fetch resources in tests (#7181)
---
.../pinot/common/utils/TarGzCompressionUtils.java | 14 +++++++--
.../tests/BaseClusterIntegrationTest.java | 33 +++++++++++-----------
.../tests/BaseClusterIntegrationTestSet.java | 19 ++++++-------
3 files changed, 37 insertions(+), 29 deletions(-)
diff --git
a/pinot-common/src/main/java/org/apache/pinot/common/utils/TarGzCompressionUtils.java
b/pinot-common/src/main/java/org/apache/pinot/common/utils/TarGzCompressionUtils.java
index bb2da81..b209044 100644
---
a/pinot-common/src/main/java/org/apache/pinot/common/utils/TarGzCompressionUtils.java
+++
b/pinot-common/src/main/java/org/apache/pinot/common/utils/TarGzCompressionUtils.java
@@ -100,10 +100,20 @@ public class TarGzCompressionUtils {
*/
public static List<File> untar(File inputFile, File outputDir)
throws IOException {
+ try (InputStream fileIn = Files.newInputStream(inputFile.toPath())) {
+ return untar(fileIn, outputDir);
+ }
+ }
+
+ /**
+ * Un-tars an input stream of a tar.gz file into a directory, returns all the
untarred files/directories.
+ * <p>For security reasons, the untarred files must reside in the output
directory.
+ */
+ public static List<File> untar(InputStream inputStream, File outputDir)
+ throws IOException {
String outputDirCanonicalPath = outputDir.getCanonicalPath();
List<File> untarredFiles = new ArrayList<>();
- try (InputStream fileIn = Files.newInputStream(inputFile.toPath());
- InputStream bufferedIn = new BufferedInputStream(fileIn);
+ try (InputStream bufferedIn = new BufferedInputStream(inputStream);
InputStream gzipIn = new GzipCompressorInputStream(bufferedIn);
ArchiveInputStream tarGzIn = new TarArchiveInputStream(gzipIn)) {
ArchiveEntry entry;
diff --git
a/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/BaseClusterIntegrationTest.java
b/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/BaseClusterIntegrationTest.java
index ca1cc89..e8c37f7 100644
---
a/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/BaseClusterIntegrationTest.java
+++
b/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/BaseClusterIntegrationTest.java
@@ -21,7 +21,7 @@ package org.apache.pinot.integration.tests;
import com.google.common.base.Function;
import java.io.File;
import java.io.IOException;
-import java.net.URL;
+import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.util.ArrayList;
@@ -271,9 +271,10 @@ public abstract class BaseClusterIntegrationTest extends
ClusterTest {
*/
protected Schema createSchema()
throws IOException {
- URL resourceUrl =
BaseClusterIntegrationTest.class.getClassLoader().getResource(getSchemaFileName());
- Assert.assertNotNull(resourceUrl);
- return Schema.fromFile(new File(resourceUrl.getFile()));
+ InputStream inputStream =
+
BaseClusterIntegrationTest.class.getClassLoader().getResourceAsStream(getSchemaFileName());
+ Assert.assertNotNull(inputStream);
+ return Schema.fromInputSteam(inputStream);
}
/**
@@ -378,11 +379,10 @@ public abstract class BaseClusterIntegrationTest extends
ClusterTest {
columnPartitionConfigMap.put(primaryKeyColumn, new
ColumnPartitionConfig("Murmur", numPartitions));
return new
TableConfigBuilder(TableType.REALTIME).setTableName(getTableName()).setSchemaName(getSchemaName())
- .setTimeColumnName(getTimeColumnName())
-
.setFieldConfigList(getFieldConfigs()).setNumReplicas(getNumReplicas()).setSegmentVersion(getSegmentVersion())
-
.setLoadMode(getLoadMode()).setTaskConfig(getTaskConfig()).setBrokerTenant(getBrokerTenant())
-
.setServerTenant(getServerTenant()).setIngestionConfig(getIngestionConfig()).setLLC(useLlc())
-
.setStreamConfigs(getStreamConfigs()).setNullHandlingEnabled(getNullHandlingEnabled())
+
.setTimeColumnName(getTimeColumnName()).setFieldConfigList(getFieldConfigs()).setNumReplicas(getNumReplicas())
+
.setSegmentVersion(getSegmentVersion()).setLoadMode(getLoadMode()).setTaskConfig(getTaskConfig())
+
.setBrokerTenant(getBrokerTenant()).setServerTenant(getServerTenant()).setIngestionConfig(getIngestionConfig())
+
.setLLC(useLlc()).setStreamConfigs(getStreamConfigs()).setNullHandlingEnabled(getNullHandlingEnabled())
.setRoutingConfig(new RoutingConfig(null, null,
RoutingConfig.STRICT_REPLICA_GROUP_INSTANCE_SELECTOR_TYPE))
.setSegmentPartitionConfig(new
SegmentPartitionConfig(columnPartitionConfigMap))
.setReplicaGroupStrategyConfig(new
ReplicaGroupStrategyConfig(primaryKeyColumn, 1))
@@ -457,9 +457,10 @@ public abstract class BaseClusterIntegrationTest extends
ClusterTest {
*/
protected List<File> unpackAvroData(File outputDir)
throws Exception {
- URL resourceUrl =
BaseClusterIntegrationTest.class.getClassLoader().getResource(getAvroTarFileName());
- Assert.assertNotNull(resourceUrl);
- return TarGzCompressionUtils.untar(new File(resourceUrl.getFile()),
outputDir);
+ InputStream inputStream =
+
BaseClusterIntegrationTest.class.getClassLoader().getResourceAsStream(getAvroTarFileName());
+ Assert.assertNotNull(inputStream);
+ return TarGzCompressionUtils.untar(inputStream, outputDir);
}
/**
@@ -470,8 +471,9 @@ public abstract class BaseClusterIntegrationTest extends
ClusterTest {
protected void pushAvroIntoKafka(List<File> avroFiles)
throws Exception {
- ClusterIntegrationTestUtils.pushAvroIntoKafka(avroFiles, "localhost:" +
getKafkaPort(), getKafkaTopic(),
- getMaxNumKafkaMessagesPerBatch(), getKafkaMessageHeader(),
getPartitionColumn());
+ ClusterIntegrationTestUtils
+ .pushAvroIntoKafka(avroFiles, "localhost:" + getKafkaPort(),
getKafkaTopic(), getMaxNumKafkaMessagesPerBatch(),
+ getKafkaMessageHeader(), getPartitionColumn());
}
protected List<File> getAllAvroFiles()
@@ -511,8 +513,7 @@ public abstract class BaseClusterIntegrationTest extends
ClusterTest {
protected void startKafka(int port) {
Properties kafkaConfig = KafkaStarterUtils.getDefaultKafkaConfiguration();
- _kafkaStarters = KafkaStarterUtils.startServers(getNumKafkaBrokers(),
port, getKafkaZKAddress(),
- kafkaConfig);
+ _kafkaStarters = KafkaStarterUtils.startServers(getNumKafkaBrokers(),
port, getKafkaZKAddress(), kafkaConfig);
_kafkaStarters.get(0)
.createTopic(getKafkaTopic(),
KafkaStarterUtils.getTopicCreationProps(getNumKafkaPartitions()));
}
diff --git
a/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/BaseClusterIntegrationTestSet.java
b/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/BaseClusterIntegrationTestSet.java
index b1e2b95..3059f1d 100644
---
a/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/BaseClusterIntegrationTestSet.java
+++
b/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/BaseClusterIntegrationTestSet.java
@@ -20,9 +20,8 @@ package org.apache.pinot.integration.tests;
import com.fasterxml.jackson.databind.JsonNode;
import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileReader;
-import java.net.URL;
+import java.io.InputStream;
+import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -360,11 +359,10 @@ public abstract class BaseClusterIntegrationTestSet
extends BaseClusterIntegrati
*/
public void testQueriesFromQueryFile()
throws Exception {
- URL resourceUrl =
BaseClusterIntegrationTestSet.class.getClassLoader().getResource(getQueryFileName());
- assertNotNull(resourceUrl);
- File queryFile = new File(resourceUrl.getFile());
+ InputStream inputStream =
BaseClusterIntegrationTestSet.class.getClassLoader().getResourceAsStream(getQueryFileName());
+ assertNotNull(inputStream);
- try (BufferedReader reader = new BufferedReader(new
FileReader(queryFile))) {
+ try (BufferedReader reader = new BufferedReader(new
InputStreamReader(inputStream))) {
String queryString;
while ((queryString = reader.readLine()) != null) {
// Skip commented line and empty line.
@@ -391,11 +389,10 @@ public abstract class BaseClusterIntegrationTestSet
extends BaseClusterIntegrati
*/
public void testSqlQueriesFromQueryFile()
throws Exception {
- URL resourceUrl =
BaseClusterIntegrationTestSet.class.getClassLoader().getResource(getSqlQueryFileName());
- assertNotNull(resourceUrl);
- File queryFile = new File(resourceUrl.getFile());
+ InputStream inputStream =
BaseClusterIntegrationTestSet.class.getClassLoader().getResourceAsStream(getSqlQueryFileName());
+ assertNotNull(inputStream);
- try (BufferedReader reader = new BufferedReader(new
FileReader(queryFile))) {
+ try (BufferedReader reader = new BufferedReader(new
InputStreamReader(inputStream))) {
String queryString;
while ((queryString = reader.readLine()) != null) {
// Skip commented line and empty line.
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]