This is an automated email from the ASF dual-hosted git repository.
ccaominh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/druid.git
The following commit(s) were added to refs/heads/master by this push:
new 1ced3b3 IntelliJ inspections cleanup (#9339)
1ced3b3 is described below
commit 1ced3b33fb1642ee71e542940e4e2a4840e7a48c
Author: Suneet Saldanha <[email protected]>
AuthorDate: Fri Apr 10 10:04:40 2020 -0700
IntelliJ inspections cleanup (#9339)
* IntelliJ inspections cleanup
* Standard Charset object can be used
* Redundant Collection.addAll() call
* String literal concatenation missing whitespace
* Statement with empty body
* Redundant Collection operation
* StringBuilder can be replaced with String
* Type parameter hides visible type
* fix warnings in test code
* more test fixes
* remove string concatenation inspection error
* fix extra curly brace
* cleanup AzureTestUtils
* fix charsets for RangerAdminClient
* review comments
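
For readers unfamiliar with the first inspection above, a minimal sketch of the CharsetObjectCanBeUsed fix (a standalone illustration with hypothetical names, not code from this commit):

    import java.io.UnsupportedEncodingException;
    import java.nio.charset.StandardCharsets;

    public class CharsetExample
    {
      public static void main(String[] args) throws UnsupportedEncodingException
      {
        // Old style: a runtime charset lookup by name, plus a checked
        // UnsupportedEncodingException that can never reasonably happen.
        byte[] before = "druid".getBytes("UTF-8");
        // New style: StandardCharsets.UTF_8 is a constant, resolved once,
        // and the Charset overload throws no checked exception.
        byte[] after = "druid".getBytes(StandardCharsets.UTF_8);
        System.out.println(before.length == after.length); // true
      }
    }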
---
.idea/inspectionProfiles/Druid.xml | 9 +-
.../druid/benchmark/query/GroupByBenchmark.java | 2 +-
.../druid/benchmark/query/SearchBenchmark.java | 2 +-
.../druid/benchmark/query/TimeseriesBenchmark.java | 2 +-
.../druid/benchmark/query/TopNBenchmark.java | 2 +-
.../query/timecompare/TimeCompareBenchmark.java | 4 +-
.../java/util/http/client/JankyServersTest.java | 1 +
.../io/AppendableByteArrayInputStreamTest.java | 1 +
.../emitter/graphite/WhiteListBasedConverter.java | 3 +-
.../apache/druid/storage/azure/AzureTestUtils.java | 3 +-
.../hll/HllSketchUnionPostAggregator.java | 4 +-
.../datasketches/theta/SketchAggregationTest.java | 4 +-
.../theta/SketchAggregationWithSimpleDataTest.java | 4 +-
.../theta/oldapi/OldApiSketchAggregationTest.java | 4 +-
.../ranger/authorizer/RangerAdminClientImpl.java | 3 +-
.../histogram/FixedBucketsHistogramTest.java | 35 --
.../druid/storage/s3/S3DataSegmentPullerTest.java | 5 +-
.../java/org/apache/druid/indexer/JobHelper.java | 18 +-
.../druid/indexer/IndexGeneratorJobTest.java | 555 ++++++++++-----------
...PendingTaskBasedWorkerProvisioningStrategy.java | 2 +-
.../supervisor/SeekableStreamSupervisor.java | 4 +-
.../druid/tests/indexer/AbstractIndexerTest.java | 3 +-
.../druid/query/groupby/having/AndHavingSpec.java | 8 +-
.../query/groupby/having/EqualToHavingSpec.java | 10 +-
.../groupby/having/GreaterThanHavingSpec.java | 10 +-
.../query/groupby/having/LessThanHavingSpec.java | 10 +-
.../druid/query/groupby/having/NotHavingSpec.java | 8 +-
.../druid/query/groupby/having/OrHavingSpec.java | 8 +-
.../druid/query/search/CursorOnlyStrategy.java | 11 +-
.../ByteBufferMinMaxOffsetHeapTest.java | 6 +-
.../org/apache/druid/client/DirectDruidClient.java | 2 +-
.../org/apache/druid/server/QueryResourceTest.java | 21 +-
.../org/apache/druid/cli/CliHadoopIndexer.java | 13 +-
.../java/org/apache/druid/cli/ExportMetadata.java | 48 +-
34 files changed, 389 insertions(+), 436 deletions(-)
diff --git a/.idea/inspectionProfiles/Druid.xml b/.idea/inspectionProfiles/Druid.xml
index a266eb9..4025da7 100644
--- a/.idea/inspectionProfiles/Druid.xml
+++ b/.idea/inspectionProfiles/Druid.xml
@@ -20,12 +20,14 @@
     <inspection_tool class="CatchMayIgnoreException" enabled="true" level="WARNING" enabled_by_default="true">
       <option name="m_ignoreCatchBlocksWithComments" value="false" />
     </inspection_tool>
+    <inspection_tool class="CharsetObjectCanBeUsed" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="CheckDtdRefs" enabled="true" level="Non-TeamCity Error" enabled_by_default="true" />
     <inspection_tool class="CheckValidXmlInScriptTagBody" enabled="true" level="WARNING" enabled_by_default="true" />
     <inspection_tool class="CheckXmlFileWithXercesValidator" enabled="true" level="Non-TeamCity Error" enabled_by_default="true" />
     <inspection_tool class="ClassGetClass" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="ClassInitializerMayBeStatic" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="ClassNewInstance" enabled="true" level="WARNING" enabled_by_default="true" />
+    <inspection_tool class="CollectionAddAllCanBeReplacedWithConstructor" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="CollectionAddedToSelf" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="ComparableImplementedButEqualsNotOverridden" enabled="true" level="WARNING" enabled_by_default="true" />
     <inspection_tool class="ComparatorMethodParameterNotUsed" enabled="true" level="ERROR" enabled_by_default="true" />
@@ -45,7 +47,7 @@
     </inspection_tool>
     <inspection_tool class="DuplicateThrows" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="EmptyInitializer" enabled="true" level="ERROR" enabled_by_default="true" />
-    <inspection_tool class="EmptyStatementBody" enabled="true" level="WARNING" enabled_by_default="true">
+    <inspection_tool class="EmptyStatementBody" enabled="true" level="ERROR" enabled_by_default="true">
       <option name="m_reportEmptyBlocks" value="true" />
       <option name="commentsAreContent" value="true" />
     </inspection_tool>
@@ -153,6 +155,7 @@
     <inspection_tool class="OverwrittenKey" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="PointlessIndexOfComparison" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="PrimitiveArrayArgumentToVariableArgMethod" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="RedundantCollectionOperation" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="RedundantStringOperation" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="RedundantThrows" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="RedundantTypeArguments" enabled="true" level="ERROR" enabled_by_default="true" />
@@ -384,6 +387,7 @@
     <inspection_tool class="StaticCallOnSubclass" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="StaticFieldReferenceOnSubclass" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="StaticPseudoFunctionalStyleMethod" enabled="true" level="INFORMATION" enabled_by_default="true" />
+    <inspection_tool class="StringBufferReplaceableByString" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="StringConcatenationInFormatCall" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="StringConcatenationInLoops" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="StringConcatenationInMessageFormatCall" enabled="true" level="ERROR" enabled_by_default="true" />
@@ -415,6 +419,7 @@
     <inspection_tool class="ToArrayCallWithZeroLengthArrayArgument" enabled="true" level="WARNING" enabled_by_default="true">
       <option name="myMode" value="BY_LEVEL" />
     </inspection_tool>
+    <inspection_tool class="TypeParameterHidesVisibleType" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="UnnecessaryCallToStringValueOf" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="UnnecessaryEnumModifier" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="UnnecessaryFullyQualifiedName" enabled="true" level="WARNING" enabled_by_default="true">
@@ -470,4 +475,4 @@
       <option name="ADD_NONJAVA_TO_ENTRIES" value="true" />
     </inspection_tool>
   </profile>
-</component>
\ No newline at end of file
+</component>
diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java
index b1f910b..02107f0 100644
--- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java
+++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/GroupByBenchmark.java
@@ -735,7 +735,7 @@ public class GroupByBenchmark
{
List<QueryRunner<ResultRow>> runners = new ArrayList<>();
for (int i = 0; i < numSegments; i++) {
- String segmentName = "qIndex" + i;
+ String segmentName = "qIndex " + i;
QueryRunner<ResultRow> runner = QueryBenchmarkUtil.makeQueryRunner(
factory,
SegmentId.dummy(segmentName),
diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SearchBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SearchBenchmark.java
index 64746c7..06320c3 100644
--- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SearchBenchmark.java
+++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SearchBenchmark.java
@@ -444,7 +444,7 @@ public class SearchBenchmark
List<QueryRunner<Row>> singleSegmentRunners = new ArrayList<>();
QueryToolChest toolChest = factory.getToolchest();
for (int i = 0; i < numSegments; i++) {
- String segmentName = "qIndex" + i;
+ String segmentName = "qIndex " + i;
final QueryRunner<Result<SearchResultValue>> runner =
QueryBenchmarkUtil.makeQueryRunner(
factory,
SegmentId.dummy(segmentName),
diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java
index 3077abf..bf72eea 100644
--- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java
+++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java
@@ -385,7 +385,7 @@ public class TimeseriesBenchmark
List<QueryRunner<Result<TimeseriesResultValue>>> singleSegmentRunners =
new ArrayList<>();
QueryToolChest toolChest = factory.getToolchest();
for (int i = 0; i < numSegments; i++) {
- SegmentId segmentId = SegmentId.dummy("qIndex" + i);
+ SegmentId segmentId = SegmentId.dummy("qIndex " + i);
QueryRunner<Result<TimeseriesResultValue>> runner =
QueryBenchmarkUtil.makeQueryRunner(
factory,
segmentId,
diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/TopNBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/TopNBenchmark.java
index afaf122..c9ccfd7 100644
--- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/TopNBenchmark.java
+++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/TopNBenchmark.java
@@ -350,7 +350,7 @@ public class TopNBenchmark
List<QueryRunner<Result<TopNResultValue>>> singleSegmentRunners = new
ArrayList<>();
QueryToolChest toolChest = factory.getToolchest();
for (int i = 0; i < numSegments; i++) {
- SegmentId segmentId = SegmentId.dummy("qIndex" + i);
+ SegmentId segmentId = SegmentId.dummy("qIndex " + i);
QueryRunner<Result<TopNResultValue>> runner =
QueryBenchmarkUtil.makeQueryRunner(
factory,
segmentId,
diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/timecompare/TimeCompareBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/timecompare/TimeCompareBenchmark.java
index 2ac67af..f87773d 100644
--- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/timecompare/TimeCompareBenchmark.java
+++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/timecompare/TimeCompareBenchmark.java
@@ -346,7 +346,7 @@ public class TimeCompareBenchmark
List<QueryRunner<Result<TopNResultValue>>> singleSegmentRunners = new
ArrayList<>();
QueryToolChest toolChest = topNFactory.getToolchest();
for (int i = 0; i < numSegments; i++) {
- SegmentId segmentId = SegmentId.dummy("qIndex" + i);
+ SegmentId segmentId = SegmentId.dummy("qIndex " + i);
QueryRunner<Result<TopNResultValue>> runner =
QueryBenchmarkUtil.makeQueryRunner(
topNFactory,
segmentId,
@@ -372,7 +372,7 @@ public class TimeCompareBenchmark
List<QueryRunner<Result<TimeseriesResultValue>>> singleSegmentRunnersT =
new ArrayList<>();
QueryToolChest toolChestT = timeseriesFactory.getToolchest();
for (int i = 0; i < numSegments; i++) {
- SegmentId segmentId = SegmentId.dummy("qIndex" + i);
+ SegmentId segmentId = SegmentId.dummy("qIndex " + i);
QueryRunner<Result<TimeseriesResultValue>> runner =
QueryBenchmarkUtil.makeQueryRunner(
timeseriesFactory,
segmentId,
diff --git a/core/src/test/java/org/apache/druid/java/util/http/client/JankyServersTest.java b/core/src/test/java/org/apache/druid/java/util/http/client/JankyServersTest.java
index 69ded27..3cc2d06 100644
--- a/core/src/test/java/org/apache/druid/java/util/http/client/JankyServersTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/http/client/JankyServersTest.java
@@ -80,6 +80,7 @@ public class JankyServersTest
InputStream in = clientSocket.getInputStream()
) {
while (in.read() != -1) {
+ /* Do nothing. Read bytes till the end of the stream. */
}
}
catch (Exception e) {
diff --git a/core/src/test/java/org/apache/druid/java/util/http/client/io/AppendableByteArrayInputStreamTest.java b/core/src/test/java/org/apache/druid/java/util/http/client/io/AppendableByteArrayInputStreamTest.java
index a0d3bec..780cd12 100644
--- a/core/src/test/java/org/apache/druid/java/util/http/client/io/AppendableByteArrayInputStreamTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/http/client/io/AppendableByteArrayInputStreamTest.java
@@ -229,6 +229,7 @@ public class AppendableByteArrayInputStreamTest
try {
byte[] readBytes = new byte[10];
while (in.read(readBytes) != -1) {
+ /* Do nothing. Read bytes till the end of the stream. */
}
return readBytes;
}
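
Both hunks above only add a comment to an intentionally empty loop body. A minimal sketch (hypothetical stream, not code from this commit) of why the now-ERROR-level EmptyStatementBody inspection wants that comment:

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    public class DrainExample
    {
      public static void main(String[] args) throws IOException
      {
        InputStream in = new ByteArrayInputStream(new byte[]{1, 2, 3});
        // A bare empty body is indistinguishable from an accidental stray
        // semicolon or a forgotten implementation; with commentsAreContent=true
        // the comment below satisfies the inspection and documents the intent.
        while (in.read() != -1) {
          /* Do nothing. Read bytes till the end of the stream. */
        }
        System.out.println("stream drained");
      }
    }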
diff --git a/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/WhiteListBasedConverter.java b/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/WhiteListBasedConverter.java
index 2528a7b..8027e23 100644
--- a/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/WhiteListBasedConverter.java
+++ b/extensions-contrib/graphite-emitter/src/main/java/org/apache/druid/emitter/graphite/WhiteListBasedConverter.java
@@ -41,6 +41,7 @@ import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
@@ -280,7 +281,7 @@ public class WhiteListBasedConverter implements DruidToGraphiteEventConverter
       LOGGER.info("using default whiteList map located at [%s]", actualPath);
       fileContent = Resources.toString(resource, Charset.defaultCharset());
     } else {
-      fileContent = Files.asCharSource(new File(mapPath), Charset.forName("UTF-8")).read();
+      fileContent = Files.asCharSource(new File(mapPath), StandardCharsets.UTF_8).read();
     }
     return mapper.readerFor(new TypeReference<ImmutableSortedMap<String, ImmutableSet<String>>>()
{
diff --git a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureTestUtils.java b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureTestUtils.java
index 5c89041..efd1b92 100644
--- a/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureTestUtils.java
+++ b/extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureTestUtils.java
@@ -29,6 +29,7 @@ import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Date;
import java.util.HashMap;
@@ -42,7 +43,7 @@ public class AzureTestUtils extends EasyMockSupport
   public static File createZipTempFile(final String segmentFileName, final String content) throws IOException
   {
     final File zipFile = Files.createTempFile("index", ".zip").toFile();
-    final byte[] value = content.getBytes("utf8");
+    final byte[] value = content.getBytes(StandardCharsets.UTF_8);
     try (ZipOutputStream zipStream = new ZipOutputStream(new FileOutputStream(zipFile))) {
zipStream.putNextEntry(new ZipEntry(segmentFileName));
diff --git a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/hll/HllSketchUnionPostAggregator.java b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/hll/HllSketchUnionPostAggregator.java
index 4a5518b..21e170e 100644
--- a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/hll/HllSketchUnionPostAggregator.java
+++ b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/hll/HllSketchUnionPostAggregator.java
@@ -127,8 +127,8 @@ public class HllSketchUnionPostAggregator implements PostAggregator
return getClass().getSimpleName() + "{" +
"name='" + name + '\'' +
", fields=" + fields +
- "lgK=" + lgK +
- "tgtHllType=" + tgtHllType +
+ ", lgK=" + lgK +
+ ", tgtHllType=" + tgtHllType +
"}";
}
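
The hunk above fixes a "string literal concatenation missing whitespace" finding: without the leading ", " the fields ran together in log output. A self-contained sketch of the before/after behavior (hypothetical class, not the actual post-aggregator):

    public class ToStringExample
    {
      private final String name = "agg";
      private final int lgK = 12;

      @Override
      public String toString()
      {
        // With the ", " separators missing, this printed
        // "ToStringExample{name='agg'lgK=12}" -- fields fused together.
        return getClass().getSimpleName() + "{" +
               "name='" + name + '\'' +
               ", lgK=" + lgK +
               "}";
      }

      public static void main(String[] args)
      {
        System.out.println(new ToStringExample()); // ToStringExample{name='agg', lgK=12}
      }
    }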
diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java
index f639317..19716d9 100644
--- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java
+++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java
@@ -54,7 +54,7 @@ import org.junit.runners.Parameterized;
import java.io.File;
import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -508,7 +508,7 @@ public class SketchAggregationTest
{
return Files.asCharSource(
        new File(SketchAggregationTest.class.getClassLoader().getResource(fileName).getFile()),
- Charset.forName("UTF-8")
+ StandardCharsets.UTF_8
).read();
}
}
diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/SketchAggregationWithSimpleDataTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/SketchAggregationWithSimpleDataTest.java
index cca8381..a528a31 100644
--- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/SketchAggregationWithSimpleDataTest.java
+++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/SketchAggregationWithSimpleDataTest.java
@@ -48,7 +48,7 @@ import org.junit.runners.Parameterized;
import java.io.File;
import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
@@ -317,7 +317,7 @@ public class SketchAggregationWithSimpleDataTest extends InitializedNullHandling
{
return Files.asCharSource(
        new File(SketchAggregationTest.class.getClassLoader().getResource(fileName).getFile()),
- Charset.forName("UTF-8")
+ StandardCharsets.UTF_8
).read();
}
}
diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java
index de327c0..ed8cf36 100644
--- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java
+++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java
@@ -51,7 +51,7 @@ import org.junit.runners.Parameterized;
import java.io.File;
import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
@@ -257,7 +257,7 @@ public class OldApiSketchAggregationTest extends InitializedNullHandlingTest
{
return Files.asCharSource(
        new File(OldApiSketchAggregationTest.class.getClassLoader().getResource(fileName).getFile()),
- Charset.forName("UTF-8")
+ StandardCharsets.UTF_8
).read();
}
}
diff --git a/extensions-core/druid-ranger-security/src/test/java/org/apache/druid/security/ranger/authorizer/RangerAdminClientImpl.java b/extensions-core/druid-ranger-security/src/test/java/org/apache/druid/security/ranger/authorizer/RangerAdminClientImpl.java
index c28bdc3..3d72018 100644
--- a/extensions-core/druid-ranger-security/src/test/java/org/apache/druid/security/ranger/authorizer/RangerAdminClientImpl.java
+++ b/extensions-core/druid-ranger-security/src/test/java/org/apache/druid/security/ranger/authorizer/RangerAdminClientImpl.java
@@ -26,6 +26,7 @@ import org.apache.ranger.admin.client.AbstractRangerAdminClient;
import org.apache.ranger.plugin.util.ServicePolicies;
import java.io.File;
+import java.nio.charset.StandardCharsets;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -62,7 +63,7 @@ public class RangerAdminClientImpl extends AbstractRangerAdminClient
     Path cachePath = FileSystems.getDefault().getPath(basedir, "/src/test/resources/" + CACHE_FILE_NAME);
     byte[] cacheBytes = Files.readAllBytes(cachePath);
-    return gson.fromJson(new String(cacheBytes, "UTF8"), ServicePolicies.class);
+    return gson.fromJson(new String(cacheBytes, StandardCharsets.UTF_8), ServicePolicies.class);
}
}
diff --git a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/FixedBucketsHistogramTest.java b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/FixedBucketsHistogramTest.java
index f89167d..bf92974 100644
--- a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/FixedBucketsHistogramTest.java
+++ b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/FixedBucketsHistogramTest.java
@@ -83,11 +83,6 @@ public class FixedBucketsHistogramTest
);
float[] quantiles = h.percentilesFloat(new double[]{12.5f, 50.0f, 98f});
- double[] doubles = new double[VALUES2.length];
-
- for (int i = 0; i < doubles.length; i++) {
- doubles[i] = VALUES2[i];
- }
Assert.assertArrayEquals(
new float[]{2.5f, 20.0f, 46.76f},
@@ -164,11 +159,6 @@ public class FixedBucketsHistogramTest
);
float[] quantiles = h.percentilesFloat(new double[]{12.5f, 50.0f, 98f});
- double[] doubles = new double[VALUES2.length];
-
- for (int i = 0; i < doubles.length; i++) {
- doubles[i] = VALUES2[i];
- }
Assert.assertArrayEquals(
new float[]{3.0f, 20.0f, 47.52f},
@@ -189,11 +179,6 @@ public class FixedBucketsHistogramTest
);
float[] quantiles = h.percentilesFloat(new double[]{12.5f, 50.0f, 98f});
- double[] doubles = new double[VALUES3.length];
-
- for (int i = 0; i < doubles.length; i++) {
- doubles[i] = VALUES3[i];
- }
Assert.assertArrayEquals(
new float[]{14.857142f, 20.0f, 28.4f},
@@ -214,11 +199,6 @@ public class FixedBucketsHistogramTest
);
float[] quantiles = h.percentilesFloat(new double[]{12.5f, 50.0f, 98f});
- double[] doubles = new double[VALUES4.length];
-
- for (int i = 0; i < doubles.length; i++) {
- doubles[i] = VALUES4[i];
- }
Assert.assertArrayEquals(
new float[]{-8.5f, 20.0f, 67.6f},
@@ -239,11 +219,6 @@ public class FixedBucketsHistogramTest
);
float[] quantiles = h.percentilesFloat(new double[]{12.5f, 50.0f, 98f});
- double[] doubles = new double[VALUES5.length];
-
- for (int i = 0; i < doubles.length; i++) {
- doubles[i] = VALUES5[i];
- }
Assert.assertArrayEquals(
new float[]{2.125f, 5.5f, 9.82f},
@@ -264,11 +239,6 @@ public class FixedBucketsHistogramTest
);
float[] quantiles = h.percentilesFloat(new double[]{12.5f, 50.0f, 98f});
- double[] doubles = new double[VALUES6.length];
-
- for (int i = 0; i < doubles.length; i++) {
- doubles[i] = VALUES6[i];
- }
Assert.assertArrayEquals(
new float[]{2.125f, 5.5f, 9.82f},
@@ -289,11 +259,6 @@ public class FixedBucketsHistogramTest
);
float[] quantiles = h.percentilesFloat(new double[]{12.5f, 50.0f, 98f});
- double[] doubles = new double[VALUES7.length];
-
- for (int i = 0; i < doubles.length; i++) {
- doubles[i] = VALUES7[i];
- }
Assert.assertArrayEquals(
new float[]{3.25f, 10f, 25.88f},
diff --git a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentPullerTest.java b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentPullerTest.java
index 914785d..fdeb221 100644
--- a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentPullerTest.java
+++ b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentPullerTest.java
@@ -40,6 +40,7 @@ import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
+import java.nio.charset.StandardCharsets;
import java.util.Date;
import java.util.zip.GZIPOutputStream;
@@ -87,7 +88,7 @@ public class S3DataSegmentPullerTest
final String bucket = "bucket";
final String keyPrefix = "prefix/dir/0";
    final ServerSideEncryptingAmazonS3 s3Client = EasyMock.createStrictMock(ServerSideEncryptingAmazonS3.class);
- final byte[] value = bucket.getBytes("utf8");
+ final byte[] value = bucket.getBytes(StandardCharsets.UTF_8);
final File tmpFile = temporaryFolder.newFile("gzTest.gz");
@@ -144,7 +145,7 @@ public class S3DataSegmentPullerTest
final String bucket = "bucket";
final String keyPrefix = "prefix/dir/0";
    final ServerSideEncryptingAmazonS3 s3Client = EasyMock.createStrictMock(ServerSideEncryptingAmazonS3.class);
- final byte[] value = bucket.getBytes("utf8");
+ final byte[] value = bucket.getBytes(StandardCharsets.UTF_8);
final File tmpFile = temporaryFolder.newFile("gzTest.gz");
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/JobHelper.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/JobHelper.java
index 7af8983..84f5c8f 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/JobHelper.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/JobHelper.java
@@ -49,7 +49,6 @@ import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Progressable;
import javax.annotation.Nullable;
-
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
@@ -63,7 +62,6 @@ import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
-import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
@@ -553,13 +551,15 @@ public class JobHelper
   {
     long size = 0L;
     try (ZipOutputStream outputStream = new ZipOutputStream(baseOutputStream)) {
-      List<String> filesToCopy = Arrays.asList(baseDir.list());
-      for (String fileName : filesToCopy) {
-        final File fileToCopy = new File(baseDir, fileName);
-        if (Files.isRegularFile(fileToCopy.toPath())) {
-          size += copyFileToZipStream(fileToCopy, outputStream, progressable);
-        } else {
-          log.warn("File at [%s] is not a regular file! skipping as part of zip", fileToCopy.getPath());
+      String[] filesToCopy = baseDir.list();
+      if (filesToCopy != null) {
+        for (String fileName : filesToCopy) {
+          final File fileToCopy = new File(baseDir, fileName);
+          if (Files.isRegularFile(fileToCopy.toPath())) {
+            size += copyFileToZipStream(fileToCopy, outputStream, progressable);
+          } else {
+            log.warn("File at [%s] is not a regular file! skipping as part of zip", fileToCopy.getPath());
+          }
}
}
outputStream.flush();
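
The JobHelper change above replaces Arrays.asList(baseDir.list()) with an explicit null check. A minimal sketch of the failure mode it guards against (hypothetical path, not code from this commit):

    import java.io.File;

    public class ListFilesExample
    {
      public static void main(String[] args)
      {
        File dir = new File("/no/such/dir"); // hypothetical path
        // File.list() returns null -- not an empty array -- when the path is
        // not a directory or cannot be read, so Arrays.asList(dir.list())
        // would throw a NullPointerException inside asList.
        String[] files = dir.list();
        if (files != null) {
          for (String name : files) {
            System.out.println(name);
          }
        } else {
          System.out.println("not a readable directory");
        }
      }
    }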
diff --git a/indexing-hadoop/src/test/java/org/apache/druid/indexer/IndexGeneratorJobTest.java b/indexing-hadoop/src/test/java/org/apache/druid/indexer/IndexGeneratorJobTest.java
index 45c4ba8..d4f3ba2 100644
--- a/indexing-hadoop/src/test/java/org/apache/druid/indexer/IndexGeneratorJobTest.java
+++ b/indexing-hadoop/src/test/java/org/apache/druid/indexer/IndexGeneratorJobTest.java
@@ -71,7 +71,6 @@ import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
@@ -98,297 +97,295 @@ public class IndexGeneratorJobTest
                       "maxBytesInMemory={8}, aggs={9}, datasourceName={10}, forceExtendableShardSpecs={11}")
public static Collection<Object[]> constructFeed()
{
- final List<Object[]> baseConstructors = Arrays.asList(
- new Object[][]{
- {
- false,
- "single",
- "2014-10-22T00:00:00Z/P2D",
- new String[][][]{
- {
- {null, "c.example.com"},
- {"c.example.com", "e.example.com"},
- {"e.example.com", "g.example.com"},
- {"g.example.com", "i.example.com"},
- {"i.example.com", null}
- },
- {
- {null, "c.example.com"},
- {"c.example.com", "e.example.com"},
- {"e.example.com", "g.example.com"},
- {"g.example.com", "i.example.com"},
- {"i.example.com", null}
- }
+ final Object[][] baseConstructors = new Object[][]{
+ {
+ false,
+ "single",
+ "2014-10-22T00:00:00Z/P2D",
+ new String[][][]{
+ {
+ {null, "c.example.com"},
+ {"c.example.com", "e.example.com"},
+ {"e.example.com", "g.example.com"},
+ {"g.example.com", "i.example.com"},
+ {"i.example.com", null}
},
- ImmutableList.of(
- "2014102200,a.example.com,100",
- "2014102200,b.exmaple.com,50",
- "2014102200,c.example.com,200",
- "2014102200,d.example.com,250",
- "2014102200,e.example.com,123",
- "2014102200,f.example.com,567",
- "2014102200,g.example.com,11",
- "2014102200,h.example.com,251",
- "2014102200,i.example.com,963",
- "2014102200,j.example.com,333",
- "2014102300,a.example.com,100",
- "2014102300,b.exmaple.com,50",
- "2014102300,c.example.com,200",
- "2014102300,d.example.com,250",
- "2014102300,e.example.com,123",
- "2014102300,f.example.com,567",
- "2014102300,g.example.com,11",
- "2014102300,h.example.com,251",
- "2014102300,i.example.com,963",
- "2014102300,j.example.com,333"
- ),
- null,
- new StringInputRowParser(
- new CSVParseSpec(
- new TimestampSpec("timestamp", "yyyyMMddHH", null),
-                new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("host")), null, null),
- null,
- ImmutableList.of("timestamp", "host", "visited_num"),
- false,
- 0
- ),
- null
- ),
- null,
- null,
- AGGS1,
- "website"
+ {
+ {null, "c.example.com"},
+ {"c.example.com", "e.example.com"},
+ {"e.example.com", "g.example.com"},
+ {"g.example.com", "i.example.com"},
+ {"i.example.com", null}
+ }
},
- {
- false,
- "hashed",
- "2014-10-22T00:00:00Z/P1D",
- new Integer[][][]{
- {
- {0, 4},
- {1, 4},
- {2, 4},
- {3, 4}
- }
- },
- ImmutableList.of(
- "2014102200,a.example.com,100",
- "2014102201,b.exmaple.com,50",
- "2014102202,c.example.com,200",
- "2014102203,d.example.com,250",
- "2014102204,e.example.com,123",
- "2014102205,f.example.com,567",
- "2014102206,g.example.com,11",
- "2014102207,h.example.com,251",
- "2014102208,i.example.com,963",
- "2014102209,j.example.com,333",
- "2014102210,k.example.com,253",
- "2014102211,l.example.com,321",
- "2014102212,m.example.com,3125",
- "2014102213,n.example.com,234",
- "2014102214,o.example.com,325",
- "2014102215,p.example.com,3533",
- "2014102216,q.example.com,500",
- "2014102216,q.example.com,87"
- ),
- null,
- new HadoopyStringInputRowParser(
- new CSVParseSpec(
- new TimestampSpec("timestamp", "yyyyMMddHH", null),
-                new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("host")), null, null),
- null,
- ImmutableList.of("timestamp", "host", "visited_num"),
- false,
- 0
- )
+ ImmutableList.of(
+ "2014102200,a.example.com,100",
+ "2014102200,b.exmaple.com,50",
+ "2014102200,c.example.com,200",
+ "2014102200,d.example.com,250",
+ "2014102200,e.example.com,123",
+ "2014102200,f.example.com,567",
+ "2014102200,g.example.com,11",
+ "2014102200,h.example.com,251",
+ "2014102200,i.example.com,963",
+ "2014102200,j.example.com,333",
+ "2014102300,a.example.com,100",
+ "2014102300,b.exmaple.com,50",
+ "2014102300,c.example.com,200",
+ "2014102300,d.example.com,250",
+ "2014102300,e.example.com,123",
+ "2014102300,f.example.com,567",
+ "2014102300,g.example.com,11",
+ "2014102300,h.example.com,251",
+ "2014102300,i.example.com,963",
+ "2014102300,j.example.com,333"
+ ),
+ null,
+ new StringInputRowParser(
+ new CSVParseSpec(
+ new TimestampSpec("timestamp", "yyyyMMddHH", null),
+                new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("host")), null, null),
+ null,
+ ImmutableList.of("timestamp", "host", "visited_num"),
+ false,
+ 0
),
- null,
- null,
- AGGS1,
- "website"
+ null
+ ),
+ null,
+ null,
+ AGGS1,
+ "website"
+ },
+ {
+ false,
+ "hashed",
+ "2014-10-22T00:00:00Z/P1D",
+ new Integer[][][]{
+ {
+ {0, 4},
+ {1, 4},
+ {2, 4},
+ {3, 4}
+ }
},
- {
- true,
- "hashed",
- "2014-10-22T00:00:00Z/P1D",
- new Integer[][][]{
- {
- {0, 4},
- {1, 4},
- {2, 4},
- {3, 4}
- }
- },
- ImmutableList.of(
- "2014102200,a.example.com,100",
- "2014102201,b.exmaple.com,50",
- "2014102202,c.example.com,200",
- "2014102203,d.example.com,250",
- "2014102204,e.example.com,123",
- "2014102205,f.example.com,567",
- "2014102206,g.example.com,11",
- "2014102207,h.example.com,251",
- "2014102208,i.example.com,963",
- "2014102209,j.example.com,333",
- "2014102210,k.example.com,253",
- "2014102211,l.example.com,321",
- "2014102212,m.example.com,3125",
- "2014102213,n.example.com,234",
- "2014102214,o.example.com,325",
- "2014102215,p.example.com,3533",
- "2014102216,q.example.com,500",
- "2014102216,q.example.com,87"
- ),
- null,
- new StringInputRowParser(
- new CSVParseSpec(
- new TimestampSpec("timestamp", "yyyyMMddHH", null),
-                new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("host")), null, null),
- null,
- ImmutableList.of("timestamp", "host", "visited_num"),
- false,
- 0
- ),
- null
- ),
- null,
- null,
- AGGS1,
- "website"
+ ImmutableList.of(
+ "2014102200,a.example.com,100",
+ "2014102201,b.exmaple.com,50",
+ "2014102202,c.example.com,200",
+ "2014102203,d.example.com,250",
+ "2014102204,e.example.com,123",
+ "2014102205,f.example.com,567",
+ "2014102206,g.example.com,11",
+ "2014102207,h.example.com,251",
+ "2014102208,i.example.com,963",
+ "2014102209,j.example.com,333",
+ "2014102210,k.example.com,253",
+ "2014102211,l.example.com,321",
+ "2014102212,m.example.com,3125",
+ "2014102213,n.example.com,234",
+ "2014102214,o.example.com,325",
+ "2014102215,p.example.com,3533",
+ "2014102216,q.example.com,500",
+ "2014102216,q.example.com,87"
+ ),
+ null,
+ new HadoopyStringInputRowParser(
+ new CSVParseSpec(
+ new TimestampSpec("timestamp", "yyyyMMddHH", null),
+                new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("host")), null, null),
+ null,
+ ImmutableList.of("timestamp", "host", "visited_num"),
+ false,
+ 0
+ )
+ ),
+ null,
+ null,
+ AGGS1,
+ "website"
+ },
+ {
+ true,
+ "hashed",
+ "2014-10-22T00:00:00Z/P1D",
+ new Integer[][][]{
+ {
+ {0, 4},
+ {1, 4},
+ {2, 4},
+ {3, 4}
+ }
},
- {
- false,
- "single",
- "2014-10-22T00:00:00Z/P2D",
- new String[][][]{
- {
- {null, "c.example.com"},
- {"c.example.com", "e.example.com"},
- {"e.example.com", "g.example.com"},
- {"g.example.com", "i.example.com"},
- {"i.example.com", null}
- },
- {
- {null, "c.example.com"},
- {"c.example.com", "e.example.com"},
- {"e.example.com", "g.example.com"},
- {"g.example.com", "i.example.com"},
- {"i.example.com", null}
- }
- },
- ImmutableList.of(
- "2014102200,a.example.com,100",
- "2014102200,b.exmaple.com,50",
- "2014102200,c.example.com,200",
- "2014102200,d.example.com,250",
- "2014102200,e.example.com,123",
- "2014102200,f.example.com,567",
- "2014102200,g.example.com,11",
- "2014102200,h.example.com,251",
- "2014102200,i.example.com,963",
- "2014102200,j.example.com,333",
- "2014102300,a.example.com,100",
- "2014102300,b.exmaple.com,50",
- "2014102300,c.example.com,200",
- "2014102300,d.example.com,250",
- "2014102300,e.example.com,123",
- "2014102300,f.example.com,567",
- "2014102300,g.example.com,11",
- "2014102300,h.example.com,251",
- "2014102300,i.example.com,963",
- "2014102300,j.example.com,333"
- ),
- SequenceFileInputFormat.class.getName(),
- new HadoopyStringInputRowParser(
- new CSVParseSpec(
- new TimestampSpec("timestamp", "yyyyMMddHH", null),
-                new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("host")), null, null),
- null,
- ImmutableList.of("timestamp", "host", "visited_num"),
- false,
- 0
- )
+ ImmutableList.of(
+ "2014102200,a.example.com,100",
+ "2014102201,b.exmaple.com,50",
+ "2014102202,c.example.com,200",
+ "2014102203,d.example.com,250",
+ "2014102204,e.example.com,123",
+ "2014102205,f.example.com,567",
+ "2014102206,g.example.com,11",
+ "2014102207,h.example.com,251",
+ "2014102208,i.example.com,963",
+ "2014102209,j.example.com,333",
+ "2014102210,k.example.com,253",
+ "2014102211,l.example.com,321",
+ "2014102212,m.example.com,3125",
+ "2014102213,n.example.com,234",
+ "2014102214,o.example.com,325",
+ "2014102215,p.example.com,3533",
+ "2014102216,q.example.com,500",
+ "2014102216,q.example.com,87"
+ ),
+ null,
+ new StringInputRowParser(
+ new CSVParseSpec(
+ new TimestampSpec("timestamp", "yyyyMMddHH", null),
+                new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("host")), null, null),
+ null,
+ ImmutableList.of("timestamp", "host", "visited_num"),
+ false,
+ 0
),
- null,
- null,
- AGGS1,
- "website"
- },
- {
-          // Tests that new indexes inherit the dimension order from previous index
- false,
- "hashed",
- "2014-10-22T00:00:00Z/P1D",
- new Integer[][][]{
- {
-              {0, 1} // use a single partition, dimension order inheritance is not supported across partitions
- }
+ null
+ ),
+ null,
+ null,
+ AGGS1,
+ "website"
+ },
+ {
+ false,
+ "single",
+ "2014-10-22T00:00:00Z/P2D",
+ new String[][][]{
+ {
+ {null, "c.example.com"},
+ {"c.example.com", "e.example.com"},
+ {"e.example.com", "g.example.com"},
+ {"g.example.com", "i.example.com"},
+ {"i.example.com", null}
},
- ImmutableList.of(
- "{\"ts\":\"2014102200\", \"X\":\"x.example.com\"}",
- "{\"ts\":\"2014102201\", \"Y\":\"y.example.com\"}",
- "{\"ts\":\"2014102202\", \"M\":\"m.example.com\"}",
- "{\"ts\":\"2014102203\", \"Q\":\"q.example.com\"}",
- "{\"ts\":\"2014102204\", \"B\":\"b.example.com\"}",
- "{\"ts\":\"2014102205\", \"F\":\"f.example.com\"}"
- ),
- null,
- new StringInputRowParser(
- new JSONParseSpec(
- new TimestampSpec("ts", "yyyyMMddHH", null),
- new DimensionsSpec(null, null, null),
- null,
- null
- ),
+ {
+ {null, "c.example.com"},
+ {"c.example.com", "e.example.com"},
+ {"e.example.com", "g.example.com"},
+ {"g.example.com", "i.example.com"},
+ {"i.example.com", null}
+ }
+ },
+ ImmutableList.of(
+ "2014102200,a.example.com,100",
+ "2014102200,b.exmaple.com,50",
+ "2014102200,c.example.com,200",
+ "2014102200,d.example.com,250",
+ "2014102200,e.example.com,123",
+ "2014102200,f.example.com,567",
+ "2014102200,g.example.com,11",
+ "2014102200,h.example.com,251",
+ "2014102200,i.example.com,963",
+ "2014102200,j.example.com,333",
+ "2014102300,a.example.com,100",
+ "2014102300,b.exmaple.com,50",
+ "2014102300,c.example.com,200",
+ "2014102300,d.example.com,250",
+ "2014102300,e.example.com,123",
+ "2014102300,f.example.com,567",
+ "2014102300,g.example.com,11",
+ "2014102300,h.example.com,251",
+ "2014102300,i.example.com,963",
+ "2014102300,j.example.com,333"
+ ),
+ SequenceFileInputFormat.class.getName(),
+ new HadoopyStringInputRowParser(
+ new CSVParseSpec(
+ new TimestampSpec("timestamp", "yyyyMMddHH", null),
+                new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("host")), null, null),
+ null,
+ ImmutableList.of("timestamp", "host", "visited_num"),
+ false,
+ 0
+ )
+ ),
+ null,
+ null,
+ AGGS1,
+ "website"
+ },
+ {
+          // Tests that new indexes inherit the dimension order from previous index
+ false,
+ "hashed",
+ "2014-10-22T00:00:00Z/P1D",
+ new Integer[][][]{
+ {
+              {0, 1} // use a single partition, dimension order inheritance is not supported across partitions
+ }
+ },
+ ImmutableList.of(
+ "{\"ts\":\"2014102200\", \"X\":\"x.example.com\"}",
+ "{\"ts\":\"2014102201\", \"Y\":\"y.example.com\"}",
+ "{\"ts\":\"2014102202\", \"M\":\"m.example.com\"}",
+ "{\"ts\":\"2014102203\", \"Q\":\"q.example.com\"}",
+ "{\"ts\":\"2014102204\", \"B\":\"b.example.com\"}",
+ "{\"ts\":\"2014102205\", \"F\":\"f.example.com\"}"
+ ),
+ null,
+ new StringInputRowParser(
+ new JSONParseSpec(
+ new TimestampSpec("ts", "yyyyMMddHH", null),
+ new DimensionsSpec(null, null, null),
+ null,
null
),
- 1, // force 1 row max per index for easier testing
- null,
- AGGS2,
- "inherit_dims"
+ null
+ ),
+ 1, // force 1 row max per index for easier testing
+ null,
+ AGGS2,
+ "inherit_dims"
+ },
+ {
+ // Tests that pre-specified dim order is maintained across indexes.
+ false,
+ "hashed",
+ "2014-10-22T00:00:00Z/P1D",
+ new Integer[][][]{
+ {
+ {0, 1}
+ }
},
- {
-          // Tests that pre-specified dim order is maintained across indexes.
- false,
- "hashed",
- "2014-10-22T00:00:00Z/P1D",
- new Integer[][][]{
- {
- {0, 1}
- }
- },
- ImmutableList.of(
- "{\"ts\":\"2014102200\", \"X\":\"x.example.com\"}",
- "{\"ts\":\"2014102201\", \"Y\":\"y.example.com\"}",
- "{\"ts\":\"2014102202\", \"M\":\"m.example.com\"}",
- "{\"ts\":\"2014102203\", \"Q\":\"q.example.com\"}",
- "{\"ts\":\"2014102204\", \"B\":\"b.example.com\"}",
- "{\"ts\":\"2014102205\", \"F\":\"f.example.com\"}"
- ),
- null,
- new StringInputRowParser(
- new JSONParseSpec(
- new TimestampSpec("ts", "yyyyMMddHH", null),
-                new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of(
- "B",
- "F",
- "M",
- "Q",
- "X",
- "Y"
- )), null, null),
- null,
- null
- ),
+ ImmutableList.of(
+ "{\"ts\":\"2014102200\", \"X\":\"x.example.com\"}",
+ "{\"ts\":\"2014102201\", \"Y\":\"y.example.com\"}",
+ "{\"ts\":\"2014102202\", \"M\":\"m.example.com\"}",
+ "{\"ts\":\"2014102203\", \"Q\":\"q.example.com\"}",
+ "{\"ts\":\"2014102204\", \"B\":\"b.example.com\"}",
+ "{\"ts\":\"2014102205\", \"F\":\"f.example.com\"}"
+ ),
+ null,
+ new StringInputRowParser(
+ new JSONParseSpec(
+ new TimestampSpec("ts", "yyyyMMddHH", null),
+                new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of(
+ "B",
+ "F",
+ "M",
+ "Q",
+ "X",
+ "Y"
+ )), null, null),
+ null,
null
),
- 1, // force 1 row max per index for easier testing
- null,
- AGGS2,
- "inherit_dims2"
- }
+ null
+ ),
+ 1, // force 1 row max per index for easier testing
+ null,
+ AGGS2,
+ "inherit_dims2"
}
- );
+ };
// Run each baseConstructor with/without forceExtendableShardSpecs.
final List<Object[]> constructors = new ArrayList<>();
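
The rewrite above drops an Arrays.asList(...) wrapper that existed only so the parameters array could be iterated, one of the "Redundant Collection operation" findings. A minimal standalone sketch of the same simplification:

    public class IterateArrayExample
    {
      public static void main(String[] args)
      {
        // Before: List<Object[]> rows = Arrays.asList(new Object[][]{{1, "a"}, {2, "b"}});
        // The List wrapper added nothing; a plain array iterates identically.
        Object[][] rows = new Object[][]{{1, "a"}, {2, "b"}};
        for (Object[] row : rows) {
          System.out.println(row[0] + ":" + row[1]);
        }
      }
    }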
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerProvisioningStrategy.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerProvisioningStrategy.java
index 52624f4..cf562f1 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerProvisioningStrategy.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerProvisioningStrategy.java
@@ -315,7 +315,7 @@ public class PendingTaskBasedWorkerProvisioningStrategy extends AbstractWorkerPr
// create a dummy worker and try to simulate assigning task to it.
workerRunningTask = createDummyWorker(
SCHEME,
- "dummy" + need,
+ "dummy " + need,
capacity,
workerTaskRunnerConfig.getMinWorkerVersion()
);
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/seekablestream/supervisor/SeekableStreamSupervisor.java b/indexing-service/src/main/java/org/apache/druid/indexing/seekablestream/supervisor/SeekableStreamSupervisor.java
index 682a560..247ff0e 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/seekablestream/supervisor/SeekableStreamSupervisor.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/seekablestream/supervisor/SeekableStreamSupervisor.java
@@ -2080,9 +2080,7 @@ public abstract class SeekableStreamSupervisor<PartitionIdType, SequenceOffsetTy
       try {
        boolean success = indexerMetadataStorageCoordinator.resetDataSourceMetadata(dataSource, cleanedMetadata);
-        if (success) {
-
-        } else {
+        if (!success) {
           log.error("Failed to update datasource metadata[%s] with expired partitions removed", cleanedMetadata);
}
}
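
The supervisor hunk above removes a "statement with empty body" by inverting the condition instead of leaving an empty then-branch. A minimal sketch with hypothetical names:

    public class InvertedBranchExample
    {
      public static void main(String[] args)
      {
        boolean success = tryUpdate(); // hypothetical operation
        // Before: if (success) { } else { log the failure } -- the empty
        // then-branch trips EmptyStatementBody. Inverting the condition
        // expresses the same logic with no empty block.
        if (!success) {
          System.err.println("failed to update metadata");
        }
      }

      private static boolean tryUpdate()
      {
        return false;
      }
    }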
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractIndexerTest.java b/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractIndexerTest.java
index 88f6b00..03df90e 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractIndexerTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractIndexerTest.java
@@ -35,6 +35,7 @@ import org.joda.time.Interval;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Callable;
@@ -116,7 +117,7 @@ public abstract class AbstractIndexerTest
{
    final InputStream inputStream = ITRealtimeIndexTaskTest.class.getResourceAsStream(file);
try {
- return IOUtils.toString(inputStream, "UTF-8");
+ return IOUtils.toString(inputStream, StandardCharsets.UTF_8);
}
finally {
IOUtils.closeQuietly(inputStream);
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/having/AndHavingSpec.java b/processing/src/main/java/org/apache/druid/query/groupby/having/AndHavingSpec.java
index 9c11ab9..ad6cc32 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/having/AndHavingSpec.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/having/AndHavingSpec.java
@@ -95,11 +95,9 @@ public class AndHavingSpec implements HavingSpec
@Override
public String toString()
{
- final StringBuilder sb = new StringBuilder();
- sb.append("AndHavingSpec");
- sb.append("{havingSpecs=").append(havingSpecs);
- sb.append('}');
- return sb.toString();
+ return "AndHavingSpec{" +
+ "havingSpecs=" + havingSpecs +
+ '}';
}
@Override
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/having/EqualToHavingSpec.java b/processing/src/main/java/org/apache/druid/query/groupby/having/EqualToHavingSpec.java
index 8ac0957..86e13b9 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/having/EqualToHavingSpec.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/having/EqualToHavingSpec.java
@@ -125,12 +125,10 @@ public class EqualToHavingSpec implements HavingSpec
@Override
public String toString()
{
- final StringBuilder sb = new StringBuilder();
- sb.append("EqualToHavingSpec");
- sb.append("{aggregationName='").append(aggregationName).append('\'');
- sb.append(", value=").append(value);
- sb.append('}');
- return sb.toString();
+ return "EqualToHavingSpec{" +
+ "aggregationName='" + aggregationName + '\'' +
+ ", value=" + value +
+ '}';
}
@Override
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/having/GreaterThanHavingSpec.java b/processing/src/main/java/org/apache/druid/query/groupby/having/GreaterThanHavingSpec.java
index b073e40..7b59978 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/having/GreaterThanHavingSpec.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/having/GreaterThanHavingSpec.java
@@ -121,12 +121,10 @@ public class GreaterThanHavingSpec implements HavingSpec
@Override
public String toString()
{
- final StringBuilder sb = new StringBuilder();
- sb.append("GreaterThanHavingSpec");
- sb.append("{aggregationName='").append(aggregationName).append('\'');
- sb.append(", value=").append(value);
- sb.append('}');
- return sb.toString();
+ return "GreaterThanHavingSpec{" +
+ "aggregationName='" + aggregationName + '\'' +
+ ", value=" + value +
+ '}';
}
@Override
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/having/LessThanHavingSpec.java b/processing/src/main/java/org/apache/druid/query/groupby/having/LessThanHavingSpec.java
index d4b173d..6f400ec 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/having/LessThanHavingSpec.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/having/LessThanHavingSpec.java
@@ -119,12 +119,10 @@ public class LessThanHavingSpec implements HavingSpec
@Override
public String toString()
{
- final StringBuilder sb = new StringBuilder();
- sb.append("LessThanHavingSpec");
- sb.append("{aggregationName='").append(aggregationName).append('\'');
- sb.append(", value=").append(value);
- sb.append('}');
- return sb.toString();
+ return "LessThanHavingSpec{" +
+ "aggregationName='" + aggregationName + '\'' +
+ ", value=" + value +
+ '}';
}
@Override
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/having/NotHavingSpec.java b/processing/src/main/java/org/apache/druid/query/groupby/having/NotHavingSpec.java
index cd8f4c6..81a1a84 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/having/NotHavingSpec.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/having/NotHavingSpec.java
@@ -59,11 +59,9 @@ public class NotHavingSpec implements HavingSpec
@Override
public String toString()
{
- final StringBuilder sb = new StringBuilder();
- sb.append("NotHavingSpec");
- sb.append("{havingSpec=").append(havingSpec);
- sb.append('}');
- return sb.toString();
+ return "NotHavingSpec{" +
+ "havingSpec=" + havingSpec +
+ '}';
}
@Override
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/having/OrHavingSpec.java b/processing/src/main/java/org/apache/druid/query/groupby/having/OrHavingSpec.java
index 097619e..d75bc57 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/having/OrHavingSpec.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/having/OrHavingSpec.java
@@ -95,11 +95,9 @@ public class OrHavingSpec implements HavingSpec
@Override
public String toString()
{
- final StringBuilder sb = new StringBuilder();
- sb.append("OrHavingSpec");
- sb.append("{havingSpecs=").append(havingSpecs);
- sb.append('}');
- return sb.toString();
+ return "OrHavingSpec{" +
+ "havingSpecs=" + havingSpecs +
+ '}';
}
@Override
diff --git a/processing/src/main/java/org/apache/druid/query/search/CursorOnlyStrategy.java b/processing/src/main/java/org/apache/druid/query/search/CursorOnlyStrategy.java
index ec646e6..b4483fd 100644
--- a/processing/src/main/java/org/apache/druid/query/search/CursorOnlyStrategy.java
+++ b/processing/src/main/java/org/apache/druid/query/search/CursorOnlyStrategy.java
@@ -33,7 +33,6 @@ import org.apache.druid.segment.StorageAdapter;
import org.apache.druid.segment.VirtualColumns;
import org.joda.time.Interval;
-import java.util.Arrays;
import java.util.List;
public class CursorOnlyStrategy extends SearchStrategy
@@ -107,12 +106,10 @@ public class CursorOnlyStrategy extends SearchStrategy
return map;
}
-    final List<ColumnSelectorPlus<SearchColumnSelectorStrategy>> selectorPlusList = Arrays.asList(
-        DimensionHandlerUtils.createColumnSelectorPluses(
-            SearchQueryRunner.SEARCH_COLUMN_SELECTOR_STRATEGY_FACTORY,
-            dimsToSearch,
-            cursor.getColumnSelectorFactory()
-        )
+    final ColumnSelectorPlus<SearchColumnSelectorStrategy>[] selectorPlusList = DimensionHandlerUtils.createColumnSelectorPluses(
+        SearchQueryRunner.SEARCH_COLUMN_SELECTOR_STRATEGY_FACTORY,
+        dimsToSearch,
+        cursor.getColumnSelectorFactory()
     );
while (!cursor.isDone()) {
diff --git a/processing/src/test/java/org/apache/druid/query/groupby/epinephelinae/ByteBufferMinMaxOffsetHeapTest.java b/processing/src/test/java/org/apache/druid/query/groupby/epinephelinae/ByteBufferMinMaxOffsetHeapTest.java
index 63245c7..29da460 100644
--- a/processing/src/test/java/org/apache/druid/query/groupby/epinephelinae/ByteBufferMinMaxOffsetHeapTest.java
+++ b/processing/src/test/java/org/apache/druid/query/groupby/epinephelinae/ByteBufferMinMaxOffsetHeapTest.java
@@ -119,8 +119,7 @@ public class ByteBufferMinMaxOffsetHeapTest
Collections.sort(deletedValues);
for (int deletedValue : deletedValues) {
- int idx = values.indexOf(deletedValue);
- values.remove(idx);
+ values.remove((Integer) deletedValue);
}
Assert.assertTrue(heap.getHeapSize() <= limit);
@@ -176,8 +175,7 @@ public class ByteBufferMinMaxOffsetHeapTest
Collections.sort(deletedValues);
for (int deletedValue : deletedValues) {
- int idx = values.indexOf(deletedValue);
- values.remove(idx);
+ values.remove((Integer) deletedValue);
}
Assert.assertTrue(heap.getHeapSize() <= limit);
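
The cast in the hunks above matters because java.util.List has two remove overloads. A minimal sketch of the trap the rewrite has to avoid:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class RemoveOverloadExample
    {
      public static void main(String[] args)
      {
        List<Integer> values = new ArrayList<>(Arrays.asList(10, 20, 30));
        // remove(int) removes BY INDEX; remove(Object) removes BY VALUE.
        // Casting to Integer selects the by-value overload, which is what
        // the old indexOf(..) + remove(idx) pair was doing in two steps.
        values.remove((Integer) 20);
        System.out.println(values); // [10, 30]
      }
    }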
diff --git a/server/src/main/java/org/apache/druid/client/DirectDruidClient.java b/server/src/main/java/org/apache/druid/client/DirectDruidClient.java
index 020446d..7c674f0 100644
--- a/server/src/main/java/org/apache/druid/client/DirectDruidClient.java
+++ b/server/src/main/java/org/apache/druid/client/DirectDruidClient.java
@@ -521,7 +521,7 @@ public class DirectDruidClient<T> implements QueryRunner<T>
return retVal;
}
- private <T> void cancelQuery(Query<T> query, String cancelUrl)
+ private void cancelQuery(Query<T> query, String cancelUrl)
{
Runnable cancelRunnable = () -> {
try {
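
The DirectDruidClient hunk above drops a method-level <T> that shadowed the class's own T (the TypeParameterHidesVisibleType inspection). A minimal sketch with a hypothetical class:

    public class Holder<T>
    {
      private T value;

      // Before: "private <T> void set(T v)" declared a NEW type parameter
      // that shadowed the class's T, so the method's T and the field's T
      // were unrelated types. Dropping the method-level <T> fixes that.
      private void set(T v)
      {
        this.value = v;
      }

      public static void main(String[] args)
      {
        Holder<String> holder = new Holder<>();
        holder.set("ok");
        System.out.println(holder.value);
      }
    }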
diff --git a/server/src/test/java/org/apache/druid/server/QueryResourceTest.java b/server/src/test/java/org/apache/druid/server/QueryResourceTest.java
index 5bda433..eface40 100644
--- a/server/src/test/java/org/apache/druid/server/QueryResourceTest.java
+++ b/server/src/test/java/org/apache/druid/server/QueryResourceTest.java
@@ -74,6 +74,7 @@ import javax.ws.rs.core.StreamingOutput;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
@@ -208,7 +209,7 @@ public class QueryResourceTest
expectPermissiveHappyPathAuth();
Response response = queryResource.doPost(
- new ByteArrayInputStream(SIMPLE_TIMESERIES_QUERY.getBytes("UTF-8")),
+        new ByteArrayInputStream(SIMPLE_TIMESERIES_QUERY.getBytes(StandardCharsets.UTF_8)),
null /*pretty*/,
testServletRequest
);
@@ -240,7 +241,7 @@ public class QueryResourceTest
EasyMock.replay(testServletRequest);
Response response = queryResource.doPost(
- new ByteArrayInputStream(SIMPLE_TIMESERIES_QUERY.getBytes("UTF-8")),
+        new ByteArrayInputStream(SIMPLE_TIMESERIES_QUERY.getBytes(StandardCharsets.UTF_8)),
null /*pretty*/,
testServletRequest
);
@@ -275,7 +276,7 @@ public class QueryResourceTest
EasyMock.replay(testServletRequest);
Response response = queryResource.doPost(
- new ByteArrayInputStream(SIMPLE_TIMESERIES_QUERY.getBytes("UTF-8")),
+        new ByteArrayInputStream(SIMPLE_TIMESERIES_QUERY.getBytes(StandardCharsets.UTF_8)),
null /*pretty*/,
testServletRequest
);
@@ -314,7 +315,7 @@ public class QueryResourceTest
EasyMock.replay(smileRequest);
Response response = queryResource.doPost(
- new ByteArrayInputStream(SIMPLE_TIMESERIES_QUERY.getBytes("UTF-8")),
+        new ByteArrayInputStream(SIMPLE_TIMESERIES_QUERY.getBytes(StandardCharsets.UTF_8)),
null /*pretty*/,
smileRequest
);
@@ -330,7 +331,7 @@ public class QueryResourceTest
{
EasyMock.replay(testServletRequest);
Response response = queryResource.doPost(
- new ByteArrayInputStream("Meka Leka Hi Meka Hiney
Ho".getBytes("UTF-8")),
+ new ByteArrayInputStream("Meka Leka Hi Meka Hiney
Ho".getBytes(StandardCharsets.UTF_8)),
null /*pretty*/,
testServletRequest
);
@@ -400,7 +401,7 @@ public class QueryResourceTest
try {
queryResource.doPost(
- new ByteArrayInputStream(SIMPLE_TIMESERIES_QUERY.getBytes("UTF-8")),
+          new ByteArrayInputStream(SIMPLE_TIMESERIES_QUERY.getBytes(StandardCharsets.UTF_8)),
null /*pretty*/,
testServletRequest
);
@@ -410,7 +411,7 @@ public class QueryResourceTest
}
Response response = queryResource.doPost(
- new ByteArrayInputStream("{\"queryType\":\"timeBoundary\",
\"dataSource\":\"allow\"}".getBytes("UTF-8")),
+ new ByteArrayInputStream("{\"queryType\":\"timeBoundary\",
\"dataSource\":\"allow\"}".getBytes(StandardCharsets.UTF_8)),
null /*pretty*/,
testServletRequest
);
@@ -526,7 +527,7 @@ public class QueryResourceTest
{
try {
Response response = queryResource.doPost(
- new ByteArrayInputStream(queryString.getBytes("UTF-8")),
+          new ByteArrayInputStream(queryString.getBytes(StandardCharsets.UTF_8)),
null,
testServletRequest
);
@@ -649,7 +650,7 @@ public class QueryResourceTest
try {
startAwaitLatch.countDown();
Response response = queryResource.doPost(
- new ByteArrayInputStream(queryString.getBytes("UTF-8")),
+          new ByteArrayInputStream(queryString.getBytes(StandardCharsets.UTF_8)),
null,
testServletRequest
);
@@ -896,7 +897,7 @@ public class QueryResourceTest
Executors.newSingleThreadExecutor().submit(() -> {
try {
Response response = queryResource.doPost(
- new ByteArrayInputStream(query.getBytes("UTF-8")),
+ new ByteArrayInputStream(query.getBytes(StandardCharsets.UTF_8)),
null,
testServletRequest
);
diff --git a/services/src/main/java/org/apache/druid/cli/CliHadoopIndexer.java b/services/src/main/java/org/apache/druid/cli/CliHadoopIndexer.java
index 651f6c2..3cc42cb 100644
--- a/services/src/main/java/org/apache/druid/cli/CliHadoopIndexer.java
+++ b/services/src/main/java/org/apache/druid/cli/CliHadoopIndexer.java
@@ -80,18 +80,19 @@ public class CliHadoopIndexer implements Runnable
       final List<URL> extensionURLs = new ArrayList<>();
       for (final File extension : Initialization.getExtensionFilesToLoad(extensionsConfig)) {
-        final ClassLoader extensionLoader = Initialization.getClassLoaderForExtension(extension, false);
-        extensionURLs.addAll(Arrays.asList(((URLClassLoader) extensionLoader).getURLs()));
+        final URLClassLoader extensionLoader = Initialization.getClassLoaderForExtension(extension, false);
+        extensionURLs.addAll(Arrays.asList(extensionLoader.getURLs()));
       }
-      final List<URL> nonHadoopURLs = new ArrayList<>();
-      nonHadoopURLs.addAll(Arrays.asList(((URLClassLoader) CliHadoopIndexer.class.getClassLoader()).getURLs()));
+      final List<URL> nonHadoopURLs = Arrays.asList(
+          ((URLClassLoader) CliHadoopIndexer.class.getClassLoader()).getURLs()
+      );
       final List<URL> driverURLs = new ArrayList<>(nonHadoopURLs);
       // put hadoop dependencies last to avoid jets3t & apache.httpcore version conflicts
       for (File hadoopDependency : Initialization.getHadoopDependencyFilesToLoad(allCoordinates, extensionsConfig)) {
-        final ClassLoader hadoopLoader = Initialization.getClassLoaderForExtension(hadoopDependency, false);
-        driverURLs.addAll(Arrays.asList(((URLClassLoader) hadoopLoader).getURLs()));
+        final URLClassLoader hadoopLoader = Initialization.getClassLoaderForExtension(hadoopDependency, false);
+        driverURLs.addAll(Arrays.asList(hadoopLoader.getURLs()));
       }
       final URLClassLoader loader = new URLClassLoader(driverURLs.toArray(new URL[0]), null);
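
The nonHadoopURLs change above is the CollectionAddAllCanBeReplacedWithConstructor inspection in action. A minimal standalone sketch:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class AddAllExample
    {
      public static void main(String[] args)
      {
        List<String> source = Arrays.asList("a", "b", "c");
        // Before: List<String> copy = new ArrayList<>(); copy.addAll(source);
        // The copy constructor says the same thing in one step and can size
        // the backing array up front.
        List<String> copy = new ArrayList<>(source);
        System.out.println(copy); // [a, b, c]
      }
    }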
diff --git a/services/src/main/java/org/apache/druid/cli/ExportMetadata.java b/services/src/main/java/org/apache/druid/cli/ExportMetadata.java
index fd98163..b66fcf9 100644
--- a/services/src/main/java/org/apache/druid/cli/ExportMetadata.java
+++ b/services/src/main/java/org/apache/druid/cli/ExportMetadata.java
@@ -272,13 +272,12 @@ public class ExportMetadata extends GuiceRunnable
while ((line = reader.readLine()) != null) {
String[] parsed = PARSER.parseLine(line);
-          StringBuilder newLineBuilder = new StringBuilder();
-          newLineBuilder.append(parsed[0]).append(","); //dataSource
-          newLineBuilder.append(parsed[1]).append(","); //created_date
-          newLineBuilder.append(rewriteHexPayloadAsEscapedJson(parsed[2])).append(","); //commit_metadata_payload
-          newLineBuilder.append(parsed[3]); //commit_metadata_sha1
-          newLineBuilder.append("\n");
-          writer.write(newLineBuilder.toString());
+          String newLine = parsed[0] + "," //dataSource
+                           + parsed[1] + "," //created_date
+                           + rewriteHexPayloadAsEscapedJson(parsed[2]) + "," //commit_metadata_payload
+                           + parsed[3] //commit_metadata_sha1
+                           + "\n";
+          writer.write(newLine);
}
}
@@ -303,13 +302,12 @@ public class ExportMetadata extends GuiceRunnable
while ((line = reader.readLine()) != null) {
String[] parsed = PARSER.parseLine(line);
-          StringBuilder newLineBuilder = new StringBuilder();
-          newLineBuilder.append(parsed[0]).append(","); //id
-          newLineBuilder.append(parsed[1]).append(","); //dataSource
-          newLineBuilder.append(parsed[2]).append(","); //version
-          newLineBuilder.append(rewriteHexPayloadAsEscapedJson(parsed[3])); //payload
-          newLineBuilder.append("\n");
-          writer.write(newLineBuilder.toString());
+          String newLine = parsed[0] + "," //id
+                           + parsed[1] + "," //dataSource
+                           + parsed[2] + "," //version
+                           + rewriteHexPayloadAsEscapedJson(parsed[3]) //payload
+                           + "\n";
+          writer.write(newLine);
}
}
@@ -334,11 +332,10 @@ public class ExportMetadata extends GuiceRunnable
while ((line = reader.readLine()) != null) {
String[] parsed = PARSER.parseLine(line);
-          StringBuilder newLineBuilder = new StringBuilder();
-          newLineBuilder.append(parsed[0]).append(","); //name
-          newLineBuilder.append(rewriteHexPayloadAsEscapedJson(parsed[1])); //payload
-          newLineBuilder.append("\n");
-          writer.write(newLineBuilder.toString());
+          String newLine = parsed[0] + "," //name
+                           + rewriteHexPayloadAsEscapedJson(parsed[1]) //payload
+                           + "\n";
+          writer.write(newLine);
}
}
@@ -363,13 +360,12 @@ public class ExportMetadata extends GuiceRunnable
while ((line = reader.readLine()) != null) {
String[] parsed = PARSER.parseLine(line);
-          StringBuilder newLineBuilder = new StringBuilder();
-          newLineBuilder.append(parsed[0]).append(","); //id
-          newLineBuilder.append(parsed[1]).append(","); //spec_id
-          newLineBuilder.append(parsed[2]).append(","); //created_date
-          newLineBuilder.append(rewriteHexPayloadAsEscapedJson(parsed[3])); //payload
-          newLineBuilder.append("\n");
-          writer.write(newLineBuilder.toString());
+          String newLine = parsed[0] + "," //id
+                           + parsed[1] + "," //spec_id
+                           + parsed[2] + "," //created_date
+                           + rewriteHexPayloadAsEscapedJson(parsed[3]) //payload
+                           + "\n";
+          writer.write(newLine);
}
}
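
The four ExportMetadata hunks above are the StringBufferReplaceableByString inspection: a StringBuilder used once, in a straight line, with no loop. A minimal sketch of why plain concatenation is equivalent (hypothetical data, not the actual metadata rows):

    public class ConcatExample
    {
      public static void main(String[] args)
      {
        String[] parsed = {"ds", "2020-01-01", "payload"}; // hypothetical row
        // For a single expression with no loop, javac compiles the + chain
        // below to the same StringBuilder bytecode, so the explicit builder
        // only added noise.
        String newLine = parsed[0] + ","
                         + parsed[1] + ","
                         + parsed[2]
                         + "\n";
        System.out.print(newLine);
      }
    }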