github-advanced-security[bot] commented on code in PR #16849:
URL: https://github.com/apache/druid/pull/16849#discussion_r1706147840
##########
extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQReplaceTest.java:
##########
@@ -194,6 +196,393 @@
.verifyResults();
}
+ @MethodSource("data")
+ @ParameterizedTest(name = "{index}:with context {0}")
+  public void testReplaceOnFooWithAllClusteredByDim(String contextName, Map<String, Object> context)
+ {
+    // Tests [CLUSTERED BY dim1] with the default useExplicitSegmentSortOrder (false). In this case,
+ // partitioning uses [dim1] but segment sort uses [__time, dim1].
+ RowSignature rowSignature = RowSignature.builder()
+ .add("__time", ColumnType.LONG)
+ .add("dim1", ColumnType.STRING)
+ .add("m1", ColumnType.FLOAT)
+ .build();
+
+ DataSegment existingDataSegment0 = DataSegment.builder()
+
.interval(Intervals.of("2000-01-01T/2000-01-04T"))
+ .size(50)
+
.version(MSQTestTaskActionClient.VERSION)
+ .dataSource("foo")
+ .build();
+
+ DataSegment existingDataSegment1 = DataSegment.builder()
+
.interval(Intervals.of("2001-01-01T/2001-01-04T"))
+ .size(50)
+
.version(MSQTestTaskActionClient.VERSION)
+ .dataSource("foo")
+ .build();
+
+ Mockito.doCallRealMethod()
+ .doReturn(ImmutableSet.of(existingDataSegment0,
existingDataSegment1))
+ .when(testTaskActionClient)
+ .submit(new RetrieveUsedSegmentsAction(
+ EasyMock.eq("foo"),
+ EasyMock.eq(ImmutableList.of(Intervals.ETERNITY))
+ ));
+
+ testIngestQuery().setSql(" REPLACE INTO foo OVERWRITE ALL "
+ + "SELECT __time, dim1, m1 "
+ + "FROM foo "
+ + "PARTITIONED BY ALL "
+ + "CLUSTERED BY dim1")
+ .setExpectedDataSource("foo")
+ .setExpectedRowSignature(rowSignature)
+ .setQueryContext(context)
+ .setExpectedDestinationIntervals(Intervals.ONLY_ETERNITY)
+ .setExpectedSegments(
+ ImmutableSet.of(
+ SegmentId.of("foo", Intervals.ETERNITY, "test", 0)
+ )
+ )
+ .setExpectedShardSpec(DimensionRangeShardSpec.class)
+ .setExpectedResultRows(
+ ImmutableList.of(
+ new Object[]{946684800000L, "", 1.0f},
+ new Object[]{946771200000L, "10.1", 2.0f},
+ new Object[]{946857600000L, "2", 3.0f},
+ new Object[]{978307200000L, "1", 4.0f},
+ new Object[]{978393600000L, "def", 5.0f},
+ new Object[]{978480000000L, "abc", 6.0f}
+ )
+ )
+
.setExpectedSegmentGenerationProgressCountersForStageWorker(
+ CounterSnapshotMatcher
+ .with().segmentRowsProcessed(6),
+ 1, 0
+ )
+ .setExpectedLastCompactionState(
+ expectedCompactionState(
+ context,
+ Collections.singletonList("dim1"),
+ DimensionsSpec.builder()
+ .setDimensions(
+ ImmutableList.of(
+                                              new StringDimensionSchema("dim1"),
+                                              new FloatDimensionSchema("m1")
+ )
+ )
+
.setDimensionExclusions(Collections.singletonList("__time"))
+ .build(),
+ GranularityType.ALL,
+ Intervals.ETERNITY
+ )
+ )
+ .verifyResults();
+ }
+
+ @MethodSource("data")
+ @ParameterizedTest(name = "{index}:with context {0}")
+  public void testReplaceOnFooWithAllClusteredByDimExplicitSort(String contextName, Map<String, Object> context)
Review Comment:
## Useless parameter
The parameter 'contextName' is never used.
[Show more
details](https://github.com/apache/druid/security/code-scanning/7679)
##########
processing/src/main/java/org/apache/druid/segment/incremental/IncrementalIndex.java:
##########
@@ -887,19 +942,23 @@
* Currently called to initialize IncrementalIndex dimension order during
index creation
* Index dimension ordering could be changed to initialize from
DimensionsSpec after resolution of
* https://github.com/apache/druid/issues/2011
+ *
+ * @param oldDimensionOrder dimension order to initialize
+ * @param oldColumnFormats formats for the dimensions
*/
public void loadDimensionIterable(
Iterable<String> oldDimensionOrder,
- Map<String, ColumnFormat> oldColumnCapabilities
+ Map<String, ColumnFormat> oldColumnFormats
)
{
synchronized (dimensionDescs) {
- if (!dimensionDescs.isEmpty()) {
- throw new ISE("Cannot load dimension order when existing order[%s] is
not empty.", dimensionDescs.keySet());
+ if (size() != 0) {
+        throw new ISE("Cannot load dimension order when existing index is not empty.", dimensionDescs.keySet());
Review Comment:
## Unused format argument
This format call refers to 0 argument(s) but supplies 1 argument(s).
[Show more
details](https://github.com/apache/druid/security/code-scanning/7683)
##########
server/src/test/java/org/apache/druid/segment/indexing/DataSchemaTest.java:
##########
@@ -251,29 +254,124 @@
}
@Test
- public void testOverlapTimeAndDim()
+ public void testOverlapTimeAndDimPositionZero()
{
- expectedException.expect(IllegalArgumentException.class);
+ DataSchema schema = new DataSchema(
+ IdUtilsTest.VALID_ID_CHARS,
+ new TimestampSpec("time", "auto", null),
+ DimensionsSpec.builder()
+ .setDimensions(
+ ImmutableList.of(
+ new LongDimensionSchema("__time"),
+ new StringDimensionSchema("dimA"),
+ new StringDimensionSchema("dimB")
+ )
+ )
+ .setDimensionExclusions(ImmutableList.of("dimC"))
+ .build(),
+ null,
+ new ArbitraryGranularitySpec(Granularities.DAY,
ImmutableList.of(Intervals.of("2014/2015"))),
+ null,
+ null,
+ jsonMapper
+ );
+
+ Assert.assertEquals(
+ ImmutableList.of("__time", "dimA", "dimB"),
+ schema.getDimensionsSpec().getDimensionNames()
+ );
+
+
Assert.assertFalse(schema.getDimensionsSpec().isUseExplicitSegmentSortOrder());
+ }
+
+ @Test
+ public void testOverlapTimeAndDimPositionZeroWrongType()
+ {
+ expectedException.expect(DruidException.class);
+    expectedException.expectMessage("Encountered dimension[__time] with incorrect type[STRING]. Type must be 'long'.");
+
+ DataSchema schema = new DataSchema(
+ IdUtilsTest.VALID_ID_CHARS,
+ new TimestampSpec("time", "auto", null),
+ DimensionsSpec.builder()
+ .setDimensions(
+ ImmutableList.of(
+ new StringDimensionSchema("__time"),
+ new StringDimensionSchema("dimA"),
+ new StringDimensionSchema("dimB")
+ )
+ )
+ .setDimensionExclusions(ImmutableList.of("dimC"))
+ .build(),
+ null,
+ new ArbitraryGranularitySpec(Granularities.DAY,
ImmutableList.of(Intervals.of("2014/2015"))),
+ null,
+ null,
+ jsonMapper
+ );
Review Comment:
## Unread local variable
Variable 'DataSchema schema' is never read.
[Show more
details](https://github.com/apache/druid/security/code-scanning/7684)
##########
extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQReplaceTest.java:
##########
@@ -194,6 +196,393 @@
.verifyResults();
}
+ @MethodSource("data")
+ @ParameterizedTest(name = "{index}:with context {0}")
+ public void testReplaceOnFooWithAllClusteredByDim(String contextName,
Map<String, Object> context)
+ {
+ // Tests [CLUSTERED BY dim1] with the default useExplicitSegmentSortOrder
(false). In this case,
+ // partitioning uses [dim1] but segment sort uses [__time, dim1].
+ RowSignature rowSignature = RowSignature.builder()
+ .add("__time", ColumnType.LONG)
+ .add("dim1", ColumnType.STRING)
+ .add("m1", ColumnType.FLOAT)
+ .build();
+
+ DataSegment existingDataSegment0 = DataSegment.builder()
+
.interval(Intervals.of("2000-01-01T/2000-01-04T"))
+ .size(50)
+
.version(MSQTestTaskActionClient.VERSION)
+ .dataSource("foo")
+ .build();
+
+ DataSegment existingDataSegment1 = DataSegment.builder()
+
.interval(Intervals.of("2001-01-01T/2001-01-04T"))
+ .size(50)
+
.version(MSQTestTaskActionClient.VERSION)
+ .dataSource("foo")
+ .build();
+
+ Mockito.doCallRealMethod()
+ .doReturn(ImmutableSet.of(existingDataSegment0,
existingDataSegment1))
+ .when(testTaskActionClient)
+ .submit(new RetrieveUsedSegmentsAction(
+ EasyMock.eq("foo"),
+ EasyMock.eq(ImmutableList.of(Intervals.ETERNITY))
+ ));
+
+ testIngestQuery().setSql(" REPLACE INTO foo OVERWRITE ALL "
+ + "SELECT __time, dim1, m1 "
+ + "FROM foo "
+ + "PARTITIONED BY ALL "
+ + "CLUSTERED BY dim1")
+ .setExpectedDataSource("foo")
+ .setExpectedRowSignature(rowSignature)
+ .setQueryContext(context)
+ .setExpectedDestinationIntervals(Intervals.ONLY_ETERNITY)
+ .setExpectedSegments(
+ ImmutableSet.of(
+ SegmentId.of("foo", Intervals.ETERNITY, "test", 0)
+ )
+ )
+ .setExpectedShardSpec(DimensionRangeShardSpec.class)
+ .setExpectedResultRows(
+ ImmutableList.of(
+ new Object[]{946684800000L, "", 1.0f},
+ new Object[]{946771200000L, "10.1", 2.0f},
+ new Object[]{946857600000L, "2", 3.0f},
+ new Object[]{978307200000L, "1", 4.0f},
+ new Object[]{978393600000L, "def", 5.0f},
+ new Object[]{978480000000L, "abc", 6.0f}
+ )
+ )
+
.setExpectedSegmentGenerationProgressCountersForStageWorker(
+ CounterSnapshotMatcher
+ .with().segmentRowsProcessed(6),
+ 1, 0
+ )
+ .setExpectedLastCompactionState(
+ expectedCompactionState(
+ context,
+ Collections.singletonList("dim1"),
+ DimensionsSpec.builder()
+ .setDimensions(
+ ImmutableList.of(
+ new
StringDimensionSchema("dim1"),
+ new
FloatDimensionSchema("m1")
+ )
+ )
+
.setDimensionExclusions(Collections.singletonList("__time"))
+ .build(),
+ GranularityType.ALL,
+ Intervals.ETERNITY
+ )
+ )
+ .verifyResults();
+ }
+
+ @MethodSource("data")
+ @ParameterizedTest(name = "{index}:with context {0}")
+ public void testReplaceOnFooWithAllClusteredByDimExplicitSort(String
contextName, Map<String, Object> context)
+ {
+ RowSignature rowSignature = RowSignature.builder()
+ .add("__time", ColumnType.LONG)
+ .add("dim1", ColumnType.STRING)
+ .add("m1", ColumnType.FLOAT)
+ .build();
+
+ DataSegment existingDataSegment0 = DataSegment.builder()
+
.interval(Intervals.of("2000-01-01T/2000-01-04T"))
+ .size(50)
+
.version(MSQTestTaskActionClient.VERSION)
+ .dataSource("foo")
+ .build();
+
+ DataSegment existingDataSegment1 = DataSegment.builder()
+
.interval(Intervals.of("2001-01-01T/2001-01-04T"))
+ .size(50)
+
.version(MSQTestTaskActionClient.VERSION)
+ .dataSource("foo")
+ .build();
+
+ Map<String, Object> queryContext = new HashMap<>(context);
+ queryContext.put(DimensionsSpec.PARAMETER_EXPLICIT_SORT_ORDER, true);
+
+ Mockito.doCallRealMethod()
+ .doReturn(ImmutableSet.of(existingDataSegment0,
existingDataSegment1))
+ .when(testTaskActionClient)
+ .submit(new RetrieveUsedSegmentsAction(
+ EasyMock.eq("foo"),
+ EasyMock.eq(ImmutableList.of(Intervals.ETERNITY))
+ ));
+
+ testIngestQuery().setSql(" REPLACE INTO foo OVERWRITE ALL "
+ + "SELECT __time, dim1, m1 "
+ + "FROM foo "
+ + "PARTITIONED BY ALL "
+ + "CLUSTERED BY dim1")
+ .setExpectedDataSource("foo")
+ .setExpectedRowSignature(rowSignature)
+ .setQueryContext(queryContext)
+ .setExpectedDestinationIntervals(Intervals.ONLY_ETERNITY)
+ .setExpectedSegments(
+ ImmutableSet.of(
+ SegmentId.of("foo", Intervals.ETERNITY, "test", 0)
+ )
+ )
+ .setExpectedShardSpec(DimensionRangeShardSpec.class)
+ .setExpectedResultRows(
+ ImmutableList.of(
+ new Object[]{946684800000L, "", 1.0f},
+ new Object[]{978307200000L, "1", 4.0f},
+ new Object[]{946771200000L, "10.1", 2.0f},
+ new Object[]{946857600000L, "2", 3.0f},
+ new Object[]{978480000000L, "abc", 6.0f},
+ new Object[]{978393600000L, "def", 5.0f}
+ )
+ )
+
.setExpectedSegmentGenerationProgressCountersForStageWorker(
+ CounterSnapshotMatcher
+ .with().segmentRowsProcessed(6),
+ 1, 0
+ )
+ .setExpectedLastCompactionState(
+ expectedCompactionState(
+ context,
+ Collections.singletonList("dim1"),
+ DimensionsSpec.builder()
+ .setDimensions(
+ ImmutableList.of(
+ new
StringDimensionSchema("dim1"),
+ new
LongDimensionSchema("__time"),
+ new
FloatDimensionSchema("m1")
+ )
+ )
+
.setUseExplicitSegmentSortOrder(true)
+ .build(),
+ GranularityType.ALL,
+ Intervals.ETERNITY
+ )
+ )
+ .verifyResults();
+ }
+
+ @MethodSource("data")
+ @ParameterizedTest(name = "{index}:with context {0}")
+ public void testReplaceOnFooWithAllClusteredByDimThenTimeExplicitSort(String
contextName, Map<String, Object> context)
Review Comment:
## Useless parameter
The parameter 'contextName' is never used.
[Show more
details](https://github.com/apache/druid/security/code-scanning/7678)
##########
processing/src/main/java/org/apache/druid/segment/TimeAndDimsPointer.java:
##########
@@ -148,19 +177,23 @@
@Override
public int compareTo(@Nonnull TimeAndDimsPointer rhs)
{
- long timestamp = getTimestamp();
- long rhsTimestamp = rhs.getTimestamp();
- int timestampDiff = Long.compare(timestamp, rhsTimestamp);
- if (timestampDiff != 0) {
- return timestampDiff;
- }
- for (int dimIndex = 0; dimIndex < dimensionSelectors.length; dimIndex++) {
- int dimDiff = dimensionSelectorComparators[dimIndex].compare(
- dimensionSelectors[dimIndex],
- rhs.dimensionSelectors[dimIndex]
- );
- if (dimDiff != 0) {
- return dimDiff;
+ for (int index = 0; index < (dimensionSelectors.length + 1); index++) {
+ if (index == timePosition) {
+ final long timestamp = getTimestamp();
+ final long rhsTimestamp = rhs.getTimestamp();
+ final int timestampDiff = Long.compare(timestamp, rhsTimestamp);
+ if (timestampDiff != 0) {
+ return timestampDiff;
+ }
+ } else {
+ final int dimIndex = index < timePosition ? index : index - 1;
+ final int dimDiff = dimensionSelectorComparators[dimIndex].compare(
+ dimensionSelectors[dimIndex],
Review Comment:
## Array index out of bounds
This array access might be out of bounds, as the index might be equal to the
array length.
[Show more
details](https://github.com/apache/druid/security/code-scanning/7682)
##########
extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQReplaceTest.java:
##########
@@ -194,6 +196,393 @@
.verifyResults();
}
+ @MethodSource("data")
+ @ParameterizedTest(name = "{index}:with context {0}")
+ public void testReplaceOnFooWithAllClusteredByDim(String contextName,
Map<String, Object> context)
Review Comment:
## Useless parameter
The parameter 'contextName' is never used.
[Show more
details](https://github.com/apache/druid/security/code-scanning/7680)
##########
extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQReplaceTest.java:
##########
@@ -194,6 +196,393 @@
.verifyResults();
}
+ @MethodSource("data")
+ @ParameterizedTest(name = "{index}:with context {0}")
+ public void testReplaceOnFooWithAllClusteredByDim(String contextName,
Map<String, Object> context)
+ {
+ // Tests [CLUSTERED BY dim1] with the default useExplicitSegmentSortOrder
(false). In this case,
+ // partitioning uses [dim1] but segment sort uses [__time, dim1].
+ RowSignature rowSignature = RowSignature.builder()
+ .add("__time", ColumnType.LONG)
+ .add("dim1", ColumnType.STRING)
+ .add("m1", ColumnType.FLOAT)
+ .build();
+
+ DataSegment existingDataSegment0 = DataSegment.builder()
+
.interval(Intervals.of("2000-01-01T/2000-01-04T"))
+ .size(50)
+
.version(MSQTestTaskActionClient.VERSION)
+ .dataSource("foo")
+ .build();
+
+ DataSegment existingDataSegment1 = DataSegment.builder()
+
.interval(Intervals.of("2001-01-01T/2001-01-04T"))
+ .size(50)
+
.version(MSQTestTaskActionClient.VERSION)
+ .dataSource("foo")
+ .build();
+
+ Mockito.doCallRealMethod()
+ .doReturn(ImmutableSet.of(existingDataSegment0,
existingDataSegment1))
+ .when(testTaskActionClient)
+ .submit(new RetrieveUsedSegmentsAction(
+ EasyMock.eq("foo"),
+ EasyMock.eq(ImmutableList.of(Intervals.ETERNITY))
+ ));
+
+ testIngestQuery().setSql(" REPLACE INTO foo OVERWRITE ALL "
+ + "SELECT __time, dim1, m1 "
+ + "FROM foo "
+ + "PARTITIONED BY ALL "
+ + "CLUSTERED BY dim1")
+ .setExpectedDataSource("foo")
+ .setExpectedRowSignature(rowSignature)
+ .setQueryContext(context)
+ .setExpectedDestinationIntervals(Intervals.ONLY_ETERNITY)
+ .setExpectedSegments(
+ ImmutableSet.of(
+ SegmentId.of("foo", Intervals.ETERNITY, "test", 0)
+ )
+ )
+ .setExpectedShardSpec(DimensionRangeShardSpec.class)
+ .setExpectedResultRows(
+ ImmutableList.of(
+ new Object[]{946684800000L, "", 1.0f},
+ new Object[]{946771200000L, "10.1", 2.0f},
+ new Object[]{946857600000L, "2", 3.0f},
+ new Object[]{978307200000L, "1", 4.0f},
+ new Object[]{978393600000L, "def", 5.0f},
+ new Object[]{978480000000L, "abc", 6.0f}
+ )
+ )
+
.setExpectedSegmentGenerationProgressCountersForStageWorker(
+ CounterSnapshotMatcher
+ .with().segmentRowsProcessed(6),
+ 1, 0
+ )
+ .setExpectedLastCompactionState(
+ expectedCompactionState(
+ context,
+ Collections.singletonList("dim1"),
+ DimensionsSpec.builder()
+ .setDimensions(
+ ImmutableList.of(
+ new
StringDimensionSchema("dim1"),
+ new
FloatDimensionSchema("m1")
+ )
+ )
+
.setDimensionExclusions(Collections.singletonList("__time"))
+ .build(),
+ GranularityType.ALL,
+ Intervals.ETERNITY
+ )
+ )
+ .verifyResults();
+ }
+
+ @MethodSource("data")
+ @ParameterizedTest(name = "{index}:with context {0}")
+ public void testReplaceOnFooWithAllClusteredByDimExplicitSort(String
contextName, Map<String, Object> context)
+ {
+ RowSignature rowSignature = RowSignature.builder()
+ .add("__time", ColumnType.LONG)
+ .add("dim1", ColumnType.STRING)
+ .add("m1", ColumnType.FLOAT)
+ .build();
+
+ DataSegment existingDataSegment0 = DataSegment.builder()
+
.interval(Intervals.of("2000-01-01T/2000-01-04T"))
+ .size(50)
+
.version(MSQTestTaskActionClient.VERSION)
+ .dataSource("foo")
+ .build();
+
+ DataSegment existingDataSegment1 = DataSegment.builder()
+
.interval(Intervals.of("2001-01-01T/2001-01-04T"))
+ .size(50)
+
.version(MSQTestTaskActionClient.VERSION)
+ .dataSource("foo")
+ .build();
+
+ Map<String, Object> queryContext = new HashMap<>(context);
+ queryContext.put(DimensionsSpec.PARAMETER_EXPLICIT_SORT_ORDER, true);
+
+ Mockito.doCallRealMethod()
+ .doReturn(ImmutableSet.of(existingDataSegment0,
existingDataSegment1))
+ .when(testTaskActionClient)
+ .submit(new RetrieveUsedSegmentsAction(
+ EasyMock.eq("foo"),
+ EasyMock.eq(ImmutableList.of(Intervals.ETERNITY))
+ ));
+
+ testIngestQuery().setSql(" REPLACE INTO foo OVERWRITE ALL "
+ + "SELECT __time, dim1, m1 "
+ + "FROM foo "
+ + "PARTITIONED BY ALL "
+ + "CLUSTERED BY dim1")
+ .setExpectedDataSource("foo")
+ .setExpectedRowSignature(rowSignature)
+ .setQueryContext(queryContext)
+ .setExpectedDestinationIntervals(Intervals.ONLY_ETERNITY)
+ .setExpectedSegments(
+ ImmutableSet.of(
+ SegmentId.of("foo", Intervals.ETERNITY, "test", 0)
+ )
+ )
+ .setExpectedShardSpec(DimensionRangeShardSpec.class)
+ .setExpectedResultRows(
+ ImmutableList.of(
+ new Object[]{946684800000L, "", 1.0f},
+ new Object[]{978307200000L, "1", 4.0f},
+ new Object[]{946771200000L, "10.1", 2.0f},
+ new Object[]{946857600000L, "2", 3.0f},
+ new Object[]{978480000000L, "abc", 6.0f},
+ new Object[]{978393600000L, "def", 5.0f}
+ )
+ )
+
.setExpectedSegmentGenerationProgressCountersForStageWorker(
+ CounterSnapshotMatcher
+ .with().segmentRowsProcessed(6),
+ 1, 0
+ )
+ .setExpectedLastCompactionState(
+ expectedCompactionState(
+ context,
+ Collections.singletonList("dim1"),
+ DimensionsSpec.builder()
+ .setDimensions(
+ ImmutableList.of(
+ new
StringDimensionSchema("dim1"),
+ new
LongDimensionSchema("__time"),
+ new
FloatDimensionSchema("m1")
+ )
+ )
+
.setUseExplicitSegmentSortOrder(true)
+ .build(),
+ GranularityType.ALL,
+ Intervals.ETERNITY
+ )
+ )
+ .verifyResults();
+ }
+
+ @MethodSource("data")
+ @ParameterizedTest(name = "{index}:with context {0}")
+ public void testReplaceOnFooWithAllClusteredByDimThenTimeExplicitSort(String
contextName, Map<String, Object> context)
+ {
+ // Tests that [CLUSTERED BY dim1, __time] and [CLUSTERED BY dim1] are same
when useExplicitSegmentSortOrder = true.
+ // (Same expectations as the prior test,
testReplaceOnFooWithAllClusteredByDimExplicitSort.)
+ RowSignature rowSignature = RowSignature.builder()
+ .add("__time", ColumnType.LONG)
+ .add("dim1", ColumnType.STRING)
+ .add("m1", ColumnType.FLOAT)
+ .build();
+
+ DataSegment existingDataSegment0 = DataSegment.builder()
+
.interval(Intervals.of("2000-01-01T/2000-01-04T"))
+ .size(50)
+
.version(MSQTestTaskActionClient.VERSION)
+ .dataSource("foo")
+ .build();
+
+ DataSegment existingDataSegment1 = DataSegment.builder()
+
.interval(Intervals.of("2001-01-01T/2001-01-04T"))
+ .size(50)
+
.version(MSQTestTaskActionClient.VERSION)
+ .dataSource("foo")
+ .build();
+
+ Map<String, Object> queryContext = new HashMap<>(context);
+ queryContext.put(DimensionsSpec.PARAMETER_EXPLICIT_SORT_ORDER, true);
+
+ Mockito.doCallRealMethod()
+ .doReturn(ImmutableSet.of(existingDataSegment0,
existingDataSegment1))
+ .when(testTaskActionClient)
+ .submit(new RetrieveUsedSegmentsAction(
+ EasyMock.eq("foo"),
+ EasyMock.eq(ImmutableList.of(Intervals.ETERNITY))
+ ));
+
+ testIngestQuery().setSql(" REPLACE INTO foo OVERWRITE ALL "
+ + "SELECT __time, dim1, m1 "
+ + "FROM foo "
+ + "PARTITIONED BY ALL "
+ + "CLUSTERED BY dim1, __time")
+ .setExpectedDataSource("foo")
+ .setExpectedRowSignature(rowSignature)
+ .setQueryContext(queryContext)
+ .setExpectedDestinationIntervals(Intervals.ONLY_ETERNITY)
+ .setExpectedSegments(
+ ImmutableSet.of(
+ SegmentId.of("foo", Intervals.ETERNITY, "test", 0)
+ )
+ )
+ .setExpectedShardSpec(DimensionRangeShardSpec.class)
+ .setExpectedResultRows(
+ ImmutableList.of(
+ new Object[]{946684800000L, "", 1.0f},
+ new Object[]{978307200000L, "1", 4.0f},
+ new Object[]{946771200000L, "10.1", 2.0f},
+ new Object[]{946857600000L, "2", 3.0f},
+ new Object[]{978480000000L, "abc", 6.0f},
+ new Object[]{978393600000L, "def", 5.0f}
+ )
+ )
+
.setExpectedSegmentGenerationProgressCountersForStageWorker(
+ CounterSnapshotMatcher
+ .with().segmentRowsProcessed(6),
+ 1, 0
+ )
+ .setExpectedLastCompactionState(
+ expectedCompactionState(
+ context,
+ Collections.singletonList("dim1"),
+ DimensionsSpec.builder()
+ .setDimensions(
+ ImmutableList.of(
+ new
StringDimensionSchema("dim1"),
+ new
LongDimensionSchema("__time"),
+ new
FloatDimensionSchema("m1")
+ )
+ )
+
.setUseExplicitSegmentSortOrder(true)
+ .build(),
+ GranularityType.ALL,
+ Intervals.ETERNITY
+ )
+ )
+ .verifyResults();
+ }
+
+ @MethodSource("data")
+ @ParameterizedTest(name = "{index}:with context {0}")
+ public void testReplaceOnFooWithAllClusteredByDimThenTimeError(String
contextName, Map<String, Object> context)
+ {
+ // Tests that [CLUSTERED BY dim1, __time] is an error when
useExplicitSegmentSortOrder = false (the default).
+ testIngestQuery().setSql(" REPLACE INTO foo OVERWRITE ALL "
+ + "SELECT __time, dim1, m1 "
+ + "FROM foo "
+ + "PARTITIONED BY ALL "
+ + "CLUSTERED BY dim1, __time")
+ .setExpectedDataSource("foo")
+ .setQueryContext(context)
+ .setExpectedValidationErrorMatcher(invalidSqlContains(
                         "Sort order (CLUSTERED BY) cannot include[__time] in position[1] unless context "
+                        + "parameter[useExplicitSegmentSortOrder] is set to[true]."
+ ))
+ .verifyPlanningErrors();
+ }
+
+ @MethodSource("data")
+ @ParameterizedTest(name = "{index}:with context {0}")
+ public void testReplaceOnFooWithAllClusteredByDimThenTimeError2(String
contextName, Map<String, Object> context)
Review Comment:
## Useless parameter
The parameter 'contextName' is never used.
[Show more
details](https://github.com/apache/druid/security/code-scanning/7676)
##########
extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQReplaceTest.java:
##########
@@ -194,6 +196,393 @@
.verifyResults();
}
+ @MethodSource("data")
+ @ParameterizedTest(name = "{index}:with context {0}")
+ public void testReplaceOnFooWithAllClusteredByDim(String contextName,
Map<String, Object> context)
+ {
+ // Tests [CLUSTERED BY dim1] with the default useExplicitSegmentSortOrder
(false). In this case,
+ // partitioning uses [dim1] but segment sort uses [__time, dim1].
+ RowSignature rowSignature = RowSignature.builder()
+ .add("__time", ColumnType.LONG)
+ .add("dim1", ColumnType.STRING)
+ .add("m1", ColumnType.FLOAT)
+ .build();
+
+ DataSegment existingDataSegment0 = DataSegment.builder()
+
.interval(Intervals.of("2000-01-01T/2000-01-04T"))
+ .size(50)
+
.version(MSQTestTaskActionClient.VERSION)
+ .dataSource("foo")
+ .build();
+
+ DataSegment existingDataSegment1 = DataSegment.builder()
+
.interval(Intervals.of("2001-01-01T/2001-01-04T"))
+ .size(50)
+
.version(MSQTestTaskActionClient.VERSION)
+ .dataSource("foo")
+ .build();
+
+ Mockito.doCallRealMethod()
+ .doReturn(ImmutableSet.of(existingDataSegment0,
existingDataSegment1))
+ .when(testTaskActionClient)
+ .submit(new RetrieveUsedSegmentsAction(
+ EasyMock.eq("foo"),
+ EasyMock.eq(ImmutableList.of(Intervals.ETERNITY))
+ ));
+
+ testIngestQuery().setSql(" REPLACE INTO foo OVERWRITE ALL "
+ + "SELECT __time, dim1, m1 "
+ + "FROM foo "
+ + "PARTITIONED BY ALL "
+ + "CLUSTERED BY dim1")
+ .setExpectedDataSource("foo")
+ .setExpectedRowSignature(rowSignature)
+ .setQueryContext(context)
+ .setExpectedDestinationIntervals(Intervals.ONLY_ETERNITY)
+ .setExpectedSegments(
+ ImmutableSet.of(
+ SegmentId.of("foo", Intervals.ETERNITY, "test", 0)
+ )
+ )
+ .setExpectedShardSpec(DimensionRangeShardSpec.class)
+ .setExpectedResultRows(
+ ImmutableList.of(
+ new Object[]{946684800000L, "", 1.0f},
+ new Object[]{946771200000L, "10.1", 2.0f},
+ new Object[]{946857600000L, "2", 3.0f},
+ new Object[]{978307200000L, "1", 4.0f},
+ new Object[]{978393600000L, "def", 5.0f},
+ new Object[]{978480000000L, "abc", 6.0f}
+ )
+ )
+
.setExpectedSegmentGenerationProgressCountersForStageWorker(
+ CounterSnapshotMatcher
+ .with().segmentRowsProcessed(6),
+ 1, 0
+ )
+ .setExpectedLastCompactionState(
+ expectedCompactionState(
+ context,
+ Collections.singletonList("dim1"),
+ DimensionsSpec.builder()
+ .setDimensions(
+ ImmutableList.of(
+ new
StringDimensionSchema("dim1"),
+ new
FloatDimensionSchema("m1")
+ )
+ )
+
.setDimensionExclusions(Collections.singletonList("__time"))
+ .build(),
+ GranularityType.ALL,
+ Intervals.ETERNITY
+ )
+ )
+ .verifyResults();
+ }
+
+ @MethodSource("data")
+ @ParameterizedTest(name = "{index}:with context {0}")
+ public void testReplaceOnFooWithAllClusteredByDimExplicitSort(String
contextName, Map<String, Object> context)
+ {
+ RowSignature rowSignature = RowSignature.builder()
+ .add("__time", ColumnType.LONG)
+ .add("dim1", ColumnType.STRING)
+ .add("m1", ColumnType.FLOAT)
+ .build();
+
+ DataSegment existingDataSegment0 = DataSegment.builder()
+
.interval(Intervals.of("2000-01-01T/2000-01-04T"))
+ .size(50)
+
.version(MSQTestTaskActionClient.VERSION)
+ .dataSource("foo")
+ .build();
+
+ DataSegment existingDataSegment1 = DataSegment.builder()
+
.interval(Intervals.of("2001-01-01T/2001-01-04T"))
+ .size(50)
+
.version(MSQTestTaskActionClient.VERSION)
+ .dataSource("foo")
+ .build();
+
+ Map<String, Object> queryContext = new HashMap<>(context);
+ queryContext.put(DimensionsSpec.PARAMETER_EXPLICIT_SORT_ORDER, true);
+
+ Mockito.doCallRealMethod()
+ .doReturn(ImmutableSet.of(existingDataSegment0,
existingDataSegment1))
+ .when(testTaskActionClient)
+ .submit(new RetrieveUsedSegmentsAction(
+ EasyMock.eq("foo"),
+ EasyMock.eq(ImmutableList.of(Intervals.ETERNITY))
+ ));
+
+ testIngestQuery().setSql(" REPLACE INTO foo OVERWRITE ALL "
+ + "SELECT __time, dim1, m1 "
+ + "FROM foo "
+ + "PARTITIONED BY ALL "
+ + "CLUSTERED BY dim1")
+ .setExpectedDataSource("foo")
+ .setExpectedRowSignature(rowSignature)
+ .setQueryContext(queryContext)
+ .setExpectedDestinationIntervals(Intervals.ONLY_ETERNITY)
+ .setExpectedSegments(
+ ImmutableSet.of(
+ SegmentId.of("foo", Intervals.ETERNITY, "test", 0)
+ )
+ )
+ .setExpectedShardSpec(DimensionRangeShardSpec.class)
+ .setExpectedResultRows(
+ ImmutableList.of(
+ new Object[]{946684800000L, "", 1.0f},
+ new Object[]{978307200000L, "1", 4.0f},
+ new Object[]{946771200000L, "10.1", 2.0f},
+ new Object[]{946857600000L, "2", 3.0f},
+ new Object[]{978480000000L, "abc", 6.0f},
+ new Object[]{978393600000L, "def", 5.0f}
+ )
+ )
+
.setExpectedSegmentGenerationProgressCountersForStageWorker(
+ CounterSnapshotMatcher
+ .with().segmentRowsProcessed(6),
+ 1, 0
+ )
+ .setExpectedLastCompactionState(
+ expectedCompactionState(
+ context,
+ Collections.singletonList("dim1"),
+ DimensionsSpec.builder()
+ .setDimensions(
+ ImmutableList.of(
+ new
StringDimensionSchema("dim1"),
+ new
LongDimensionSchema("__time"),
+ new
FloatDimensionSchema("m1")
+ )
+ )
+
.setUseExplicitSegmentSortOrder(true)
+ .build(),
+ GranularityType.ALL,
+ Intervals.ETERNITY
+ )
+ )
+ .verifyResults();
+ }
+
+ @MethodSource("data")
+ @ParameterizedTest(name = "{index}:with context {0}")
+ public void testReplaceOnFooWithAllClusteredByDimThenTimeExplicitSort(String
contextName, Map<String, Object> context)
+ {
+ // Tests that [CLUSTERED BY dim1, __time] and [CLUSTERED BY dim1] are same
when useExplicitSegmentSortOrder = true.
+ // (Same expectations as the prior test,
testReplaceOnFooWithAllClusteredByDimExplicitSort.)
+ RowSignature rowSignature = RowSignature.builder()
+ .add("__time", ColumnType.LONG)
+ .add("dim1", ColumnType.STRING)
+ .add("m1", ColumnType.FLOAT)
+ .build();
+
+ DataSegment existingDataSegment0 = DataSegment.builder()
+
.interval(Intervals.of("2000-01-01T/2000-01-04T"))
+ .size(50)
+
.version(MSQTestTaskActionClient.VERSION)
+ .dataSource("foo")
+ .build();
+
+ DataSegment existingDataSegment1 = DataSegment.builder()
+
.interval(Intervals.of("2001-01-01T/2001-01-04T"))
+ .size(50)
+
.version(MSQTestTaskActionClient.VERSION)
+ .dataSource("foo")
+ .build();
+
+ Map<String, Object> queryContext = new HashMap<>(context);
+ queryContext.put(DimensionsSpec.PARAMETER_EXPLICIT_SORT_ORDER, true);
+
+ Mockito.doCallRealMethod()
+ .doReturn(ImmutableSet.of(existingDataSegment0,
existingDataSegment1))
+ .when(testTaskActionClient)
+ .submit(new RetrieveUsedSegmentsAction(
+ EasyMock.eq("foo"),
+ EasyMock.eq(ImmutableList.of(Intervals.ETERNITY))
+ ));
+
+ testIngestQuery().setSql(" REPLACE INTO foo OVERWRITE ALL "
+ + "SELECT __time, dim1, m1 "
+ + "FROM foo "
+ + "PARTITIONED BY ALL "
+ + "CLUSTERED BY dim1, __time")
+ .setExpectedDataSource("foo")
+ .setExpectedRowSignature(rowSignature)
+ .setQueryContext(queryContext)
+ .setExpectedDestinationIntervals(Intervals.ONLY_ETERNITY)
+ .setExpectedSegments(
+ ImmutableSet.of(
+ SegmentId.of("foo", Intervals.ETERNITY, "test", 0)
+ )
+ )
+ .setExpectedShardSpec(DimensionRangeShardSpec.class)
+ .setExpectedResultRows(
+ ImmutableList.of(
+ new Object[]{946684800000L, "", 1.0f},
+ new Object[]{978307200000L, "1", 4.0f},
+ new Object[]{946771200000L, "10.1", 2.0f},
+ new Object[]{946857600000L, "2", 3.0f},
+ new Object[]{978480000000L, "abc", 6.0f},
+ new Object[]{978393600000L, "def", 5.0f}
+ )
+ )
+
.setExpectedSegmentGenerationProgressCountersForStageWorker(
+ CounterSnapshotMatcher
+ .with().segmentRowsProcessed(6),
+ 1, 0
+ )
+ .setExpectedLastCompactionState(
+ expectedCompactionState(
+ context,
+ Collections.singletonList("dim1"),
+ DimensionsSpec.builder()
+ .setDimensions(
+ ImmutableList.of(
+ new
StringDimensionSchema("dim1"),
+ new
LongDimensionSchema("__time"),
+ new
FloatDimensionSchema("m1")
+ )
+ )
+
.setUseExplicitSegmentSortOrder(true)
+ .build(),
+ GranularityType.ALL,
+ Intervals.ETERNITY
+ )
+ )
+ .verifyResults();
+ }
+
+ @MethodSource("data")
+ @ParameterizedTest(name = "{index}:with context {0}")
+ public void testReplaceOnFooWithAllClusteredByDimThenTimeError(String
contextName, Map<String, Object> context)
Review Comment:
## Useless parameter
The parameter 'contextName' is never used.
[Show more
details](https://github.com/apache/druid/security/code-scanning/7677)
##########
extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQReplaceTest.java:
##########
@@ -194,6 +196,393 @@
.verifyResults();
}
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceOnFooWithAllClusteredByDim(String contextName, Map<String, Object> context)
{
  // Tests [CLUSTERED BY dim1] with the default useExplicitSegmentSortOrder (false). In this case,
  // partitioning uses [dim1] but segment sort uses [__time, dim1].
  //
  // Note: "contextName" looks unused, but it backs the "{0}" placeholder in the
  // @ParameterizedTest display name above, so it must remain in the signature.
  RowSignature rowSignature = RowSignature.builder()
                                          .add("__time", ColumnType.LONG)
                                          .add("dim1", ColumnType.STRING)
                                          .add("m1", ColumnType.FLOAT)
                                          .build();

  // Two pre-existing segments on "foo"; REPLACE ... OVERWRITE ALL must overshadow both.
  DataSegment existingDataSegment0 = DataSegment.builder()
                                                .interval(Intervals.of("2000-01-01T/2000-01-04T"))
                                                .size(50)
                                                .version(MSQTestTaskActionClient.VERSION)
                                                .dataSource("foo")
                                                .build();

  DataSegment existingDataSegment1 = DataSegment.builder()
                                                .interval(Intervals.of("2001-01-01T/2001-01-04T"))
                                                .size(50)
                                                .version(MSQTestTaskActionClient.VERSION)
                                                .dataSource("foo")
                                                .build();

  // First submit() call runs the real implementation; the subsequent used-segment
  // retrieval over ETERNITY returns the two stub segments above.
  Mockito.doCallRealMethod()
         .doReturn(ImmutableSet.of(existingDataSegment0, existingDataSegment1))
         .when(testTaskActionClient)
         .submit(new RetrieveUsedSegmentsAction(
             EasyMock.eq("foo"),
             EasyMock.eq(ImmutableList.of(Intervals.ETERNITY))
         ));

  testIngestQuery().setSql(" REPLACE INTO foo OVERWRITE ALL "
                           + "SELECT __time, dim1, m1 "
                           + "FROM foo "
                           + "PARTITIONED BY ALL "
                           + "CLUSTERED BY dim1")
                   .setExpectedDataSource("foo")
                   .setExpectedRowSignature(rowSignature)
                   .setQueryContext(context)
                   .setExpectedDestinationIntervals(Intervals.ONLY_ETERNITY)
                   .setExpectedSegments(
                       ImmutableSet.of(
                           SegmentId.of("foo", Intervals.ETERNITY, "test", 0)
                       )
                   )
                   // Range partitioning on [dim1] produces a DimensionRangeShardSpec.
                   .setExpectedShardSpec(DimensionRangeShardSpec.class)
                   // Expected rows are in ascending __time order, consistent with the
                   // default time-first segment sort [__time, dim1].
                   .setExpectedResultRows(
                       ImmutableList.of(
                           new Object[]{946684800000L, "", 1.0f},
                           new Object[]{946771200000L, "10.1", 2.0f},
                           new Object[]{946857600000L, "2", 3.0f},
                           new Object[]{978307200000L, "1", 4.0f},
                           new Object[]{978393600000L, "def", 5.0f},
                           new Object[]{978480000000L, "abc", 6.0f}
                       )
                   )
                   .setExpectedSegmentGenerationProgressCountersForStageWorker(
                       CounterSnapshotMatcher
                           .with().segmentRowsProcessed(6),
                       1, 0
                   )
                   .setExpectedLastCompactionState(
                       expectedCompactionState(
                           context,
                           Collections.singletonList("dim1"),
                           // __time is listed as an exclusion (not a dimension) because the
                           // segment keeps the default time-first sort in this test.
                           DimensionsSpec.builder()
                                         .setDimensions(
                                             ImmutableList.of(
                                                 new StringDimensionSchema("dim1"),
                                                 new FloatDimensionSchema("m1")
                                             )
                                         )
                                         .setDimensionExclusions(Collections.singletonList("__time"))
                                         .build(),
                           GranularityType.ALL,
                           Intervals.ETERNITY
                       )
                   )
                   .verifyResults();
}
+
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceOnFooWithAllClusteredByDimExplicitSort(String contextName, Map<String, Object> context)
{
  // Tests [CLUSTERED BY dim1] with useExplicitSegmentSortOrder = true. Unlike the
  // default-sort variant of this test, the segment sort order here starts with [dim1]
  // rather than [__time], as reflected in the expected row order and DimensionsSpec below.
  //
  // Note: "contextName" backs the "{0}" placeholder in the @ParameterizedTest display name.
  RowSignature rowSignature = RowSignature.builder()
                                          .add("__time", ColumnType.LONG)
                                          .add("dim1", ColumnType.STRING)
                                          .add("m1", ColumnType.FLOAT)
                                          .build();

  // Two pre-existing segments on "foo"; REPLACE ... OVERWRITE ALL must overshadow both.
  DataSegment existingDataSegment0 = DataSegment.builder()
                                                .interval(Intervals.of("2000-01-01T/2000-01-04T"))
                                                .size(50)
                                                .version(MSQTestTaskActionClient.VERSION)
                                                .dataSource("foo")
                                                .build();

  DataSegment existingDataSegment1 = DataSegment.builder()
                                                .interval(Intervals.of("2001-01-01T/2001-01-04T"))
                                                .size(50)
                                                .version(MSQTestTaskActionClient.VERSION)
                                                .dataSource("foo")
                                                .build();

  // Enable explicit segment sort order on top of the parameterized base context.
  Map<String, Object> queryContext = new HashMap<>(context);
  queryContext.put(DimensionsSpec.PARAMETER_EXPLICIT_SORT_ORDER, true);

  // First submit() call runs the real implementation; the subsequent used-segment
  // retrieval over ETERNITY returns the two stub segments above.
  Mockito.doCallRealMethod()
         .doReturn(ImmutableSet.of(existingDataSegment0, existingDataSegment1))
         .when(testTaskActionClient)
         .submit(new RetrieveUsedSegmentsAction(
             EasyMock.eq("foo"),
             EasyMock.eq(ImmutableList.of(Intervals.ETERNITY))
         ));

  testIngestQuery().setSql(" REPLACE INTO foo OVERWRITE ALL "
                           + "SELECT __time, dim1, m1 "
                           + "FROM foo "
                           + "PARTITIONED BY ALL "
                           + "CLUSTERED BY dim1")
                   .setExpectedDataSource("foo")
                   .setExpectedRowSignature(rowSignature)
                   .setQueryContext(queryContext)
                   .setExpectedDestinationIntervals(Intervals.ONLY_ETERNITY)
                   .setExpectedSegments(
                       ImmutableSet.of(
                           SegmentId.of("foo", Intervals.ETERNITY, "test", 0)
                       )
                   )
                   // Range partitioning on [dim1] produces a DimensionRangeShardSpec.
                   .setExpectedShardSpec(DimensionRangeShardSpec.class)
                   // Expected rows are in lexicographic dim1 order ("", "1", "10.1", "2",
                   // "abc", "def"), showing the segment sort begins with dim1, not __time.
                   .setExpectedResultRows(
                       ImmutableList.of(
                           new Object[]{946684800000L, "", 1.0f},
                           new Object[]{978307200000L, "1", 4.0f},
                           new Object[]{946771200000L, "10.1", 2.0f},
                           new Object[]{946857600000L, "2", 3.0f},
                           new Object[]{978480000000L, "abc", 6.0f},
                           new Object[]{978393600000L, "def", 5.0f}
                       )
                   )
                   .setExpectedSegmentGenerationProgressCountersForStageWorker(
                       CounterSnapshotMatcher
                           .with().segmentRowsProcessed(6),
                       1, 0
                   )
                   .setExpectedLastCompactionState(
                       // NOTE(review): the base "context" is passed here rather than the
                       // augmented "queryContext" — confirm this is intentional.
                       expectedCompactionState(
                           context,
                           Collections.singletonList("dim1"),
                           // __time appears as an ordinary LONG dimension in second position:
                           // the explicit sort order is [dim1, __time, m1].
                           DimensionsSpec.builder()
                                         .setDimensions(
                                             ImmutableList.of(
                                                 new StringDimensionSchema("dim1"),
                                                 new LongDimensionSchema("__time"),
                                                 new FloatDimensionSchema("m1")
                                             )
                                         )
                                         .setUseExplicitSegmentSortOrder(true)
                                         .build(),
                           GranularityType.ALL,
                           Intervals.ETERNITY
                       )
                   )
                   .verifyResults();
}
+
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceOnFooWithAllClusteredByDimThenTimeExplicitSort(String contextName, Map<String, Object> context)
{
  // Tests that [CLUSTERED BY dim1, __time] and [CLUSTERED BY dim1] are same when useExplicitSegmentSortOrder = true.
  // (Same expectations as the prior test, testReplaceOnFooWithAllClusteredByDimExplicitSort.)
  //
  // Note: "contextName" backs the "{0}" placeholder in the @ParameterizedTest display name.
  RowSignature rowSignature = RowSignature.builder()
                                          .add("__time", ColumnType.LONG)
                                          .add("dim1", ColumnType.STRING)
                                          .add("m1", ColumnType.FLOAT)
                                          .build();

  // Two pre-existing segments on "foo"; REPLACE ... OVERWRITE ALL must overshadow both.
  DataSegment existingDataSegment0 = DataSegment.builder()
                                                .interval(Intervals.of("2000-01-01T/2000-01-04T"))
                                                .size(50)
                                                .version(MSQTestTaskActionClient.VERSION)
                                                .dataSource("foo")
                                                .build();

  DataSegment existingDataSegment1 = DataSegment.builder()
                                                .interval(Intervals.of("2001-01-01T/2001-01-04T"))
                                                .size(50)
                                                .version(MSQTestTaskActionClient.VERSION)
                                                .dataSource("foo")
                                                .build();

  // Enable explicit segment sort order on top of the parameterized base context.
  Map<String, Object> queryContext = new HashMap<>(context);
  queryContext.put(DimensionsSpec.PARAMETER_EXPLICIT_SORT_ORDER, true);

  // First submit() call runs the real implementation; the subsequent used-segment
  // retrieval over ETERNITY returns the two stub segments above.
  Mockito.doCallRealMethod()
         .doReturn(ImmutableSet.of(existingDataSegment0, existingDataSegment1))
         .when(testTaskActionClient)
         .submit(new RetrieveUsedSegmentsAction(
             EasyMock.eq("foo"),
             EasyMock.eq(ImmutableList.of(Intervals.ETERNITY))
         ));

  testIngestQuery().setSql(" REPLACE INTO foo OVERWRITE ALL "
                           + "SELECT __time, dim1, m1 "
                           + "FROM foo "
                           + "PARTITIONED BY ALL "
                           + "CLUSTERED BY dim1, __time")
                   .setExpectedDataSource("foo")
                   .setExpectedRowSignature(rowSignature)
                   .setQueryContext(queryContext)
                   .setExpectedDestinationIntervals(Intervals.ONLY_ETERNITY)
                   .setExpectedSegments(
                       ImmutableSet.of(
                           SegmentId.of("foo", Intervals.ETERNITY, "test", 0)
                       )
                   )
                   // Range partitioning on [dim1] produces a DimensionRangeShardSpec.
                   .setExpectedShardSpec(DimensionRangeShardSpec.class)
                   // Expected rows are in lexicographic dim1 order, identical to the
                   // [CLUSTERED BY dim1] explicit-sort test above.
                   .setExpectedResultRows(
                       ImmutableList.of(
                           new Object[]{946684800000L, "", 1.0f},
                           new Object[]{978307200000L, "1", 4.0f},
                           new Object[]{946771200000L, "10.1", 2.0f},
                           new Object[]{946857600000L, "2", 3.0f},
                           new Object[]{978480000000L, "abc", 6.0f},
                           new Object[]{978393600000L, "def", 5.0f}
                       )
                   )
                   .setExpectedSegmentGenerationProgressCountersForStageWorker(
                       CounterSnapshotMatcher
                           .with().segmentRowsProcessed(6),
                       1, 0
                   )
                   .setExpectedLastCompactionState(
                       // NOTE(review): the base "context" is passed here rather than the
                       // augmented "queryContext" — confirm this is intentional.
                       expectedCompactionState(
                           context,
                           Collections.singletonList("dim1"),
                           // Explicit sort order [dim1, __time, m1]; __time is an ordinary
                           // LONG dimension in second position.
                           DimensionsSpec.builder()
                                         .setDimensions(
                                             ImmutableList.of(
                                                 new StringDimensionSchema("dim1"),
                                                 new LongDimensionSchema("__time"),
                                                 new FloatDimensionSchema("m1")
                                             )
                                         )
                                         .setUseExplicitSegmentSortOrder(true)
                                         .build(),
                           GranularityType.ALL,
                           Intervals.ETERNITY
                       )
                   )
                   .verifyResults();
}
+
+ @MethodSource("data")
+ @ParameterizedTest(name = "{index}:with context {0}")
+ public void testReplaceOnFooWithAllClusteredByDimThenTimeError(String
contextName, Map<String, Object> context)
+ {
+ // Tests that [CLUSTERED BY dim1, __time] is an error when
useExplicitSegmentSortOrder = false (the default).
+ testIngestQuery().setSql(" REPLACE INTO foo OVERWRITE ALL "
+ + "SELECT __time, dim1, m1 "
+ + "FROM foo "
+ + "PARTITIONED BY ALL "
+ + "CLUSTERED BY dim1, __time")
+ .setExpectedDataSource("foo")
+ .setQueryContext(context)
+ .setExpectedValidationErrorMatcher(invalidSqlContains(
+ "Sort order (CLUSTERED BY) cannot include[__time] in
position[1] unless context "
+ + "parameter[useExplicitSegmentSortOrder] is set
to[true]."
+ ))
+ .verifyPlanningErrors();
+ }
+
+ @MethodSource("data")
+ @ParameterizedTest(name = "{index}:with context {0}")
+ public void testReplaceOnFooWithAllClusteredByDimThenTimeError2(String
contextName, Map<String, Object> context)
+ {
+ // Tests that setting segmentSortOrder = [dim1, __time] is an error when
+ // useExplicitSegmentSortOrder = false (the default).
+ Map<String, Object> queryContext = new HashMap<>(context);
+ queryContext.put(MultiStageQueryContext.CTX_SORT_ORDER, "dim1, __time");
+
+ testIngestQuery().setSql(" REPLACE INTO foo OVERWRITE ALL "
+ + "SELECT __time, dim1, m1 "
+ + "FROM foo "
+ + "PARTITIONED BY ALL "
+ + "CLUSTERED BY dim1")
+ .setExpectedDataSource("foo")
+ .setQueryContext(queryContext)
+ .setExpectedValidationErrorMatcher(invalidSqlContains(
+ "Context parameter[segmentSortOrder] must start
with[__time] unless context "
+ + "parameter[useExplicitSegmentSortOrder] is set
to[true]."
+ ))
+ .verifyPlanningErrors();
+ }
+
+ @MethodSource("data")
+ @ParameterizedTest(name = "{index}:with context {0}")
+ public void testReplaceOnFooWithAllClusteredByTimeThenDimExplicitSort(String
contextName, Map<String, Object> context)
Review Comment:
## Useless parameter
The parameter 'contextName' is never used.
[Show more
details](https://github.com/apache/druid/security/code-scanning/7675)
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]