This is an automated email from the ASF dual-hosted git repository.
maytasm pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/druid.git
The following commit(s) were added to refs/heads/master by this push:
new eabad0f Keep query granularity of compacted segments after compaction
(#10856)
eabad0f is described below
commit eabad0fb352ee7f1f32658edb95d236fe1837662
Author: Agustin Gonzalez <[email protected]>
AuthorDate: Thu Feb 18 02:35:10 2021 -0700
Keep query granularity of compacted segments after compaction (#10856)
* Keep query granularity of compacted segments after compaction
* Protect against null isRollup
* Fix SpotBugs check RC_REF_COMPARISON_BAD_PRACTICE_BOOLEAN & edit an
existing comment
* Make sure that NONE is also included when comparing for the finer
granularity
* Update integration test check for segment size due to query granularity
propagation affecting size
* Minor code cleanup
* Added functional test to verify queryGranularity after compaction
* Minor style fix
* Update unit tests
---
.../java/util/common/granularity/Granularity.java | 25 ++++++
.../druid/java/util/common/GranularityTest.java | 22 ++++++
.../druid/indexing/common/task/CompactionTask.java | 89 +++++++++++++++++-----
.../common/task/CompactionTaskParallelRunTest.java | 8 +-
.../common/task/CompactionTaskRunTest.java | 14 ++--
.../indexing/common/task/CompactionTaskTest.java | 55 +++++++++++++
.../testing/utils/AbstractQueryWithResults.java | 17 ++++-
.../testing/utils/AbstractTestQueryHelper.java | 5 +-
.../druid/testing/utils/QueryResultVerifier.java | 16 +++-
.../druid/testing/utils/QueryWithResults.java | 5 +-
.../druid/testing/utils/SqlQueryWithResults.java | 3 +-
.../coordinator/duty/ITAutoCompactionTest.java | 4 +-
.../druid/tests/indexer/ITCompactionTaskTest.java | 37 ++++++++-
.../query/ITQueryRetryTestOnMissingSegments.java | 16 +++-
.../resources/indexer/segment_metadata_qr2.json | 23 ++++++
.../resources/indexer/segment_metadata_qr4.json | 29 +++++++
16 files changed, 321 insertions(+), 47 deletions(-)
diff --git
a/core/src/main/java/org/apache/druid/java/util/common/granularity/Granularity.java
b/core/src/main/java/org/apache/druid/java/util/common/granularity/Granularity.java
index 5d130b0..1046b85 100644
---
a/core/src/main/java/org/apache/druid/java/util/common/granularity/Granularity.java
+++
b/core/src/main/java/org/apache/druid/java/util/common/granularity/Granularity.java
@@ -31,6 +31,7 @@ import org.joda.time.Interval;
import org.joda.time.format.DateTimeFormatter;
import java.util.ArrayList;
+import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
@@ -40,6 +41,30 @@ import java.util.regex.Pattern;
public abstract class Granularity implements Cacheable
{
+
+ public static Comparator<Granularity> IS_FINER_THAN = new
Comparator<Granularity>()
+ {
+ @Override
+ /**
+ * Decide whether this granularity is finer than the other granularity
+ *
+ * @param left The left granularity
+ * @param right The right granularity
+ * @return -1 if left granularity is finer, 0 if it is the same, 1 if it
is greater
+ */
+ public int compare(Granularity left, Granularity right)
+ {
+ long leftDuration = left.bucket(DateTimes.EPOCH).toDurationMillis();
+ long rightDuration = right.bucket(DateTimes.EPOCH).toDurationMillis();
+ if (leftDuration < rightDuration) {
+ return -1;
+ } else if (leftDuration == rightDuration) {
+ return 0;
+ } else {
+ return 1;
+ }
+ }
+ };
/**
* Default patterns for parsing paths.
*/
diff --git
a/core/src/test/java/org/apache/druid/java/util/common/GranularityTest.java
b/core/src/test/java/org/apache/druid/java/util/common/GranularityTest.java
index 76fd930..6551aea 100644
--- a/core/src/test/java/org/apache/druid/java/util/common/GranularityTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/common/GranularityTest.java
@@ -41,6 +41,7 @@ import java.util.stream.StreamSupport;
public class GranularityTest
{
+ final Granularity NONE = Granularities.NONE;
final Granularity SECOND = Granularities.SECOND;
final Granularity MINUTE = Granularities.MINUTE;
final Granularity HOUR = Granularities.HOUR;
@@ -50,6 +51,7 @@ public class GranularityTest
final Granularity WEEK = Granularities.WEEK;
final Granularity MONTH = Granularities.MONTH;
final Granularity YEAR = Granularities.YEAR;
+ final Granularity ALL = Granularities.ALL;
@Test
public void testHiveFormat()
@@ -809,6 +811,26 @@ public class GranularityTest
);
}
+ @Test
+ public void testIsFinerComparator()
+ {
+ Assert.assertTrue(Granularity.IS_FINER_THAN.compare(NONE, SECOND) < 0);
+ Assert.assertTrue(Granularity.IS_FINER_THAN.compare(SECOND, NONE) > 0);
+ Assert.assertTrue(Granularity.IS_FINER_THAN.compare(NONE, MINUTE) < 0);
+ Assert.assertTrue(Granularity.IS_FINER_THAN.compare(MINUTE, NONE) > 0);
+ Assert.assertTrue(Granularity.IS_FINER_THAN.compare(DAY, MONTH) < 0);
+ Assert.assertTrue(Granularity.IS_FINER_THAN.compare(Granularities.YEAR,
ALL) < 0);
+ Assert.assertTrue(Granularity.IS_FINER_THAN.compare(Granularities.ALL,
YEAR) > 0);
+ // Distinct references are needed to avoid intelli-j complain about
compare being called on itself
+ // thus the variables
+ Granularity day = DAY;
+ Granularity none = NONE;
+ Granularity all = ALL;
+ Assert.assertTrue(Granularity.IS_FINER_THAN.compare(DAY, day) == 0);
+ Assert.assertTrue(Granularity.IS_FINER_THAN.compare(NONE, none) == 0);
+ Assert.assertTrue(Granularity.IS_FINER_THAN.compare(ALL, all) == 0);
+ }
+
private static class PathDate
{
public final String path;
diff --git
a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java
b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java
index 5d971c3..276b176 100644
---
a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java
+++
b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java
@@ -32,6 +32,7 @@ import com.google.common.collect.Lists;
import org.apache.curator.shaded.com.google.common.base.Verify;
import org.apache.druid.client.coordinator.CoordinatorClient;
import org.apache.druid.client.indexing.ClientCompactionTaskQuery;
+import org.apache.druid.common.guava.SettableSupplier;
import org.apache.druid.data.input.InputSource;
import org.apache.druid.data.input.impl.DimensionSchema;
import org.apache.druid.data.input.impl.DimensionSchema.MultiValueHandling;
@@ -65,7 +66,6 @@ import org.apache.druid.java.util.common.JodaUtils;
import org.apache.druid.java.util.common.NonnullPair;
import org.apache.druid.java.util.common.RE;
import org.apache.druid.java.util.common.StringUtils;
-import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.granularity.Granularity;
import org.apache.druid.java.util.common.granularity.GranularityType;
import org.apache.druid.java.util.common.guava.Comparators;
@@ -642,21 +642,13 @@ public class CompactionTask extends AbstractBatchIndexTask
Granularity segmentGranularity
)
{
- // check index metadata
- for (NonnullPair<QueryableIndex, DataSegment> pair :
queryableIndexAndSegments) {
- final QueryableIndex index = pair.lhs;
- if (index.getMetadata() == null) {
- throw new RE("Index metadata doesn't exist for segment[%s]",
pair.rhs.getId());
- }
- }
+ // check index metadata &
+ // Decide which values to propagate (i.e. carry over) for rollup &
queryGranularity
+ final SettableSupplier<Boolean> rollup = new SettableSupplier<>();
+ final SettableSupplier<Granularity> queryGranularity = new
SettableSupplier<>();
+ decideRollupAndQueryGranularityCarryOver(rollup, queryGranularity,
queryableIndexAndSegments);
// find granularity spec
- // set rollup only if rollup is set for all segments
- final boolean rollup = queryableIndexAndSegments.stream().allMatch(pair ->
{
- // We have already checked getMetadata() doesn't return null
- final Boolean isRollup = pair.lhs.getMetadata().isRollup();
- return isRollup != null && isRollup;
- });
final Interval totalInterval = JodaUtils.umbrellaInterval(
queryableIndexAndSegments.stream().map(p ->
p.rhs.getInterval()).collect(Collectors.toList())
@@ -664,8 +656,8 @@ public class CompactionTask extends AbstractBatchIndexTask
final GranularitySpec granularitySpec = new UniformGranularitySpec(
Preconditions.checkNotNull(segmentGranularity),
- Granularities.NONE,
- rollup,
+ queryGranularity.get(),
+ rollup.get(),
Collections.singletonList(totalInterval)
);
@@ -677,7 +669,8 @@ public class CompactionTask extends AbstractBatchIndexTask
?
createMetricsSpec(queryableIndexAndSegments)
:
convertToCombiningFactories(metricsSpec);
- return new DataSchema(
+ return new
+ DataSchema(
dataSource,
new TimestampSpec(null, null, null),
finalDimensionsSpec,
@@ -687,6 +680,64 @@ public class CompactionTask extends AbstractBatchIndexTask
);
}
+
+ /**
+ * Decide which rollup & queryGranularity to propagate for the compacted
segment based on
+ * the data segments given
+ *
+ * @param rollup Reference to update with the rollup value
+ * @param queryGranularity Reference to update with the
queryGranularity value
+ * @param queryableIndexAndSegments The segments to compact
+ */
+ private static void decideRollupAndQueryGranularityCarryOver(
+ SettableSupplier<Boolean> rollup,
+ SettableSupplier<Granularity> queryGranularity,
+ List<NonnullPair<QueryableIndex, DataSegment>> queryableIndexAndSegments
+ )
+ {
+ final SettableSupplier<Boolean> rollupIsValid = new
SettableSupplier<>(true);
+ for (NonnullPair<QueryableIndex, DataSegment> pair :
queryableIndexAndSegments) {
+ final QueryableIndex index = pair.lhs;
+ if (index.getMetadata() == null) {
+ throw new RE("Index metadata doesn't exist for segment[%s]",
pair.rhs.getId());
+ }
+ // carry-overs (i.e. query granularity & rollup) are valid iff they are
the same in every segment:
+
+ // Pick rollup value if all segments being compacted have the same,
non-null, value otherwise set it to false
+ if (rollupIsValid.get()) {
+ Boolean isRollup = index.getMetadata().isRollup();
+ if (isRollup == null) {
+ rollupIsValid.set(false);
+ rollup.set(false);
+ } else if (rollup.get() == null) {
+ rollup.set(isRollup);
+ } else if (!rollup.get().equals(isRollup.booleanValue())) {
+ rollupIsValid.set(false);
+ rollup.set(false);
+ }
+ }
+
+ // Pick the finer, non-null, of the query granularities of the segments
being compacted
+ Granularity current = index.getMetadata().getQueryGranularity();
+ queryGranularity.set(compareWithCurrent(queryGranularity.get(),
current));
+ }
+ }
+
+ @VisibleForTesting
+ static Granularity compareWithCurrent(Granularity queryGranularity,
Granularity current)
+ {
+ if (queryGranularity == null && current != null) {
+ queryGranularity = current;
+ } else if (queryGranularity != null
+ && current != null
+ && Granularity.IS_FINER_THAN.compare(current, queryGranularity)
< 0) {
+ queryGranularity = current;
+ }
+ // we never propagate nulls when there is at least one non-null
granularity thus
+ // do nothing for the case queryGranularity != null && current == null
+ return queryGranularity;
+ }
+
private static AggregatorFactory[] createMetricsSpec(
List<NonnullPair<QueryableIndex, DataSegment>> queryableIndexAndSegments
)
@@ -881,8 +932,8 @@ public class CompactionTask extends AbstractBatchIndexTask
ParallelIndexTuningConfig computeTuningConfig()
{
ParallelIndexTuningConfig newTuningConfig = tuningConfig == null
- ?
ParallelIndexTuningConfig.defaultConfig()
- : tuningConfig;
+ ?
ParallelIndexTuningConfig.defaultConfig()
+ : tuningConfig;
PartitionsSpec partitionsSpec =
newTuningConfig.getGivenOrDefaultPartitionsSpec();
if (partitionsSpec instanceof DynamicPartitionsSpec) {
final DynamicPartitionsSpec dynamicPartitionsSpec =
(DynamicPartitionsSpec) partitionsSpec;
diff --git
a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskParallelRunTest.java
b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskParallelRunTest.java
index 7fe8781..2e23fca 100644
---
a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskParallelRunTest.java
+++
b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskParallelRunTest.java
@@ -157,7 +157,7 @@ public class CompactionTaskParallelRunTest extends
AbstractParallelIndexSupervis
getObjectMapper().writeValueAsString(
new UniformGranularitySpec(
Granularities.HOUR,
- Granularities.NONE,
+ Granularities.MINUTE,
true,
ImmutableList.of(segment.getInterval())
)
@@ -197,7 +197,7 @@ public class CompactionTaskParallelRunTest extends
AbstractParallelIndexSupervis
getObjectMapper().writeValueAsString(
new UniformGranularitySpec(
Granularities.HOUR,
- Granularities.NONE,
+ Granularities.MINUTE,
true,
ImmutableList.of(segment.getInterval())
)
@@ -237,7 +237,7 @@ public class CompactionTaskParallelRunTest extends
AbstractParallelIndexSupervis
getObjectMapper().writeValueAsString(
new UniformGranularitySpec(
Granularities.HOUR,
- Granularities.NONE,
+ Granularities.MINUTE,
true,
ImmutableList.of(segment.getInterval())
)
@@ -277,7 +277,7 @@ public class CompactionTaskParallelRunTest extends
AbstractParallelIndexSupervis
getObjectMapper().writeValueAsString(
new UniformGranularitySpec(
Granularities.HOUR,
- Granularities.NONE,
+ Granularities.MINUTE,
true,
ImmutableList.of(segment.getInterval())
)
diff --git
a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java
b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java
index bd1f819..0e28772 100644
---
a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java
+++
b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java
@@ -248,7 +248,7 @@ public class CompactionTaskRunTest extends IngestionTestBase
segments.get(i).getInterval()
);
Assert.assertEquals(
- getDefaultCompactionState(Granularities.HOUR, Granularities.NONE,
ImmutableList.of(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00", i, i
+ 1))),
+ getDefaultCompactionState(Granularities.HOUR, Granularities.MINUTE,
ImmutableList.of(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00", i, i
+ 1))),
segments.get(i).getLastCompactionState()
);
if (lockGranularity == LockGranularity.SEGMENT) {
@@ -339,7 +339,7 @@ public class CompactionTaskRunTest extends IngestionTestBase
getObjectMapper().writeValueAsString(
new UniformGranularitySpec(
Granularities.HOUR,
- Granularities.NONE,
+ Granularities.MINUTE,
true,
ImmutableList.of(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00", i, i
+ 1))
)
@@ -385,7 +385,7 @@ public class CompactionTaskRunTest extends IngestionTestBase
segments.get(i).getInterval()
);
Assert.assertEquals(
- getDefaultCompactionState(Granularities.HOUR, Granularities.NONE,
ImmutableList.of(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00", i, i
+ 1))),
+ getDefaultCompactionState(Granularities.HOUR, Granularities.MINUTE,
ImmutableList.of(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00", i, i
+ 1))),
segments.get(i).getLastCompactionState()
);
if (lockGranularity == LockGranularity.SEGMENT) {
@@ -415,7 +415,7 @@ public class CompactionTaskRunTest extends IngestionTestBase
segments.get(i).getInterval()
);
Assert.assertEquals(
- getDefaultCompactionState(Granularities.HOUR, Granularities.NONE,
ImmutableList.of(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00", i, i
+ 1))),
+ getDefaultCompactionState(Granularities.HOUR, Granularities.MINUTE,
ImmutableList.of(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00", i, i
+ 1))),
segments.get(i).getLastCompactionState()
);
if (lockGranularity == LockGranularity.SEGMENT) {
@@ -517,7 +517,7 @@ public class CompactionTaskRunTest extends IngestionTestBase
segments.get(i).getInterval()
);
Assert.assertEquals(
- getDefaultCompactionState(Granularities.HOUR, Granularities.NONE,
ImmutableList.of(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00", i, i
+ 1))),
+ getDefaultCompactionState(Granularities.HOUR, Granularities.MINUTE,
ImmutableList.of(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00", i, i
+ 1))),
segments.get(i).getLastCompactionState()
);
if (lockGranularity == LockGranularity.SEGMENT) {
@@ -559,7 +559,7 @@ public class CompactionTaskRunTest extends IngestionTestBase
Assert.assertEquals(Intervals.of("2014-01-01/2014-01-02"),
segments.get(0).getInterval());
Assert.assertEquals(new NumberedShardSpec(0, 1),
segments.get(0).getShardSpec());
Assert.assertEquals(
- getDefaultCompactionState(Granularities.DAY, Granularities.NONE,
ImmutableList.of(Intervals.of("2014-01-01T00:00:00/2014-01-01T03:00:00"))),
+ getDefaultCompactionState(Granularities.DAY, Granularities.MINUTE,
ImmutableList.of(Intervals.of("2014-01-01T00:00:00/2014-01-01T03:00:00"))),
segments.get(0).getLastCompactionState()
);
@@ -580,7 +580,7 @@ public class CompactionTaskRunTest extends IngestionTestBase
Assert.assertEquals(Intervals.of("2014-01-01T0%d:00:00/2014-01-01T0%d:00:00",
i, i + 1), segments.get(i).getInterval());
Assert.assertEquals(new NumberedShardSpec(0, 1),
segments.get(i).getShardSpec());
Assert.assertEquals(
- getDefaultCompactionState(Granularities.HOUR, Granularities.NONE,
ImmutableList.of(Intervals.of("2014-01-01/2014-01-02"))),
+ getDefaultCompactionState(Granularities.HOUR, Granularities.MINUTE,
ImmutableList.of(Intervals.of("2014-01-01/2014-01-02"))),
segments.get(i).getLastCompactionState()
);
}
diff --git
a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskTest.java
b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskTest.java
index 71b603e..0cc831a 100644
---
a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskTest.java
+++
b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskTest.java
@@ -38,6 +38,7 @@ import com.google.common.collect.Maps;
import org.apache.druid.client.coordinator.CoordinatorClient;
import org.apache.druid.client.indexing.IndexingServiceClient;
import org.apache.druid.client.indexing.NoopIndexingServiceClient;
+import org.apache.druid.common.guava.SettableSupplier;
import org.apache.druid.data.input.InputSource;
import org.apache.druid.data.input.impl.DimensionSchema;
import org.apache.druid.data.input.impl.DimensionsSpec;
@@ -1116,6 +1117,60 @@ public class CompactionTaskTest
);
}
+ @Test
+ public void testChooseFinestGranularityWithNulls()
+ {
+ List<Granularity> input = Arrays.asList(
+ Granularities.DAY,
+ Granularities.SECOND,
+ Granularities.MINUTE,
+ Granularities.SIX_HOUR,
+ Granularities.DAY,
+ null,
+ Granularities.ALL,
+ Granularities.MINUTE
+ );
+
Assert.assertTrue(Granularities.SECOND.equals(chooseFinestGranularityHelper(input)));
+ }
+
+ @Test
+ public void testChooseFinestGranularityNone()
+ {
+ List<Granularity> input = ImmutableList.of(
+ Granularities.DAY,
+ Granularities.SECOND,
+ Granularities.MINUTE,
+ Granularities.SIX_HOUR,
+ Granularities.NONE,
+ Granularities.DAY,
+ Granularities.NONE,
+ Granularities.MINUTE
+ );
+
Assert.assertTrue(Granularities.NONE.equals(chooseFinestGranularityHelper(input)));
+ }
+
+ @Test
+ public void testChooseFinestGranularityAllNulls()
+ {
+ List<Granularity> input = Arrays.asList(
+ null,
+ null,
+ null,
+ null
+ );
+ Assert.assertNull(chooseFinestGranularityHelper(input));
+ }
+
+ private Granularity chooseFinestGranularityHelper(List<Granularity>
granularities)
+ {
+ SettableSupplier<Granularity> queryGranularity = new SettableSupplier<>();
+ for (Granularity current : granularities) {
+
queryGranularity.set(CompactionTask.compareWithCurrent(queryGranularity.get(),
current));
+ }
+ return queryGranularity.get();
+ }
+
+
private static List<DimensionsSpec>
getExpectedDimensionsSpecForAutoGeneration()
{
return ImmutableList.of(
diff --git
a/integration-tests/src/main/java/org/apache/druid/testing/utils/AbstractQueryWithResults.java
b/integration-tests/src/main/java/org/apache/druid/testing/utils/AbstractQueryWithResults.java
index d3dd2b0..029d001 100644
---
a/integration-tests/src/main/java/org/apache/druid/testing/utils/AbstractQueryWithResults.java
+++
b/integration-tests/src/main/java/org/apache/druid/testing/utils/AbstractQueryWithResults.java
@@ -22,6 +22,7 @@ package org.apache.druid.testing.utils;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collections;
import java.util.List;
import java.util.Map;
@@ -29,15 +30,22 @@ public class AbstractQueryWithResults<QueryType>
{
private final QueryType query;
private final List<Map<String, Object>> expectedResults;
+ private final List<String> fieldsToTest;
@JsonCreator
public AbstractQueryWithResults(
@JsonProperty("query") QueryType query,
- @JsonProperty("expectedResults") List<Map<String, Object>>
expectedResults
+ @JsonProperty("expectedResults") List<Map<String, Object>>
expectedResults,
+ @JsonProperty("fieldsToTest") List<String> fieldsToTest
)
{
this.query = query;
this.expectedResults = expectedResults;
+ if (fieldsToTest != null) {
+ this.fieldsToTest = fieldsToTest;
+ } else {
+ this.fieldsToTest = Collections.emptyList();
+ }
}
@JsonProperty
@@ -52,12 +60,19 @@ public class AbstractQueryWithResults<QueryType>
return expectedResults;
}
+ @JsonProperty
+ public List<String> getFieldsToTest()
+ {
+ return fieldsToTest;
+ }
+
@Override
public String toString()
{
return "QueryWithResults{" +
"query=" + query +
", expectedResults=" + expectedResults +
+ ", fieldsToTest=" + fieldsToTest +
'}';
}
}
diff --git
a/integration-tests/src/main/java/org/apache/druid/testing/utils/AbstractTestQueryHelper.java
b/integration-tests/src/main/java/org/apache/druid/testing/utils/AbstractTestQueryHelper.java
index c224d7a..ebaed72 100644
---
a/integration-tests/src/main/java/org/apache/druid/testing/utils/AbstractTestQueryHelper.java
+++
b/integration-tests/src/main/java/org/apache/druid/testing/utils/AbstractTestQueryHelper.java
@@ -50,7 +50,6 @@ public abstract class AbstractTestQueryHelper<QueryResultType
extends AbstractQu
protected final String router;
protected final String routerTLS;
-
@Inject
AbstractTestQueryHelper(
ObjectMapper jsonMapper,
@@ -119,7 +118,9 @@ public abstract class
AbstractTestQueryHelper<QueryResultType extends AbstractQu
for (QueryResultType queryWithResult : queries) {
LOG.info("Running Query %s", queryWithResult.getQuery());
List<Map<String, Object>> result = queryClient.query(url,
queryWithResult.getQuery());
- if (!QueryResultVerifier.compareResults(result,
queryWithResult.getExpectedResults())) {
+ if (!QueryResultVerifier.compareResults(result,
queryWithResult.getExpectedResults(),
+ queryWithResult.getFieldsToTest()
+ )) {
LOG.error(
"Failed while executing query %s \n expectedResults: %s \n
actualResults : %s",
queryWithResult.getQuery(),
diff --git
a/integration-tests/src/main/java/org/apache/druid/testing/utils/QueryResultVerifier.java
b/integration-tests/src/main/java/org/apache/druid/testing/utils/QueryResultVerifier.java
index 6aa0638..2e25f11 100644
---
a/integration-tests/src/main/java/org/apache/druid/testing/utils/QueryResultVerifier.java
+++
b/integration-tests/src/main/java/org/apache/druid/testing/utils/QueryResultVerifier.java
@@ -20,13 +20,15 @@
package org.apache.druid.testing.utils;
import java.util.Iterator;
+import java.util.List;
import java.util.Map;
public class QueryResultVerifier
{
public static boolean compareResults(
Iterable<Map<String, Object>> actual,
- Iterable<Map<String, Object>> expected
+ Iterable<Map<String, Object>> expected,
+ List<String> fieldsToTest
)
{
Iterator<Map<String, Object>> actualIter = actual.iterator();
@@ -36,8 +38,16 @@ public class QueryResultVerifier
Map<String, Object> actualRes = actualIter.next();
Map<String, Object> expRes = expectedIter.next();
- if (!actualRes.equals(expRes)) {
- return false;
+ if (fieldsToTest != null && !fieldsToTest.isEmpty()) {
+ for (String field : fieldsToTest) {
+ if (!actualRes.get(field).equals(expRes.get(field))) {
+ return false;
+ }
+ }
+ } else {
+ if (!actualRes.equals(expRes)) {
+ return false;
+ }
}
}
diff --git
a/integration-tests/src/main/java/org/apache/druid/testing/utils/QueryWithResults.java
b/integration-tests/src/main/java/org/apache/druid/testing/utils/QueryWithResults.java
index 13476bd..b5fa706 100644
---
a/integration-tests/src/main/java/org/apache/druid/testing/utils/QueryWithResults.java
+++
b/integration-tests/src/main/java/org/apache/druid/testing/utils/QueryWithResults.java
@@ -31,9 +31,10 @@ public class QueryWithResults extends
AbstractQueryWithResults<Query>
@JsonCreator
public QueryWithResults(
@JsonProperty("query") Query query,
- @JsonProperty("expectedResults") List<Map<String, Object>>
expectedResults
+ @JsonProperty("expectedResults") List<Map<String, Object>>
expectedResults,
+ @JsonProperty("fieldsToTest") List<String> fieldsToTest
)
{
- super(query, expectedResults);
+ super(query, expectedResults, fieldsToTest);
}
}
diff --git
a/integration-tests/src/main/java/org/apache/druid/testing/utils/SqlQueryWithResults.java
b/integration-tests/src/main/java/org/apache/druid/testing/utils/SqlQueryWithResults.java
index de727a6..2339f7d 100644
---
a/integration-tests/src/main/java/org/apache/druid/testing/utils/SqlQueryWithResults.java
+++
b/integration-tests/src/main/java/org/apache/druid/testing/utils/SqlQueryWithResults.java
@@ -22,6 +22,7 @@ package org.apache.druid.testing.utils;
import com.fasterxml.jackson.annotation.JsonCreator;
import org.apache.druid.sql.http.SqlQuery;
+import java.util.Collections;
import java.util.List;
import java.util.Map;
@@ -34,7 +35,7 @@ public class SqlQueryWithResults extends
AbstractQueryWithResults<SqlQuery>
List<Map<String, Object>> expectedResults
)
{
- super(query, expectedResults);
+ super(query, expectedResults, Collections.emptyList());
}
}
diff --git
a/integration-tests/src/test/java/org/apache/druid/tests/coordinator/duty/ITAutoCompactionTest.java
b/integration-tests/src/test/java/org/apache/druid/tests/coordinator/duty/ITAutoCompactionTest.java
index 91bac02..1ccfa3f 100644
---
a/integration-tests/src/test/java/org/apache/druid/tests/coordinator/duty/ITAutoCompactionTest.java
+++
b/integration-tests/src/test/java/org/apache/druid/tests/coordinator/duty/ITAutoCompactionTest.java
@@ -129,7 +129,7 @@ public class ITAutoCompactionTest extends
AbstractIndexerTest
fullDatasourceName,
AutoCompactionSnapshot.AutoCompactionScheduleStatus.RUNNING,
0,
- 22489,
+ 22482,
0,
0,
3,
@@ -275,7 +275,7 @@ public class ITAutoCompactionTest extends
AbstractIndexerTest
fullDatasourceName,
AutoCompactionSnapshot.AutoCompactionScheduleStatus.RUNNING,
0,
- 22489,
+ 22482,
0,
0,
3,
diff --git
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITCompactionTaskTest.java
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITCompactionTaskTest.java
index f4ee559..b68ab77 100644
---
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITCompactionTaskTest.java
+++
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITCompactionTaskTest.java
@@ -54,6 +54,9 @@ public class ITCompactionTaskTest extends AbstractIndexerTest
private static final String INDEX_QUERIES_RESOURCE =
"/indexer/wikipedia_index_queries.json";
private static final String INDEX_DATASOURCE = "wikipedia_index_test";
+ private static final String SEGMENT_METADATA_QUERY_RESOURCE_QR4 =
"/indexer/segment_metadata_qr4.json";
+ private static final String SEGMENT_METADATA_QUERY_RESOURCE_QR2 =
"/indexer/segment_metadata_qr2.json";
+
private static final String COMPACTION_TASK =
"/indexer/wikipedia_compaction_task.json";
private static final String COMPACTION_TASK_WITH_SEGMENT_GRANULARITY =
"/indexer/wikipedia_compaction_task_with_segment_granularity.json";
private static final String COMPACTION_TASK_WITH_GRANULARITY_SPEC =
"/indexer/wikipedia_compaction_task_with_granularity_spec.json";
@@ -125,14 +128,14 @@ public class ITCompactionTaskTest extends
AbstractIndexerTest
fullDatasourceName
);
-
+ checkQueryGranularity(SEGMENT_METADATA_QUERY_RESOURCE_QR4);
queryHelper.testQueriesFromString(queryResponseTemplate);
compactData(compactionResource, newSegmentGranularity);
// The original 4 segments should be compacted into 2 new segments
checkNumberOfSegments(2);
queryHelper.testQueriesFromString(queryResponseTemplate);
-
+ checkQueryGranularity(SEGMENT_METADATA_QUERY_RESOURCE_QR2);
if (newSegmentGranularity != null) {
List<String> newIntervals = new ArrayList<>();
@@ -146,6 +149,7 @@ public class ITCompactionTaskTest extends
AbstractIndexerTest
checkCompactionIntervals(expectedIntervalAfterCompaction);
}
}
+
private void loadData(String indexTask) throws Exception
{
String taskSpec = getResourceAsString(indexTask);
@@ -182,6 +186,35 @@ public class ITCompactionTaskTest extends
AbstractIndexerTest
);
}
+ private void checkQueryGranularity(String queryResource) throws Exception
+ {
+ String queryResponseTemplate;
+ try {
+ InputStream is =
AbstractITBatchIndexTest.class.getResourceAsStream(queryResource);
+ queryResponseTemplate = IOUtils.toString(is, StandardCharsets.UTF_8);
+ }
+ catch (IOException e) {
+ throw new ISE(e, "could not read query file: %s", queryResource);
+ }
+
+ queryResponseTemplate = StringUtils.replace(
+ queryResponseTemplate,
+ "%%DATASOURCE%%",
+ fullDatasourceName
+ );
+ queryResponseTemplate = StringUtils.replace(
+ queryResponseTemplate,
+ "%%ANALYSIS_TYPE%%",
+ "queryGranularity"
+ );
+ queryResponseTemplate = StringUtils.replace(
+ queryResponseTemplate,
+ "%%INTERVALS%%",
+ "2013-08-31/2013-09-02"
+ );
+ queryHelper.testQueriesFromString(queryResponseTemplate);
+ }
+
private void checkNumberOfSegments(int numExpectedSegments)
{
ITRetryUtil.retryUntilTrue(
diff --git
a/integration-tests/src/test/java/org/apache/druid/tests/query/ITQueryRetryTestOnMissingSegments.java
b/integration-tests/src/test/java/org/apache/druid/tests/query/ITQueryRetryTestOnMissingSegments.java
index 961418b..e4842a4 100644
---
a/integration-tests/src/test/java/org/apache/druid/tests/query/ITQueryRetryTestOnMissingSegments.java
+++
b/integration-tests/src/test/java/org/apache/druid/tests/query/ITQueryRetryTestOnMissingSegments.java
@@ -54,7 +54,7 @@ import java.util.Map;
* test spawns two historicals, a normal historical and a historical modified
for testing. The later historical
* announces all segments assigned, but doesn't serve all of them. Instead, it
can report missing segments for some
* segments. See {@link ServerManagerForQueryRetryTest} for more details.
- *
+ * <p>
* To run this test properly, the test group must be specified as {@link
TestNGGroup#QUERY_RETRY}.
*/
@Test(groups = TestNGGroup.QUERY_RETRY)
@@ -134,7 +134,9 @@ public class ITQueryRetryTestOnMissingSegments
{
final List<QueryWithResults> queries = jsonMapper.readValue(
queryWithResultsStr,
- new TypeReference<List<QueryWithResults>>() {}
+ new TypeReference<List<QueryWithResults>>()
+ {
+ }
);
testQueries(queries, expectation);
}
@@ -156,9 +158,15 @@ public class ITQueryRetryTestOnMissingSegments
List<Map<String, Object>> result = jsonMapper.readValue(
responseHolder.getContent(),
- new TypeReference<List<Map<String, Object>>>() {}
+ new TypeReference<List<Map<String, Object>>>()
+ {
+ }
);
- if (!QueryResultVerifier.compareResults(result,
queryWithResult.getExpectedResults())) {
+ if (!QueryResultVerifier.compareResults(
+ result,
+ queryWithResult.getExpectedResults(),
+ queryWithResult.getFieldsToTest()
+ )) {
if (expectation != Expectation.INCORRECT_RESULT) {
throw new ISE(
"Incorrect query results for query %s \n expectedResults: %s
\n actualResults : %s",
diff --git
a/integration-tests/src/test/resources/indexer/segment_metadata_qr2.json
b/integration-tests/src/test/resources/indexer/segment_metadata_qr2.json
new file mode 100644
index 0000000..948760d
--- /dev/null
+++ b/integration-tests/src/test/resources/indexer/segment_metadata_qr2.json
@@ -0,0 +1,23 @@
+[
+ {
+ "query": {
+ "queryType": "segmentMetadata",
+ "dataSource": "%%DATASOURCE%%",
+ "analysisTypes": [
+ "%%ANALYSIS_TYPE%%"
+ ],
+ "intervals": [
+ "%%INTERVALS%%"
+ ]
+ },
+ "expectedResults": [
+ {
+ "queryGranularity": "SECOND"
+ },
+ {
+ "queryGranularity": "SECOND"
+ }
+ ],
+ "fieldsToTest": ["queryGranularity"]
+ }
+]
diff --git
a/integration-tests/src/test/resources/indexer/segment_metadata_qr4.json
b/integration-tests/src/test/resources/indexer/segment_metadata_qr4.json
new file mode 100644
index 0000000..180c692
--- /dev/null
+++ b/integration-tests/src/test/resources/indexer/segment_metadata_qr4.json
@@ -0,0 +1,29 @@
+[
+ {
+ "query": {
+ "queryType": "segmentMetadata",
+ "dataSource": "%%DATASOURCE%%",
+ "analysisTypes": [
+ "%%ANALYSIS_TYPE%%"
+ ],
+ "intervals": [
+ "%%INTERVALS%%"
+ ]
+ },
+ "expectedResults": [
+ {
+ "queryGranularity": "SECOND"
+ },
+ {
+ "queryGranularity": "SECOND"
+ },
+ {
+ "queryGranularity": "SECOND"
+ },
+ {
+ "queryGranularity": "SECOND"
+ }
+ ],
+ "fieldsToTest": ["queryGranularity"]
+ }
+]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]