loquisgon commented on code in PR #12392:
URL: https://github.com/apache/druid/pull/12392#discussion_r848951942
##########
integration-tests/src/test/java/org/apache/druid/tests/coordinator/duty/ITAutoCompactionTest.java:
##########
@@ -555,30 +577,169 @@ public void
testAutoCompactionDutyWithSegmentGranularityAndWithDropExistingTrue(
verifySegmentsCompacted(1, 1000);
checkCompactionIntervals(expectedIntervalAfterCompaction);
- newGranularity = Granularities.DAY;
+
+ LOG.info("Auto compaction test with MONTH segment granularity,
dropExisting is true");
+ // "interval": "2013-01-01T00:00:00.000Z/2014-01-01T00:00:00.000Z",
+ newGranularity = Granularities.MONTH;
// Set dropExisting to true
submitCompactionConfig(1000, NO_SKIP_OFFSET, new
UserCompactionTaskGranularityConfig(newGranularity, null, null), true);
- LOG.info("Auto compaction test with DAY segment granularity");
+ // Since dropExisting is set to true...
+ // Again data is only in two days
+ // The earlier segment with YEAR granularity will be completely covered,
overshadowed, by the
+ // new MONTH segments for data and tombstones for days with no data
+    // Hence, we will only have 2013-08 to 2013-09 months with data
+    // plus 10 tombstones (12 segments in total)
+ final List<String> intervalsAfterYEARCompactionButBeforeMONTHCompaction =
+ coordinator.getSegmentIntervals(fullDatasourceName);
+ expectedIntervalAfterCompaction = new ArrayList<>();
+ for (String interval :
intervalsAfterYEARCompactionButBeforeMONTHCompaction) {
+ for (Interval newinterval : newGranularity.getIterable(new
Interval(interval, ISOChronology.getInstanceUTC()))) {
+ expectedIntervalAfterCompaction.add(newinterval.toString());
+ }
+ }
+ forceTriggerAutoCompaction(12);
+ verifyQuery(INDEX_QUERIES_RESOURCE);
+ verifyTombstones(10);
+ verifySegmentsCompacted(12, 1000);
+ checkCompactionIntervals(expectedIntervalAfterCompaction);
+
+ LOG.info("Auto compaction test with SEMESTER segment granularity,
dropExisting is true, over tombstones");
+    // The only reason semester (and not quarter or month) is used is to minimize
time in the test
+    // while still ensuring that one of the compactions compacts *only*
tombstones. The first semester
+    // will compact only tombstones, so it should be a tombstone itself.
+ newGranularity = new PeriodGranularity(new Period("P6M"), null,
DateTimeZone.UTC);
+ // Set dropExisting to true
+ submitCompactionConfig(1000, NO_SKIP_OFFSET, new
UserCompactionTaskGranularityConfig(newGranularity, null, null), true);
// Since dropExisting is set to true...
+    // The earlier 12 segments with MONTH granularity will be completely
covered, overshadowed, by the
+    // new P6M (semester) segments for data and tombstones for intervals with no data
+ // Hence, we will have two segments, one tombstone for the first
semester and one data segment for the second.
+ forceTriggerAutoCompaction(2); // two semesters compacted
+ verifyQuery(INDEX_QUERIES_RESOURCE);
+ verifyTombstones(1);
+ verifySegmentsCompacted(2, 1000);
+
+ expectedIntervalAfterCompaction =
+ Arrays.asList("2013-01-01T00:00:00.000Z/2013-07-01T00:00:00.000Z",
+ "2013-07-01T00:00:00.000Z/2014-01-01T00:00:00.000Z"
+ );
+ checkCompactionIntervals(expectedIntervalAfterCompaction);
+
+    // verify that autocompaction completed before
Review Comment:
Fixed.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]