kfaraz commented on code in PR #19051:
URL: https://github.com/apache/druid/pull/19051#discussion_r2850726494


##########
indexing-service/src/main/java/org/apache/druid/indexing/overlord/supervisor/SupervisorResource.java:
##########
@@ -351,6 +357,80 @@ public Response getAllTaskStats(
     );
   }
 
+  @GET
+  @Path("/{id}/reindexingTimeline")
+  @Produces(MediaType.APPLICATION_JSON)
+  @ResourceFilters(SupervisorResourceFilter.class)
+  public Response getReindexingTimeline(
+      @PathParam("id") final String id,
+      @QueryParam("referenceTime") @Nullable final String referenceTimeStr
+  )
+  {
+    return asLeaderWithSupervisorManager(
+        manager -> {
+          Optional<SupervisorSpec> specOptional = 
manager.getSupervisorSpec(id);
+          if (!specOptional.isPresent()) {
+            return Response.status(Response.Status.NOT_FOUND)
+                           .entity(ImmutableMap.of("error", 
StringUtils.format("[%s] does not exist", id)))
+                           .build();
+          }
+
+          SupervisorSpec spec = specOptional.get();
+          if (!(spec instanceof CompactionSupervisorSpec)) {
+            return Response.status(Response.Status.BAD_REQUEST)
+                           .entity(ImmutableMap.of(
+                               "error",
+                               StringUtils.format(
+                                   "[%s] is not a compaction supervisor (type: 
%s)",

Review Comment:
   I think we can omit reporting back the actual type of the supervisor found. (Remember to also remove the now-unused `spec.getClass().getSimpleName()` argument from the `format` call.)
   ```suggestion
                                   "Supervisor[%s] is not a compaction supervisor",
   ```



##########
indexing-service/src/main/java/org/apache/druid/indexing/compact/ReindexingTimelineView.java:
##########
@@ -0,0 +1,497 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.indexing.compact;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonSubTypes;
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
+import org.apache.druid.server.compaction.ReindexingDataSchemaRule;
+import org.apache.druid.server.compaction.ReindexingDeletionRule;
+import org.apache.druid.server.compaction.ReindexingIOConfigRule;
+import org.apache.druid.server.compaction.ReindexingRule;
+import org.apache.druid.server.compaction.ReindexingSegmentGranularityRule;
+import org.apache.druid.server.compaction.ReindexingTuningConfigRule;
+import org.apache.druid.server.coordinator.DataSourceCompactionConfig;
+import org.joda.time.DateTime;
+import org.joda.time.Interval;
+import org.joda.time.Period;
+
+import javax.annotation.Nullable;
+import java.util.Collections;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Represents the timeline of search intervals and their associated reindexing 
configurations
+ * for a cascading reindexing supervisor. This view helps operators understand 
how different
+ * rules are applied across time intervals.
+ */
+public class ReindexingTimelineView
+{
+  private final String dataSource;
+  private final DateTime referenceTime;
+  private final SkipOffsetInfo skipOffset;
+  private final List<IntervalConfig> intervals;
+  private final ValidationError validationError;
+
+  @JsonCreator
+  public ReindexingTimelineView(
+      @JsonProperty("dataSource") String dataSource,
+      @JsonProperty("referenceTime") DateTime referenceTime,
+      @JsonProperty("skipOffset") @Nullable SkipOffsetInfo skipOffset,
+      @JsonProperty("intervals") List<IntervalConfig> intervals,
+      @JsonProperty("validationError") @Nullable ValidationError 
validationError
+  )
+  {
+    this.dataSource = dataSource;
+    this.referenceTime = referenceTime;
+    this.skipOffset = skipOffset;
+    this.intervals = Collections.unmodifiableList(intervals);
+    this.validationError = validationError;
+  }
+
+  @JsonProperty
+  public String getDataSource()
+  {
+    return dataSource;
+  }
+
+  @JsonProperty
+  public DateTime getReferenceTime()
+  {
+    return referenceTime;
+  }
+
+  @JsonProperty
+  @JsonInclude(JsonInclude.Include.NON_NULL)
+  @Nullable
+  public SkipOffsetInfo getSkipOffset()
+  {
+    return skipOffset;
+  }
+
+  @JsonProperty
+  public List<IntervalConfig> getIntervals()
+  {
+    return intervals;
+  }
+
+  @JsonProperty
+  @JsonInclude(JsonInclude.Include.NON_NULL)
+  @Nullable
+  public ValidationError getValidationError()
+  {
+    return validationError;
+  }
+
+  @Override
+  public boolean equals(Object o)
+  {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+    ReindexingTimelineView that = (ReindexingTimelineView) o;
+    return Objects.equals(dataSource, that.dataSource) &&
+           Objects.equals(referenceTime, that.referenceTime) &&
+           Objects.equals(skipOffset, that.skipOffset) &&
+           Objects.equals(intervals, that.intervals) &&
+           Objects.equals(validationError, that.validationError);
+  }
+
+  @Override
+  public int hashCode()
+  {
+    return Objects.hash(dataSource, referenceTime, skipOffset, intervals, 
validationError);
+  }
+
+  /**
+   * Information about a validation error that occurred while building the 
timeline.
+   */
+  public static class ValidationError
+  {
+    private final String errorType;
+    private final String message;
+    private final String olderInterval;
+    private final String olderGranularity;
+    private final String newerInterval;
+    private final String newerGranularity;
+
+    @JsonCreator
+    public ValidationError(
+        @JsonProperty("errorType") String errorType,
+        @JsonProperty("message") String message,
+        @JsonProperty("olderInterval") @Nullable String olderInterval,
+        @JsonProperty("olderGranularity") @Nullable String olderGranularity,
+        @JsonProperty("newerInterval") @Nullable String newerInterval,
+        @JsonProperty("newerGranularity") @Nullable String newerGranularity
+    )
+    {
+      this.errorType = errorType;
+      this.message = message;
+      this.olderInterval = olderInterval;
+      this.olderGranularity = olderGranularity;
+      this.newerInterval = newerInterval;
+      this.newerGranularity = newerGranularity;
+    }
+
+    @JsonProperty
+    public String getErrorType()
+    {
+      return errorType;
+    }
+
+    @JsonProperty
+    public String getMessage()
+    {
+      return message;
+    }
+
+    @JsonProperty
+    @JsonInclude(JsonInclude.Include.NON_NULL)
+    @Nullable
+    public String getOlderInterval()
+    {
+      return olderInterval;
+    }
+
+    @JsonProperty
+    @JsonInclude(JsonInclude.Include.NON_NULL)
+    @Nullable
+    public String getOlderGranularity()
+    {
+      return olderGranularity;
+    }
+
+    @JsonProperty
+    @JsonInclude(JsonInclude.Include.NON_NULL)
+    @Nullable
+    public String getNewerInterval()
+    {
+      return newerInterval;
+    }
+
+    @JsonProperty
+    @JsonInclude(JsonInclude.Include.NON_NULL)
+    @Nullable
+    public String getNewerGranularity()
+    {
+      return newerGranularity;
+    }
+
+    @Override
+    public boolean equals(Object o)
+    {
+      if (this == o) {
+        return true;
+      }
+      if (o == null || getClass() != o.getClass()) {
+        return false;
+      }
+      ValidationError that = (ValidationError) o;
+      return Objects.equals(errorType, that.errorType) &&
+             Objects.equals(message, that.message) &&
+             Objects.equals(olderInterval, that.olderInterval) &&
+             Objects.equals(olderGranularity, that.olderGranularity) &&
+             Objects.equals(newerInterval, that.newerInterval) &&
+             Objects.equals(newerGranularity, that.newerGranularity);
+    }
+
+    @Override
+    public int hashCode()
+    {
+      return Objects.hash(errorType, message, olderInterval, olderGranularity, 
newerInterval, newerGranularity);
+    }
+  }
+
+  /**
+   * Information about skip offsets and whether they were applied.
+   * Exactly one of {@code applied} or {@code notApplied} must be non-null.
+   */
+  public static class SkipOffsetInfo
+  {
+    private final AppliedSkipOffset applied;
+    private final NotAppliedSkipOffset notApplied;

Review Comment:
   Nit: These two classes feel a little unnecessary. We could just have the actual fields directly in the class `SkipOffsetInfo`: `type`, `period`, a boolean `isApplied`, and a nullable `effectiveEndTime` mutually exclusive with a nullable `errorMessage`.



##########
indexing-service/src/test/java/org/apache/druid/indexing/compact/CascadingReindexingTemplateTest.java:
##########
@@ -1507,6 +1516,323 @@ public void 
test_generateAlignedSearchIntervals_failsWhenOlderRuleHasFinerGranul
     );
   }
 
+  /**
+   * Comprehensive test covering:
+   * - Multiple intervals with different segment granularities
+   * - All rule types (segment gran, data schema, deletion, tuning, IO)
+   * - Non-segment-gran rules triggering interval splitting
+   * - Applied rules tracking with correct rule types in each interval
+   * - Full DataSourceCompactionConfig generation
+   * - Rule count accuracy
+   */
+  @Test
+  public void test_getReindexingTimelineView_comprehensive()
+  {
+    DateTime referenceTime = DateTimes.of("2025-02-01T00:00:00Z");
+
+    // Create rules with various periods to test interval generation and 
splitting
+    ReindexingSegmentGranularityRule segGran7d = new 
ReindexingSegmentGranularityRule(
+        "seg-gran-7d",
+        null,
+        Period.days(7),
+        Granularities.HOUR
+    );
+
+    ReindexingSegmentGranularityRule segGran30d = new 
ReindexingSegmentGranularityRule(
+        "seg-gran-30d",
+        null,
+        Period.days(30),
+        Granularities.DAY
+    );
+
+    // Data schema rule at P15D (will split the HOUR interval)
+    ReindexingDataSchemaRule dataSchema15d = new ReindexingDataSchemaRule(
+        "data-schema-15d",
+        null,
+        Period.days(15),
+        new UserCompactionTaskDimensionsConfig(null),
+        new AggregatorFactory[]{new CountAggregatorFactory("count")},
+        Granularities.MINUTE,
+        true,
+        null
+    );
+
+    // Deletion rules at different periods
+    ReindexingDeletionRule deletion10d = new ReindexingDeletionRule(
+        "deletion-10d",
+        null,
+        Period.days(10),
+        new EqualityFilter("country", ColumnType.STRING, "US", null),
+        null
+    );
+
+    ReindexingDeletionRule deletion20d = new ReindexingDeletionRule(
+        "deletion-20d",
+        null,
+        Period.days(20),
+        new EqualityFilter("device", ColumnType.STRING, "mobile", null),
+        null
+    );
+
+    // Tuning and IO rules
+    ReindexingTuningConfigRule tuning7d = new ReindexingTuningConfigRule(
+        "tuning-7d",
+        null,
+        Period.days(7),
+        new UserCompactionTaskQueryTuningConfig(

Review Comment:
   Didn't we add a builder for this class in the previous PR?



##########
indexing-service/src/test/java/org/apache/druid/indexing/compact/CascadingReindexingTemplateTest.java:
##########
@@ -1507,6 +1516,323 @@ public void 
test_generateAlignedSearchIntervals_failsWhenOlderRuleHasFinerGranul
     );
   }
 
+  /**
+   * Comprehensive test covering:
+   * - Multiple intervals with different segment granularities
+   * - All rule types (segment gran, data schema, deletion, tuning, IO)
+   * - Non-segment-gran rules triggering interval splitting
+   * - Applied rules tracking with correct rule types in each interval
+   * - Full DataSourceCompactionConfig generation
+   * - Rule count accuracy
+   */
+  @Test
+  public void test_getReindexingTimelineView_comprehensive()
+  {
+    DateTime referenceTime = DateTimes.of("2025-02-01T00:00:00Z");
+
+    // Create rules with various periods to test interval generation and 
splitting
+    ReindexingSegmentGranularityRule segGran7d = new 
ReindexingSegmentGranularityRule(
+        "seg-gran-7d",
+        null,
+        Period.days(7),
+        Granularities.HOUR
+    );
+
+    ReindexingSegmentGranularityRule segGran30d = new 
ReindexingSegmentGranularityRule(
+        "seg-gran-30d",
+        null,
+        Period.days(30),
+        Granularities.DAY
+    );
+
+    // Data schema rule at P15D (will split the HOUR interval)
+    ReindexingDataSchemaRule dataSchema15d = new ReindexingDataSchemaRule(
+        "data-schema-15d",
+        null,
+        Period.days(15),
+        new UserCompactionTaskDimensionsConfig(null),
+        new AggregatorFactory[]{new CountAggregatorFactory("count")},
+        Granularities.MINUTE,
+        true,
+        null
+    );
+
+    // Deletion rules at different periods
+    ReindexingDeletionRule deletion10d = new ReindexingDeletionRule(
+        "deletion-10d",
+        null,
+        Period.days(10),
+        new EqualityFilter("country", ColumnType.STRING, "US", null),
+        null
+    );
+
+    ReindexingDeletionRule deletion20d = new ReindexingDeletionRule(
+        "deletion-20d",
+        null,
+        Period.days(20),
+        new EqualityFilter("device", ColumnType.STRING, "mobile", null),
+        null
+    );
+
+    // Tuning and IO rules
+    ReindexingTuningConfigRule tuning7d = new ReindexingTuningConfigRule(
+        "tuning-7d",
+        null,
+        Period.days(7),
+        new UserCompactionTaskQueryTuningConfig(
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null
+        )
+    );
+
+    ReindexingIOConfigRule io7d = new ReindexingIOConfigRule(
+        "io-7d",
+        null,
+        Period.days(7),
+        new UserCompactionTaskIOConfig(true)
+    );
+
+    ReindexingRuleProvider provider = InlineReindexingRuleProvider.builder()
+                                                                  
.segmentGranularityRules(List.of(segGran7d, segGran30d))
+                                                                  
.dataSchemaRules(List.of(dataSchema15d))
+                                                                  
.deletionRules(List.of(deletion10d, deletion20d))
+                                                                  
.tuningConfigRules(List.of(tuning7d))
+                                                                  
.ioConfigRules(List.of(io7d))
+                                                                  .build();
+
+    CascadingReindexingTemplate template = new CascadingReindexingTemplate(
+        "testDS",
+        null,
+        null,
+        provider,
+        null,
+        null,
+        null,
+        null,
+        Granularities.DAY
+    );
+
+    ReindexingTimelineView timeline = 
template.getReindexingTimelineView(referenceTime);
+
+    // Verify basic timeline properties
+    Assertions.assertEquals("testDS", timeline.getDataSource());
+    Assertions.assertEquals(referenceTime, timeline.getReferenceTime());
+    Assertions.assertNull(timeline.getValidationError());
+    Assertions.assertNull(timeline.getSkipOffset());
+
+    // Verify we have multiple intervals (splitting should occur)
+    Assertions.assertTrue(timeline.getIntervals().size() >= 2, "Expected at 
least 2 intervals");
+
+    // Verify each interval has correct structure
+    for (ReindexingTimelineView.IntervalConfig intervalConfig : 
timeline.getIntervals()) {
+      Assertions.assertNotNull(intervalConfig.getInterval());
+      Assertions.assertTrue(intervalConfig.getRuleCount() > 0, "Rule count 
should be > 0");
+      Assertions.assertNotNull(intervalConfig.getConfig());
+      Assertions.assertNotNull(intervalConfig.getAppliedRules());
+      Assertions.assertEquals(
+          intervalConfig.getRuleCount(),
+          intervalConfig.getAppliedRules().size(),
+          "Applied rules size should match rule count"
+      );
+
+      // Verify config has expected components
+      DataSourceCompactionConfig config = intervalConfig.getConfig();
+      Assertions.assertNotNull(config.getGranularitySpec(), "Should have 
granularity spec");
+      
Assertions.assertNotNull(config.getGranularitySpec().getSegmentGranularity(), 
"Should have segment granularity");
+
+      // Verify appliedRules contain expected rule types
+      boolean hasTuningRule = intervalConfig.getAppliedRules().stream()
+                                            .anyMatch(r -> r instanceof 
ReindexingTuningConfigRule);
+      boolean hasIORule = intervalConfig.getAppliedRules().stream()
+                                        .anyMatch(r -> r instanceof 
ReindexingIOConfigRule);
+      boolean hasDataSchemaRule = intervalConfig.getAppliedRules().stream()
+                                                .anyMatch(r -> r instanceof 
ReindexingDataSchemaRule);
+      boolean hasDeletionRule = intervalConfig.getAppliedRules().stream()
+                                              .anyMatch(r -> r instanceof 
ReindexingDeletionRule);
+      boolean hasSegmentGranRule = intervalConfig.getAppliedRules().stream()
+                                                 .anyMatch(r -> r instanceof 
ReindexingSegmentGranularityRule);
+
+      // Most recent intervals should have more rules applied
+      if 
(intervalConfig.getInterval().getEnd().isAfter(referenceTime.minusDays(10))) {
+        Assertions.assertTrue(hasTuningRule, "Recent intervals should have 
tuning rules");
+        Assertions.assertTrue(hasIORule, "Recent intervals should have IO 
rules");
+      }
+    }
+  }
+
+  /**
+   * Test that skipOffsetFromNow correctly skips intervals and populates 
skipOffset.applied
+   */
+  @Test
+  public void 
test_getReindexingTimelineView_skipOffsetFromNow_skipsProperIntervals()
+  {
+    DateTime referenceTime = DateTimes.of("2025-01-29T00:00:00Z");
+    Period skipOffset = Period.days(10);
+
+    // Create rules where the most recent rule has a period SMALLER than the 
skip offset
+    // This ensures the interval would extend beyond the effectiveEndTime and 
get clamped
+    ReindexingRuleProvider provider = InlineReindexingRuleProvider.builder()
+                                                                  
.segmentGranularityRules(List.of(
+                                                                      new 
ReindexingSegmentGranularityRule("seg-3d", null, Period.days(3), 
Granularities.HOUR),
+                                                                      new 
ReindexingSegmentGranularityRule("seg-30d", null, Period.days(30), 
Granularities.DAY)
+                                                                  ))
+                                                                  .build();
+
+    CascadingReindexingTemplate template = new CascadingReindexingTemplate(
+        "testDS",
+        null,
+        null,
+        provider,
+        null,
+        null,
+        null,
+        skipOffset, // skipOffsetFromNow
+        Granularities.DAY
+    );
+
+    ReindexingTimelineView timeline = 
template.getReindexingTimelineView(referenceTime);
+
+    // Verify skipOffset is applied
+    Assertions.assertNotNull(timeline.getSkipOffset(), "Skip offset should be 
present");
+    Assertions.assertNotNull(timeline.getSkipOffset().getApplied(), "Skip 
offset should be applied");
+    Assertions.assertNull(timeline.getSkipOffset().getNotApplied(), "Skip 
offset notApplied should be null");
+
+    ReindexingTimelineView.AppliedSkipOffset applied = 
timeline.getSkipOffset().getApplied();
+    Assertions.assertEquals("skipOffsetFromNow", applied.getType());
+    Assertions.assertEquals(skipOffset, applied.getPeriod());
+
+    DateTime expectedEffectiveEndTime = referenceTime.minus(skipOffset);
+    Assertions.assertEquals(expectedEffectiveEndTime, 
applied.getEffectiveEndTime());
+
+    for (ReindexingTimelineView.IntervalConfig intervalConfig : 
timeline.getIntervals()) {
+      if 
(intervalConfig.getInterval().getEnd().isAfter(expectedEffectiveEndTime)) {
+        Assertions.assertEquals(0, intervalConfig.getRuleCount());
+      }
+    }
+  }
+
+  /**
+   * Test validation error when granularity timeline is invalid
+   */
+  @Test
+  public void 
test_getReindexingTimelineView_validationError_invalidGranularityTimeline()
+  {
+    DateTime referenceTime = DateTimes.of("2025-01-29T16:15:00Z");
+
+    // Create rules that violate the granularity constraint:
+    // Older data (P90D) has DAY granularity, newer data (P30D) has HOUR 
granularity
+    // This means as we move from past to present, granularity gets finer 
(valid)
+    // But then if we add MONTH for recent data, it becomes coarser (invalid)
+    ReindexingRuleProvider provider = InlineReindexingRuleProvider.builder()

Review Comment:
   Style tip: Move the `.builder()` to the next line to make the whole thing feel less crowded.
   ```suggestion
       ReindexingRuleProvider provider = InlineReindexingRuleProvider
           .builder()
   ```



##########
indexing-service/src/main/java/org/apache/druid/indexing/overlord/supervisor/SupervisorResource.java:
##########
@@ -351,6 +357,80 @@ public Response getAllTaskStats(
     );
   }
 
+  @GET
+  @Path("/{id}/reindexingTimeline")
+  @Produces(MediaType.APPLICATION_JSON)
+  @ResourceFilters(SupervisorResourceFilter.class)
+  public Response getReindexingTimeline(
+      @PathParam("id") final String id,
+      @QueryParam("referenceTime") @Nullable final String referenceTimeStr
+  )
+  {
+    return asLeaderWithSupervisorManager(
+        manager -> {
+          Optional<SupervisorSpec> specOptional = 
manager.getSupervisorSpec(id);
+          if (!specOptional.isPresent()) {
+            return Response.status(Response.Status.NOT_FOUND)
+                           .entity(ImmutableMap.of("error", 
StringUtils.format("[%s] does not exist", id)))
+                           .build();
+          }
+
+          SupervisorSpec spec = specOptional.get();
+          if (!(spec instanceof CompactionSupervisorSpec)) {
+            return Response.status(Response.Status.BAD_REQUEST)
+                           .entity(ImmutableMap.of(
+                               "error",
+                               StringUtils.format(
+                                   "[%s] is not a compaction supervisor (type: 
%s)",
+                                   id,
+                                   spec.getClass().getSimpleName()
+                               )
+                           ))
+                           .build();
+          }
+
+          CompactionSupervisorSpec compactionSpec = (CompactionSupervisorSpec) 
spec;
+
+          DateTime referenceTime;
+          if (referenceTimeStr != null) {
+            try {
+              referenceTime = DateTimes.of(referenceTimeStr);
+            }
+            catch (IllegalArgumentException e) {
+              return Response.status(Response.Status.BAD_REQUEST)
+                             .entity(ImmutableMap.of(
+                                 "error",
+                                 StringUtils.format("Invalid referenceTime 
format: %s", referenceTimeStr)

Review Comment:
   ```suggestion
                                   StringUtils.format("Reference time[%s] is in an invalid format. Use ISO 8601 standard format.", referenceTimeStr)
   ```
   (Note: the format string has a `%s` placeholder, so `referenceTimeStr` must still be passed as an argument — otherwise the placeholder is never filled in.)
   
   By the way, you could also try doing the following to make the various conditions in this method cleaner.
   ```java
   return ServletResourceUtils.buildErrorResponseFrom(
       InvalidInput.exception(...)
   );
   ```



##########
indexing-service/src/test/java/org/apache/druid/indexing/compact/CascadingReindexingTemplateTest.java:
##########
@@ -1507,6 +1516,323 @@ public void 
test_generateAlignedSearchIntervals_failsWhenOlderRuleHasFinerGranul
     );
   }
 
+  /**
+   * Comprehensive test covering:
+   * - Multiple intervals with different segment granularities
+   * - All rule types (segment gran, data schema, deletion, tuning, IO)
+   * - Non-segment-gran rules triggering interval splitting
+   * - Applied rules tracking with correct rule types in each interval
+   * - Full DataSourceCompactionConfig generation
+   * - Rule count accuracy
+   */
+  @Test
+  public void test_getReindexingTimelineView_comprehensive()
+  {
+    DateTime referenceTime = DateTimes.of("2025-02-01T00:00:00Z");
+
+    // Create rules with various periods to test interval generation and 
splitting
+    ReindexingSegmentGranularityRule segGran7d = new 
ReindexingSegmentGranularityRule(
+        "seg-gran-7d",
+        null,
+        Period.days(7),
+        Granularities.HOUR
+    );
+
+    ReindexingSegmentGranularityRule segGran30d = new 
ReindexingSegmentGranularityRule(
+        "seg-gran-30d",
+        null,
+        Period.days(30),
+        Granularities.DAY
+    );
+
+    // Data schema rule at P15D (will split the HOUR interval)
+    ReindexingDataSchemaRule dataSchema15d = new ReindexingDataSchemaRule(
+        "data-schema-15d",
+        null,
+        Period.days(15),
+        new UserCompactionTaskDimensionsConfig(null),
+        new AggregatorFactory[]{new CountAggregatorFactory("count")},
+        Granularities.MINUTE,
+        true,
+        null
+    );
+
+    // Deletion rules at different periods
+    ReindexingDeletionRule deletion10d = new ReindexingDeletionRule(
+        "deletion-10d",
+        null,
+        Period.days(10),
+        new EqualityFilter("country", ColumnType.STRING, "US", null),
+        null
+    );
+
+    ReindexingDeletionRule deletion20d = new ReindexingDeletionRule(
+        "deletion-20d",
+        null,
+        Period.days(20),
+        new EqualityFilter("device", ColumnType.STRING, "mobile", null),
+        null
+    );
+
+    // Tuning and IO rules
+    ReindexingTuningConfigRule tuning7d = new ReindexingTuningConfigRule(
+        "tuning-7d",
+        null,
+        Period.days(7),
+        new UserCompactionTaskQueryTuningConfig(
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null
+        )
+    );
+
+    ReindexingIOConfigRule io7d = new ReindexingIOConfigRule(
+        "io-7d",
+        null,
+        Period.days(7),
+        new UserCompactionTaskIOConfig(true)
+    );
+
+    ReindexingRuleProvider provider = InlineReindexingRuleProvider.builder()
+                                                                  
.segmentGranularityRules(List.of(segGran7d, segGran30d))
+                                                                  
.dataSchemaRules(List.of(dataSchema15d))
+                                                                  
.deletionRules(List.of(deletion10d, deletion20d))
+                                                                  
.tuningConfigRules(List.of(tuning7d))
+                                                                  
.ioConfigRules(List.of(io7d))
+                                                                  .build();
+
+    CascadingReindexingTemplate template = new CascadingReindexingTemplate(
+        "testDS",
+        null,
+        null,
+        provider,
+        null,
+        null,
+        null,
+        null,
+        Granularities.DAY
+    );
+
+    ReindexingTimelineView timeline = 
template.getReindexingTimelineView(referenceTime);
+
+    // Verify basic timeline properties
+    Assertions.assertEquals("testDS", timeline.getDataSource());
+    Assertions.assertEquals(referenceTime, timeline.getReferenceTime());
+    Assertions.assertNull(timeline.getValidationError());
+    Assertions.assertNull(timeline.getSkipOffset());
+
+    // Verify we have multiple intervals (splitting should occur)
+    Assertions.assertTrue(timeline.getIntervals().size() >= 2, "Expected at 
least 2 intervals");
+
+    // Verify each interval has correct structure
+    for (ReindexingTimelineView.IntervalConfig intervalConfig : 
timeline.getIntervals()) {
+      Assertions.assertNotNull(intervalConfig.getInterval());
+      Assertions.assertTrue(intervalConfig.getRuleCount() > 0, "Rule count 
should be > 0");
+      Assertions.assertNotNull(intervalConfig.getConfig());
+      Assertions.assertNotNull(intervalConfig.getAppliedRules());
+      Assertions.assertEquals(
+          intervalConfig.getRuleCount(),
+          intervalConfig.getAppliedRules().size(),
+          "Applied rules size should match rule count"
+      );
+
+      // Verify config has expected components
+      DataSourceCompactionConfig config = intervalConfig.getConfig();
+      Assertions.assertNotNull(config.getGranularitySpec(), "Should have 
granularity spec");
+      
Assertions.assertNotNull(config.getGranularitySpec().getSegmentGranularity(), 
"Should have segment granularity");
+
+      // Verify appliedRules contain expected rule types
+      boolean hasTuningRule = intervalConfig.getAppliedRules().stream()
+                                            .anyMatch(r -> r instanceof 
ReindexingTuningConfigRule);
+      boolean hasIORule = intervalConfig.getAppliedRules().stream()
+                                        .anyMatch(r -> r instanceof 
ReindexingIOConfigRule);
+      boolean hasDataSchemaRule = intervalConfig.getAppliedRules().stream()
+                                                .anyMatch(r -> r instanceof 
ReindexingDataSchemaRule);
+      boolean hasDeletionRule = intervalConfig.getAppliedRules().stream()
+                                              .anyMatch(r -> r instanceof 
ReindexingDeletionRule);
+      boolean hasSegmentGranRule = intervalConfig.getAppliedRules().stream()
+                                                 .anyMatch(r -> r instanceof 
ReindexingSegmentGranularityRule);
+
+      // Most recent intervals should have more rules applied
+      if 
(intervalConfig.getInterval().getEnd().isAfter(referenceTime.minusDays(10))) {
+        Assertions.assertTrue(hasTuningRule, "Recent intervals should have 
tuning rules");
+        Assertions.assertTrue(hasIORule, "Recent intervals should have IO 
rules");
+      }
+    }
+  }
+
+  /**
+   * Test that skipOffsetFromNow correctly skips intervals and populates 
skipOffset.applied
+   */

Review Comment:
   Not really needed as the test name already captures this.



##########
indexing-service/src/main/java/org/apache/druid/indexing/overlord/supervisor/SupervisorResource.java:
##########
@@ -351,6 +357,80 @@ public Response getAllTaskStats(
     );
   }
 
+  @GET
+  @Path("/{id}/reindexingTimeline")
+  @Produces(MediaType.APPLICATION_JSON)
+  @ResourceFilters(SupervisorResourceFilter.class)
+  public Response getReindexingTimeline(
+      @PathParam("id") final String id,
+      @QueryParam("referenceTime") @Nullable final String referenceTimeStr
+  )
+  {
+    return asLeaderWithSupervisorManager(
+        manager -> {
+          Optional<SupervisorSpec> specOptional = 
manager.getSupervisorSpec(id);
+          if (!specOptional.isPresent()) {
+            return Response.status(Response.Status.NOT_FOUND)
+                           .entity(ImmutableMap.of("error", 
StringUtils.format("[%s] does not exist", id)))

Review Comment:
   ```suggestion
                              .entity(ImmutableMap.of("error", 
StringUtils.format("Supervisor[%s] does not exist", id)))
   ```



##########
indexing-service/src/main/java/org/apache/druid/indexing/overlord/supervisor/SupervisorResource.java:
##########
@@ -351,6 +357,80 @@ public Response getAllTaskStats(
     );
   }
 
+  @GET
+  @Path("/{id}/reindexingTimeline")
+  @Produces(MediaType.APPLICATION_JSON)
+  @ResourceFilters(SupervisorResourceFilter.class)
+  public Response getReindexingTimeline(
+      @PathParam("id") final String id,
+      @QueryParam("referenceTime") @Nullable final String referenceTimeStr
+  )
+  {
+    return asLeaderWithSupervisorManager(
+        manager -> {
+          Optional<SupervisorSpec> specOptional = 
manager.getSupervisorSpec(id);
+          if (!specOptional.isPresent()) {
+            return Response.status(Response.Status.NOT_FOUND)
+                           .entity(ImmutableMap.of("error", 
StringUtils.format("[%s] does not exist", id)))
+                           .build();
+          }
+
+          SupervisorSpec spec = specOptional.get();
+          if (!(spec instanceof CompactionSupervisorSpec)) {
+            return Response.status(Response.Status.BAD_REQUEST)
+                           .entity(ImmutableMap.of(
+                               "error",
+                               StringUtils.format(
+                                   "[%s] is not a compaction supervisor (type: 
%s)",
+                                   id,
+                                   spec.getClass().getSimpleName()
+                               )
+                           ))
+                           .build();
+          }
+
+          CompactionSupervisorSpec compactionSpec = (CompactionSupervisorSpec) 
spec;
+
+          DateTime referenceTime;
+          if (referenceTimeStr != null) {
+            try {
+              referenceTime = DateTimes.of(referenceTimeStr);
+            }
+            catch (IllegalArgumentException e) {
+              return Response.status(Response.Status.BAD_REQUEST)
+                             .entity(ImmutableMap.of(
+                                 "error",
+                                 StringUtils.format("Invalid referenceTime 
format: %s", referenceTimeStr)
+                             ))
+                             .build();
+            }
+          } else {
+            referenceTime = DateTimes.nowUtc();
+          }
+
+          CompactionJobTemplate template = compactionSpec.getTemplate();
+          if (!(template instanceof CascadingReindexingTemplate)) {
+            return Response.status(Response.Status.BAD_REQUEST)
+                           .entity(ImmutableMap.of(
+                               "error",
+                               StringUtils.format(
+                                   "Reindexing timeline is only available for 
cascading reindexing supervisors. " +
+                                   "Supervisor [%s] uses template type: %s",

Review Comment:
   ```suggestion
                                      "Reindexing timeline is not available for 
supervisor[%s] as it does not use a cascading template",
   ```



##########
indexing-service/src/test/java/org/apache/druid/indexing/compact/CascadingReindexingTemplateTest.java:
##########
@@ -1507,6 +1516,323 @@ public void 
test_generateAlignedSearchIntervals_failsWhenOlderRuleHasFinerGranul
     );
   }
 
+  /**
+   * Comprehensive test covering:
+   * - Multiple intervals with different segment granularities
+   * - All rule types (segment gran, data schema, deletion, tuning, IO)
+   * - Non-segment-gran rules triggering interval splitting
+   * - Applied rules tracking with correct rule types in each interval
+   * - Full DataSourceCompactionConfig generation
+   * - Rule count accuracy
+   */
+  @Test
+  public void test_getReindexingTimelineView_comprehensive()
+  {
+    DateTime referenceTime = DateTimes.of("2025-02-01T00:00:00Z");
+
+    // Create rules with various periods to test interval generation and 
splitting
+    ReindexingSegmentGranularityRule segGran7d = new 
ReindexingSegmentGranularityRule(
+        "seg-gran-7d",
+        null,
+        Period.days(7),
+        Granularities.HOUR
+    );
+
+    ReindexingSegmentGranularityRule segGran30d = new 
ReindexingSegmentGranularityRule(
+        "seg-gran-30d",
+        null,
+        Period.days(30),
+        Granularities.DAY
+    );
+
+    // Data schema rule at P15D (will split the HOUR interval)
+    ReindexingDataSchemaRule dataSchema15d = new ReindexingDataSchemaRule(
+        "data-schema-15d",
+        null,
+        Period.days(15),
+        new UserCompactionTaskDimensionsConfig(null),
+        new AggregatorFactory[]{new CountAggregatorFactory("count")},
+        Granularities.MINUTE,
+        true,
+        null
+    );
+
+    // Deletion rules at different periods
+    ReindexingDeletionRule deletion10d = new ReindexingDeletionRule(
+        "deletion-10d",
+        null,
+        Period.days(10),
+        new EqualityFilter("country", ColumnType.STRING, "US", null),
+        null
+    );
+
+    ReindexingDeletionRule deletion20d = new ReindexingDeletionRule(
+        "deletion-20d",
+        null,
+        Period.days(20),
+        new EqualityFilter("device", ColumnType.STRING, "mobile", null),
+        null
+    );
+
+    // Tuning and IO rules
+    ReindexingTuningConfigRule tuning7d = new ReindexingTuningConfigRule(
+        "tuning-7d",
+        null,
+        Period.days(7),
+        new UserCompactionTaskQueryTuningConfig(
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null
+        )
+    );
+
+    ReindexingIOConfigRule io7d = new ReindexingIOConfigRule(
+        "io-7d",
+        null,
+        Period.days(7),
+        new UserCompactionTaskIOConfig(true)
+    );
+
+    ReindexingRuleProvider provider = InlineReindexingRuleProvider.builder()
+                                                                  
.segmentGranularityRules(List.of(segGran7d, segGran30d))
+                                                                  
.dataSchemaRules(List.of(dataSchema15d))
+                                                                  
.deletionRules(List.of(deletion10d, deletion20d))
+                                                                  
.tuningConfigRules(List.of(tuning7d))
+                                                                  
.ioConfigRules(List.of(io7d))
+                                                                  .build();
+
+    CascadingReindexingTemplate template = new CascadingReindexingTemplate(
+        "testDS",
+        null,
+        null,
+        provider,
+        null,
+        null,
+        null,
+        null,
+        Granularities.DAY
+    );
+
+    ReindexingTimelineView timeline = 
template.getReindexingTimelineView(referenceTime);
+
+    // Verify basic timeline properties
+    Assertions.assertEquals("testDS", timeline.getDataSource());
+    Assertions.assertEquals(referenceTime, timeline.getReferenceTime());
+    Assertions.assertNull(timeline.getValidationError());
+    Assertions.assertNull(timeline.getSkipOffset());
+
+    // Verify we have multiple intervals (splitting should occur)
+    Assertions.assertTrue(timeline.getIntervals().size() >= 2, "Expected at 
least 2 intervals");
+
+    // Verify each interval has correct structure
+    for (ReindexingTimelineView.IntervalConfig intervalConfig : 
timeline.getIntervals()) {
+      Assertions.assertNotNull(intervalConfig.getInterval());
+      Assertions.assertTrue(intervalConfig.getRuleCount() > 0, "Rule count 
should be > 0");
+      Assertions.assertNotNull(intervalConfig.getConfig());
+      Assertions.assertNotNull(intervalConfig.getAppliedRules());
+      Assertions.assertEquals(
+          intervalConfig.getRuleCount(),
+          intervalConfig.getAppliedRules().size(),
+          "Applied rules size should match rule count"
+      );
+
+      // Verify config has expected components
+      DataSourceCompactionConfig config = intervalConfig.getConfig();
+      Assertions.assertNotNull(config.getGranularitySpec(), "Should have 
granularity spec");
+      
Assertions.assertNotNull(config.getGranularitySpec().getSegmentGranularity(), 
"Should have segment granularity");
+
+      // Verify appliedRules contain expected rule types
+      boolean hasTuningRule = intervalConfig.getAppliedRules().stream()
+                                            .anyMatch(r -> r instanceof 
ReindexingTuningConfigRule);
+      boolean hasIORule = intervalConfig.getAppliedRules().stream()
+                                        .anyMatch(r -> r instanceof 
ReindexingIOConfigRule);
+      boolean hasDataSchemaRule = intervalConfig.getAppliedRules().stream()
+                                                .anyMatch(r -> r instanceof 
ReindexingDataSchemaRule);
+      boolean hasDeletionRule = intervalConfig.getAppliedRules().stream()
+                                              .anyMatch(r -> r instanceof 
ReindexingDeletionRule);
+      boolean hasSegmentGranRule = intervalConfig.getAppliedRules().stream()
+                                                 .anyMatch(r -> r instanceof 
ReindexingSegmentGranularityRule);
+
+      // Most recent intervals should have more rules applied
+      if 
(intervalConfig.getInterval().getEnd().isAfter(referenceTime.minusDays(10))) {
+        Assertions.assertTrue(hasTuningRule, "Recent intervals should have 
tuning rules");
+        Assertions.assertTrue(hasIORule, "Recent intervals should have IO 
rules");
+      }
+    }
+  }
+
+  /**
+   * Test that skipOffsetFromNow correctly skips intervals and populates 
skipOffset.applied
+   */
+  @Test
+  public void 
test_getReindexingTimelineView_skipOffsetFromNow_skipsProperIntervals()
+  {
+    DateTime referenceTime = DateTimes.of("2025-01-29T00:00:00Z");
+    Period skipOffset = Period.days(10);
+
+    // Create rules where the most recent rule has a period SMALLER than the 
skip offset
+    // This ensures the interval would extend beyond the effectiveEndTime and 
get clamped
+    ReindexingRuleProvider provider = InlineReindexingRuleProvider.builder()
+                                                                  
.segmentGranularityRules(List.of(
+                                                                      new 
ReindexingSegmentGranularityRule("seg-3d", null, Period.days(3), 
Granularities.HOUR),
+                                                                      new 
ReindexingSegmentGranularityRule("seg-30d", null, Period.days(30), 
Granularities.DAY)
+                                                                  ))
+                                                                  .build();
+
+    CascadingReindexingTemplate template = new CascadingReindexingTemplate(
+        "testDS",
+        null,
+        null,
+        provider,
+        null,
+        null,
+        null,
+        skipOffset, // skipOffsetFromNow
+        Granularities.DAY
+    );
+
+    ReindexingTimelineView timeline = 
template.getReindexingTimelineView(referenceTime);
+
+    // Verify skipOffset is applied
+    Assertions.assertNotNull(timeline.getSkipOffset(), "Skip offset should be 
present");
+    Assertions.assertNotNull(timeline.getSkipOffset().getApplied(), "Skip 
offset should be applied");
+    Assertions.assertNull(timeline.getSkipOffset().getNotApplied(), "Skip 
offset notApplied should be null");
+
+    ReindexingTimelineView.AppliedSkipOffset applied = 
timeline.getSkipOffset().getApplied();
+    Assertions.assertEquals("skipOffsetFromNow", applied.getType());
+    Assertions.assertEquals(skipOffset, applied.getPeriod());
+
+    DateTime expectedEffectiveEndTime = referenceTime.minus(skipOffset);
+    Assertions.assertEquals(expectedEffectiveEndTime, 
applied.getEffectiveEndTime());
+
+    for (ReindexingTimelineView.IntervalConfig intervalConfig : 
timeline.getIntervals()) {
+      if 
(intervalConfig.getInterval().getEnd().isAfter(expectedEffectiveEndTime)) {
+        Assertions.assertEquals(0, intervalConfig.getRuleCount());
+      }
+    }
+  }
+
+  /**
+   * Test validation error when granularity timeline is invalid
+   */

Review Comment:
   Not really needed since the test name captures this already.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]


Reply via email to