abhishekagarwal87 commented on code in PR #15085:
URL: https://github.com/apache/druid/pull/15085#discussion_r1363456289


##########
indexing-service/src/main/java/org/apache/druid/indexing/common/actions/RetrieveSegmentsToReplaceAction.java:
##########
@@ -0,0 +1,191 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.indexing.common.actions;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.type.TypeReference;
+import org.apache.druid.indexing.common.task.Task;
+import 
org.apache.druid.indexing.common.task.batch.parallel.AbstractBatchSubtask;
+import org.apache.druid.indexing.overlord.Segments;
+import org.apache.druid.java.util.common.Intervals;
+import org.apache.druid.java.util.common.Pair;
+import org.apache.druid.metadata.ReplaceTaskLock;
+import org.apache.druid.timeline.DataSegment;
+import org.apache.druid.timeline.Partitions;
+import org.apache.druid.timeline.SegmentTimeline;
+import org.joda.time.Interval;
+
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+/**
+ * This TaskAction returns a collection of segments which have data within the 
specified interval and are marked as
+ * used, and have been created before a REPLACE lock, if any, was acquired.
+ *
+ * The order of segments within the returned collection is unspecified, but 
each segment is guaranteed to appear in
+ * the collection only once.
+ */

Review Comment:
   Please add a class-level note along these lines:
   
   "This action exists in addition to RetrieveUsedSegmentsAction because that 
action suffers from a race condition, described below."
   
   <add a description of the race condition here>



##########
server/src/main/java/org/apache/druid/metadata/IndexerSQLMetadataStorageCoordinator.java:
##########
@@ -174,15 +174,76 @@ private Collection<DataSegment> doRetrieveUsedSegments(
   }
 
   @Override
-  public List<Pair<DataSegment, String>> 
retrieveUsedSegmentsAndCreatedDates(String dataSource)
+  public List<Pair<DataSegment, String>> 
retrieveUsedSegmentsAndCreatedDates(String dataSource, Interval interval)
   {
-    String rawQueryString = "SELECT created_date, payload FROM %1$s WHERE 
dataSource = :dataSource AND used = true";
-    final String queryString = StringUtils.format(rawQueryString, 
dbTables.getSegmentsTable());
+    StringBuilder queryBuilder = new StringBuilder(
+        "SELECT created_date, payload FROM %1$s WHERE dataSource = :dataSource 
AND used = true"
+    );
+
+    final boolean intervalStartIsEternityStart = 
Intervals.ETERNITY.getStart().equals(interval.getStart());

Review Comment:
   If it is copied from elsewhere, can the existing code be reused instead?



##########
indexing-service/src/main/java/org/apache/druid/indexing/common/actions/RetrieveSegmentsToReplaceAction.java:
##########
@@ -0,0 +1,191 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.indexing.common.actions;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.type.TypeReference;
+import org.apache.druid.indexing.common.task.Task;
+import 
org.apache.druid.indexing.common.task.batch.parallel.AbstractBatchSubtask;
+import org.apache.druid.indexing.overlord.Segments;
+import org.apache.druid.java.util.common.Intervals;
+import org.apache.druid.java.util.common.Pair;
+import org.apache.druid.metadata.ReplaceTaskLock;
+import org.apache.druid.timeline.DataSegment;
+import org.apache.druid.timeline.Partitions;
+import org.apache.druid.timeline.SegmentTimeline;
+import org.joda.time.Interval;
+
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+/**
+ * This TaskAction returns a collection of segments which have data within the 
specified interval and are marked as
+ * used, and have been created before a REPLACE lock, if any, was acquired.
+ *
+ * The order of segments within the returned collection is unspecified, but 
each segment is guaranteed to appear in
+ * the collection only once.
+ */
+public class RetrieveSegmentsToReplaceAction implements 
TaskAction<Collection<DataSegment>>
+{
+  @JsonIgnore
+  private final String dataSource;
+
+  @JsonIgnore
+  private final Interval interval;
+
+  @JsonCreator
+  public RetrieveSegmentsToReplaceAction(
+      @JsonProperty("dataSource") String dataSource,
+      @JsonProperty("interval") Interval interval
+  )
+  {
+    this.dataSource = dataSource;
+    this.interval = interval;
+  }
+
+  @JsonProperty
+  public String getDataSource()
+  {
+    return dataSource;
+  }
+
+  @JsonProperty
+  public Interval getInterval()
+  {
+    return interval;
+  }
+
+  @Override
+  public TypeReference<Collection<DataSegment>> getReturnTypeReference()
+  {
+    return new TypeReference<Collection<DataSegment>>() {};
+  }
+
+  @Override
+  public Collection<DataSegment> perform(Task task, TaskActionToolbox toolbox)
+  {
+    // The DruidInputSource can be used to read from one datasource and write 
to another.
+    // In such a case, the action can simply fetch all visible segments for 
the datasource and interval
+    if (!task.getDataSource().equals(dataSource)) {
+      return retrieveAllVisibleSegments(toolbox);
+    }
+
+    final String supervisorId;
+    if (task instanceof AbstractBatchSubtask) {
+      supervisorId = ((AbstractBatchSubtask) task).getSupervisorTaskId();
+    } else {
+      supervisorId = task.getId();
+    }
+
+    final Set<ReplaceTaskLock> replaceLocksForTask = toolbox
+        .getTaskLockbox()
+        .getAllReplaceLocksForDatasource(task.getDataSource())
+        .stream()
+        .filter(lock -> supervisorId.equals(lock.getSupervisorTaskId()))
+        .collect(Collectors.toSet());
+
+    // If there are no replace locks for the task, simply fetch all visible 
segments for the interval
+    if (replaceLocksForTask.isEmpty()) {
+      return retrieveAllVisibleSegments(toolbox);
+    }
+
+    Map<Interval, Map<String, Set<DataSegment>>> intervalToCreatedToSegments = 
new HashMap<>();
+    for (Pair<DataSegment, String> segmentAndCreatedDate :
+        
toolbox.getIndexerMetadataStorageCoordinator().retrieveUsedSegmentsAndCreatedDates(dataSource,
 interval)) {
+      final DataSegment segment = segmentAndCreatedDate.lhs;
+      final String created = segmentAndCreatedDate.rhs;
+      intervalToCreatedToSegments.computeIfAbsent(segment.getInterval(), s -> 
new HashMap<>())
+                                 .computeIfAbsent(created, c -> new 
HashSet<>())
+                                 .add(segment);
+    }
+
+    Set<DataSegment> allSegmentsToBeReplaced = new HashSet<>();
+    for (final Map.Entry<Interval, Map<String, Set<DataSegment>>> entry : 
intervalToCreatedToSegments.entrySet()) {
+      final Interval segmentInterval = entry.getKey();
+      String lockVersion = null;
+      for (ReplaceTaskLock replaceLock : replaceLocksForTask) {
+        if (replaceLock.getInterval().contains(segmentInterval)) {
+          lockVersion = replaceLock.getVersion();
+        }
+      }
+      final Map<String, Set<DataSegment>> createdToSegmentsMap = 
entry.getValue();
+      for (Map.Entry<String, Set<DataSegment>> createdAndSegments : 
createdToSegmentsMap.entrySet()) {
+        if (lockVersion == null || 
lockVersion.compareTo(createdAndSegments.getKey()) > 0) {
+          allSegmentsToBeReplaced.addAll(createdAndSegments.getValue());

Review Comment:
   We should perhaps add a log line when a segment wasn't considered because it 
was created after the replace lock was acquired. I am assuming this will not be 
frequent. 



##########
indexing-service/src/main/java/org/apache/druid/indexing/common/actions/RetrieveSegmentsToReplaceAction.java:
##########
@@ -0,0 +1,191 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.indexing.common.actions;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.type.TypeReference;
+import org.apache.druid.indexing.common.task.Task;
+import 
org.apache.druid.indexing.common.task.batch.parallel.AbstractBatchSubtask;
+import org.apache.druid.indexing.overlord.Segments;
+import org.apache.druid.java.util.common.Intervals;
+import org.apache.druid.java.util.common.Pair;
+import org.apache.druid.metadata.ReplaceTaskLock;
+import org.apache.druid.timeline.DataSegment;
+import org.apache.druid.timeline.Partitions;
+import org.apache.druid.timeline.SegmentTimeline;
+import org.joda.time.Interval;
+
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+/**
+ * This TaskAction returns a collection of segments which have data within the 
specified interval and are marked as
+ * used, and have been created before a REPLACE lock, if any, was acquired.
+ *
+ * The order of segments within the returned collection is unspecified, but 
each segment is guaranteed to appear in
+ * the collection only once.
+ */
+public class RetrieveSegmentsToReplaceAction implements 
TaskAction<Collection<DataSegment>>
+{
+  @JsonIgnore
+  private final String dataSource;
+
+  @JsonIgnore
+  private final Interval interval;
+
+  @JsonCreator
+  public RetrieveSegmentsToReplaceAction(
+      @JsonProperty("dataSource") String dataSource,
+      @JsonProperty("interval") Interval interval
+  )
+  {
+    this.dataSource = dataSource;
+    this.interval = interval;
+  }
+
+  @JsonProperty
+  public String getDataSource()
+  {
+    return dataSource;
+  }
+
+  @JsonProperty
+  public Interval getInterval()
+  {
+    return interval;
+  }
+
+  @Override
+  public TypeReference<Collection<DataSegment>> getReturnTypeReference()
+  {
+    return new TypeReference<Collection<DataSegment>>() {};
+  }
+
+  @Override
+  public Collection<DataSegment> perform(Task task, TaskActionToolbox toolbox)
+  {
+    // The DruidInputSource can be used to read from one datasource and write 
to another.
+    // In such a case, the action can simply fetch all visible segments for 
the datasource and interval
+    if (!task.getDataSource().equals(dataSource)) {
+      return retrieveAllVisibleSegments(toolbox);
+    }
+
+    final String supervisorId;
+    if (task instanceof AbstractBatchSubtask) {
+      supervisorId = ((AbstractBatchSubtask) task).getSupervisorTaskId();
+    } else {
+      supervisorId = task.getId();
+    }
+
+    final Set<ReplaceTaskLock> replaceLocksForTask = toolbox
+        .getTaskLockbox()
+        .getAllReplaceLocksForDatasource(task.getDataSource())
+        .stream()
+        .filter(lock -> supervisorId.equals(lock.getSupervisorTaskId()))
+        .collect(Collectors.toSet());
+
+    // If there are no replace locks for the task, simply fetch all visible 
segments for the interval
+    if (replaceLocksForTask.isEmpty()) {
+      return retrieveAllVisibleSegments(toolbox);
+    }
+
+    Map<Interval, Map<String, Set<DataSegment>>> intervalToCreatedToSegments = 
new HashMap<>();
+    for (Pair<DataSegment, String> segmentAndCreatedDate :
+        
toolbox.getIndexerMetadataStorageCoordinator().retrieveUsedSegmentsAndCreatedDates(dataSource,
 interval)) {
+      final DataSegment segment = segmentAndCreatedDate.lhs;
+      final String created = segmentAndCreatedDate.rhs;
+      intervalToCreatedToSegments.computeIfAbsent(segment.getInterval(), s -> 
new HashMap<>())
+                                 .computeIfAbsent(created, c -> new 
HashSet<>())
+                                 .add(segment);
+    }
+
+    Set<DataSegment> allSegmentsToBeReplaced = new HashSet<>();
+    for (final Map.Entry<Interval, Map<String, Set<DataSegment>>> entry : 
intervalToCreatedToSegments.entrySet()) {
+      final Interval segmentInterval = entry.getKey();
+      String lockVersion = null;
+      for (ReplaceTaskLock replaceLock : replaceLocksForTask) {
+        if (replaceLock.getInterval().contains(segmentInterval)) {
+          lockVersion = replaceLock.getVersion();
+        }
+      }
+      final Map<String, Set<DataSegment>> createdToSegmentsMap = 
entry.getValue();
+      for (Map.Entry<String, Set<DataSegment>> createdAndSegments : 
createdToSegmentsMap.entrySet()) {
+        if (lockVersion == null || 
lockVersion.compareTo(createdAndSegments.getKey()) > 0) {

Review Comment:
   is using string comparison fine since we want to compare dates? 



##########
indexing-service/src/main/java/org/apache/druid/indexing/common/actions/RetrieveSegmentsToReplaceAction.java:
##########
@@ -0,0 +1,191 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.indexing.common.actions;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.type.TypeReference;
+import org.apache.druid.indexing.common.task.Task;
+import 
org.apache.druid.indexing.common.task.batch.parallel.AbstractBatchSubtask;
+import org.apache.druid.indexing.overlord.Segments;
+import org.apache.druid.java.util.common.Intervals;
+import org.apache.druid.java.util.common.Pair;
+import org.apache.druid.metadata.ReplaceTaskLock;
+import org.apache.druid.timeline.DataSegment;
+import org.apache.druid.timeline.Partitions;
+import org.apache.druid.timeline.SegmentTimeline;
+import org.joda.time.Interval;
+
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+/**
+ * This TaskAction returns a collection of segments which have data within the 
specified interval and are marked as
+ * used, and have been created before a REPLACE lock, if any, was acquired.
+ *
+ * The order of segments within the returned collection is unspecified, but 
each segment is guaranteed to appear in
+ * the collection only once.
+ */
+public class RetrieveSegmentsToReplaceAction implements 
TaskAction<Collection<DataSegment>>
+{
+  @JsonIgnore
+  private final String dataSource;
+
+  @JsonIgnore
+  private final Interval interval;
+
+  @JsonCreator
+  public RetrieveSegmentsToReplaceAction(
+      @JsonProperty("dataSource") String dataSource,
+      @JsonProperty("interval") Interval interval
+  )
+  {
+    this.dataSource = dataSource;
+    this.interval = interval;
+  }
+
+  @JsonProperty
+  public String getDataSource()
+  {
+    return dataSource;
+  }
+
+  @JsonProperty
+  public Interval getInterval()
+  {
+    return interval;
+  }
+
+  @Override
+  public TypeReference<Collection<DataSegment>> getReturnTypeReference()
+  {
+    return new TypeReference<Collection<DataSegment>>() {};
+  }
+
+  @Override
+  public Collection<DataSegment> perform(Task task, TaskActionToolbox toolbox)
+  {
+    // The DruidInputSource can be used to read from one datasource and write 
to another.
+    // In such a case, the action can simply fetch all visible segments for 
the datasource and interval

Review Comment:
   ```suggestion
       // The DruidInputSource can be used to read from one datasource and 
write to another.
       // In such a case, the race condition described in class-level docs 
cannot occur and the action can simply fetch all visible segments for the 
datasource and interval
   ```



##########
indexing-service/src/main/java/org/apache/druid/indexing/common/actions/RetrieveSegmentsToReplaceAction.java:
##########
@@ -0,0 +1,191 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.indexing.common.actions;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.type.TypeReference;
+import org.apache.druid.indexing.common.task.Task;
+import 
org.apache.druid.indexing.common.task.batch.parallel.AbstractBatchSubtask;
+import org.apache.druid.indexing.overlord.Segments;
+import org.apache.druid.java.util.common.Intervals;
+import org.apache.druid.java.util.common.Pair;
+import org.apache.druid.metadata.ReplaceTaskLock;
+import org.apache.druid.timeline.DataSegment;
+import org.apache.druid.timeline.Partitions;
+import org.apache.druid.timeline.SegmentTimeline;
+import org.joda.time.Interval;
+
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+/**
+ * This TaskAction returns a collection of segments which have data within the 
specified interval and are marked as
+ * used, and have been created before a REPLACE lock, if any, was acquired.
+ *
+ * The order of segments within the returned collection is unspecified, but 
each segment is guaranteed to appear in
+ * the collection only once.
+ */
+public class RetrieveSegmentsToReplaceAction implements 
TaskAction<Collection<DataSegment>>
+{
+  @JsonIgnore
+  private final String dataSource;
+
+  @JsonIgnore
+  private final Interval interval;
+
+  @JsonCreator
+  public RetrieveSegmentsToReplaceAction(
+      @JsonProperty("dataSource") String dataSource,
+      @JsonProperty("interval") Interval interval
+  )
+  {
+    this.dataSource = dataSource;
+    this.interval = interval;
+  }
+
+  @JsonProperty
+  public String getDataSource()
+  {
+    return dataSource;
+  }
+
+  @JsonProperty
+  public Interval getInterval()
+  {
+    return interval;
+  }
+
+  @Override
+  public TypeReference<Collection<DataSegment>> getReturnTypeReference()
+  {
+    return new TypeReference<Collection<DataSegment>>() {};
+  }
+
+  @Override
+  public Collection<DataSegment> perform(Task task, TaskActionToolbox toolbox)
+  {
+    // The DruidInputSource can be used to read from one datasource and write 
to another.
+    // In such a case, the action can simply fetch all visible segments for 
the datasource and interval
+    if (!task.getDataSource().equals(dataSource)) {
+      return retrieveAllVisibleSegments(toolbox);
+    }
+
+    final String supervisorId;
+    if (task instanceof AbstractBatchSubtask) {
+      supervisorId = ((AbstractBatchSubtask) task).getSupervisorTaskId();
+    } else {
+      supervisorId = task.getId();
+    }
+
+    final Set<ReplaceTaskLock> replaceLocksForTask = toolbox
+        .getTaskLockbox()
+        .getAllReplaceLocksForDatasource(task.getDataSource())
+        .stream()
+        .filter(lock -> supervisorId.equals(lock.getSupervisorTaskId()))
+        .collect(Collectors.toSet());
+
+    // If there are no replace locks for the task, simply fetch all visible 
segments for the interval
+    if (replaceLocksForTask.isEmpty()) {
+      return retrieveAllVisibleSegments(toolbox);
+    }
+
+    Map<Interval, Map<String, Set<DataSegment>>> intervalToCreatedToSegments = 
new HashMap<>();
+    for (Pair<DataSegment, String> segmentAndCreatedDate :
+        
toolbox.getIndexerMetadataStorageCoordinator().retrieveUsedSegmentsAndCreatedDates(dataSource,
 interval)) {
+      final DataSegment segment = segmentAndCreatedDate.lhs;
+      final String created = segmentAndCreatedDate.rhs;
+      intervalToCreatedToSegments.computeIfAbsent(segment.getInterval(), s -> 
new HashMap<>())
+                                 .computeIfAbsent(created, c -> new 
HashSet<>())
+                                 .add(segment);
+    }
+
+    Set<DataSegment> allSegmentsToBeReplaced = new HashSet<>();
+    for (final Map.Entry<Interval, Map<String, Set<DataSegment>>> entry : 
intervalToCreatedToSegments.entrySet()) {
+      final Interval segmentInterval = entry.getKey();
+      String lockVersion = null;
+      for (ReplaceTaskLock replaceLock : replaceLocksForTask) {
+        if (replaceLock.getInterval().contains(segmentInterval)) {
+          lockVersion = replaceLock.getVersion();
+        }
+      }
+      final Map<String, Set<DataSegment>> createdToSegmentsMap = 
entry.getValue();
+      for (Map.Entry<String, Set<DataSegment>> createdAndSegments : 
createdToSegmentsMap.entrySet()) {
+        if (lockVersion == null || 
lockVersion.compareTo(createdAndSegments.getKey()) > 0) {

Review Comment:
   If lockVersion is null, we should not even enter the for loop. 



##########
server/src/main/java/org/apache/druid/metadata/IndexerSQLMetadataStorageCoordinator.java:
##########
@@ -174,15 +174,76 @@ private Collection<DataSegment> doRetrieveUsedSegments(
   }
 
   @Override
-  public List<Pair<DataSegment, String>> 
retrieveUsedSegmentsAndCreatedDates(String dataSource)
+  public List<Pair<DataSegment, String>> 
retrieveUsedSegmentsAndCreatedDates(String dataSource, Interval interval)
   {
-    String rawQueryString = "SELECT created_date, payload FROM %1$s WHERE 
dataSource = :dataSource AND used = true";
-    final String queryString = StringUtils.format(rawQueryString, 
dbTables.getSegmentsTable());
+    StringBuilder queryBuilder = new StringBuilder(
+        "SELECT created_date, payload FROM %1$s WHERE dataSource = :dataSource 
AND used = true"
+    );
+
+    final boolean intervalStartIsEternityStart = 
Intervals.ETERNITY.getStart().equals(interval.getStart());

Review Comment:
   Is this query-fragment generation copied from somewhere? 



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to