dampcake commented on a change in pull request #7490: Add reload by interval API
URL: https://github.com/apache/incubator-druid/pull/7490#discussion_r278201183
##########
File path:
server/src/main/java/org/apache/druid/metadata/SQLMetadataSegmentManager.java
##########
@@ -219,83 +218,144 @@ public void stop()
}
}
- @Override
- public boolean enableDataSource(final String dataSource)
+ private VersionedIntervalTimeline<String, DataSegment>
getVersionedIntervalTimeline(final String dataSource, final Interval interval)
{
- try {
- final IDBI dbi = connector.getDBI();
- VersionedIntervalTimeline<String, DataSegment> segmentTimeline =
connector.inReadOnlyTransaction(
- (handle, status) -> VersionedIntervalTimeline.forSegments(
- Iterators.transform(
- handle
- .createQuery(
- StringUtils.format(
- "SELECT payload FROM %s WHERE dataSource =
:dataSource",
- getSegmentsTable()
- )
- )
- .setFetchSize(connector.getStreamingFetchSize())
- .bind("dataSource", dataSource)
- .map(ByteArrayMapper.FIRST)
- .iterator(),
- payload -> {
- try {
- return jsonMapper.readValue(payload, DataSegment.class);
- }
- catch (IOException e) {
- throw new RuntimeException(e);
- }
+ return connector.inReadOnlyTransaction(
+ (handle, status) -> VersionedIntervalTimeline.forSegments(
+ Iterators.transform(
+ handle
+ .createQuery(
+ StringUtils.format(
+ "SELECT payload FROM %1$s WHERE dataSource =
:dataSource AND start >= :start AND %2$send%2$s <= :end",
+ getSegmentsTable(), connector.getQuoteString()
+ )
+ )
+ .setFetchSize(connector.getStreamingFetchSize())
+ .bind("dataSource", dataSource)
+ .bind("start", interval.getStart().toString())
+ .bind("end", interval.getEnd().toString())
+ .map(ByteArrayMapper.FIRST)
+ .iterator(),
+ payload -> {
+ try {
+ return jsonMapper.readValue(payload, DataSegment.class);
}
- )
-
- )
- );
-
- final List<DataSegment> segments = new ArrayList<>();
- List<TimelineObjectHolder<String, DataSegment>> timelineObjectHolders =
segmentTimeline.lookup(
- Intervals.of("0000-01-01/3000-01-01")
- );
- for (TimelineObjectHolder<String, DataSegment> objectHolder :
timelineObjectHolders) {
- for (PartitionChunk<DataSegment> partitionChunk :
objectHolder.getObject()) {
- segments.add(partitionChunk.getObject());
- }
- }
-
- if (segments.isEmpty()) {
- log.warn("No segments found in the database!");
- return false;
- }
-
- dbi.withHandle(
- new HandleCallback<Void>()
- {
- @Override
- public Void withHandle(Handle handle)
- {
- Batch batch = handle.createBatch();
+ catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ )
+ )
+ );
+ }
- for (DataSegment segment : segments) {
- batch.add(
+ private VersionedIntervalTimeline<String, DataSegment>
getVersionedIntervalTimeline(final String dataSource, final Collection<String>
segmentIds)
+ {
+ return connector.inReadOnlyTransaction(
+ (handle, status) ->
VersionedIntervalTimeline.forSegments(segmentIds.stream().map(segmentId -> {
+ try {
+ return jsonMapper.readValue(StreamSupport.stream(
+ handle.createQuery(
StringUtils.format(
- "UPDATE %s SET used=true WHERE id = '%s'",
- getSegmentsTable(),
- segment.getId()
+ "SELECT payload FROM %1$s WHERE dataSource =
:dataSource AND id = :id",
+ getSegmentsTable()
)
- );
- }
- batch.execute();
-
- return null;
- }
+ )
+ .setFetchSize(connector.getStreamingFetchSize())
+ .bind("dataSource", dataSource)
+ .bind("id", segmentId)
+ .map(ByteArrayMapper.FIRST)
+ .spliterator(), false
+ ).findFirst().orElseThrow(
+ () -> new UnknownSegmentIdException(StringUtils.format("Cannot
find segment id [%s]", segmentId))
+ ), DataSegment.class);
}
- );
+ catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }).collect(Collectors.toList()))
+ );
+ }
+
+ private Stream<SegmentId> segmentIdsForInterval(
+ final VersionedIntervalTimeline<String, DataSegment>
versionedIntervalTimeline,
+ final Interval interval
+ )
+ {
+ return versionedIntervalTimeline.lookup(interval).stream().flatMap(
+ objectHolder ->
StreamSupport.stream(objectHolder.getObject().spliterator(), false).map(
+ dataSegmentPartitionChunk ->
dataSegmentPartitionChunk.getObject().getId()
+ )
+ );
+ }
+
+ @Override
+ public boolean enableDataSource(final String dataSource)
+ {
+ try {
+ return enableSegments(dataSource, Intervals.ETERNITY) != 0;
}
catch (Exception e) {
log.error(e, "Exception enabling datasource %s", dataSource);
return false;
}
+ }
- return true;
+ @Override
+ public int enableSegments(final String dataSource, final Interval interval)
Review comment:
> For the segmentId case:
>
> 1. Create an empty timeline
> 2. Add the provided segments to the timeline
This requires a query to convert the segment ID strings into actual segments.
Should that query run in the same transaction as step 4?
> 3. Using JodaUtils.condenseIntervals(), build a condensed list of
intervals from the intervals of the provided segment set
> 4. In a single transaction, issue a used=true + interval overlaps query
for each interval in the condensed intervals list constructed in 3 (could be a
single query with an OR clause for each condensed interval), and add these
used+overlapping segments to the timeline
> 5. For each provided segment, enable it if isOvershadowed is false
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]