This is an automated email from the ASF dual-hosted git repository.
jsedding pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/jackrabbit-oak.git
The following commit(s) were added to refs/heads/trunk by this push:
new 59bfcba067 OAK-11895 - CheckpointCompactor writes to "after" instead
of "onto" NodeState (#2549)
59bfcba067 is described below
commit 59bfcba06781f6a39cd0715c99325f34a544b043
Author: Julian Sedding <[email protected]>
AuthorDate: Fri Oct 10 16:48:53 2025 +0200
OAK-11895 - CheckpointCompactor writes to "after" instead of "onto"
NodeState (#2549)
---
.../oak/segment/CheckpointCompactor.java | 194 ++++++++++-----------
.../apache/jackrabbit/oak/segment/Compactor.java | 61 +++++++
...mpactor.java => LegacyCheckpointCompactor.java} | 27 ++-
.../segment/file/AbstractCompactionStrategy.java | 17 +-
.../segment/AbstractCompactorExternalBlobTest.java | 32 ++--
.../oak/segment/AbstractCompactorTest.java | 59 ++++---
.../CheckpointCompactorExternalBlobTest.java | 4 +-
.../oak/segment/CheckpointCompactorTest.java | 4 +-
.../segment/CompactToDifferentNodeStoreTest.java | 145 +++++++++++++++
.../jackrabbit/oak/segment/CompactorTestUtils.java | 16 +-
...est.java => LegacyCheckpointCompactorTest.java} | 14 +-
.../segment/ParallelCompactorExternalBlobTest.java | 17 +-
.../oak/segment/ParallelCompactorTest.java | 10 +-
13 files changed, 422 insertions(+), 178 deletions(-)
diff --git
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/CheckpointCompactor.java
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/CheckpointCompactor.java
index 2385075503..a0fd40083b 100644
---
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/CheckpointCompactor.java
+++
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/CheckpointCompactor.java
@@ -19,21 +19,24 @@ package org.apache.jackrabbit.oak.segment;
import static java.util.Objects.requireNonNull;
+import static java.util.Objects.requireNonNullElseGet;
import static org.apache.jackrabbit.oak.commons.PathUtils.elements;
-import static org.apache.jackrabbit.oak.commons.PathUtils.getName;
-import static org.apache.jackrabbit.oak.commons.PathUtils.getParentPath;
import static
org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
+import static org.apache.jackrabbit.oak.segment.SegmentNodeStore.CHECKPOINTS;
+import static org.apache.jackrabbit.oak.segment.SegmentNodeStore.ROOT;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
-import java.util.Map.Entry;
+import java.util.Objects;
+import java.util.Set;
+import java.util.function.BiFunction;
+import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.commons.Buffer;
import org.apache.jackrabbit.oak.commons.conditions.Validate;
import org.apache.jackrabbit.oak.plugins.memory.MemoryChildNodeEntry;
@@ -79,102 +82,116 @@ public class CheckpointCompactor extends Compactor {
}
@Override
- public @Nullable CompactedNodeState compactDown(
+ public @Nullable CompactedNodeState compactDown(@NotNull NodeState before,
@NotNull NodeState after, @NotNull Canceller hardCanceller, @NotNull Canceller
softCanceller) throws IOException {
+ return doCompact(before, after, after, hardCanceller, softCanceller);
+ }
+
+ @Override
+ public @Nullable CompactedNodeState compact(@NotNull NodeState before,
@NotNull NodeState after, @NotNull NodeState onto, @NotNull Canceller
canceller) throws IOException {
+ return doCompact(before, after, onto, canceller, null);
+ }
+
+ /**
+ * Implementation that compacts checkpoints chronologically on top of each
other. The implementation
+ * supports both {@link #compactDown(NodeState, Canceller, Canceller)}
type and
+ * {@link #compactUp(NodeState, Canceller)} type operations.
+ * <p>
+ * Soft cancellation is only supported for {@link #compactDown(NodeState,
Canceller, Canceller)} type
+ * scenarios, i.e. when {@code after.equals(onto)}, and will return a
partially compacted state if cancelled.
+ * <p>
+ * Hard cancellation will abandon the compaction and return {@code null}.
+ * <p>
+ * If compaction completes successfully, a fully compacted state is
returned.
+ *
+     * @param before        the node state to diff {@code after} against
+ * @param after the node state diffed against {@code before}
+     * @param onto          the node state to which the compacted diff is applied
+ * @param hardCanceller the trigger for hard cancellation, will abandon
compaction if cancelled
+ * @param softCanceller the trigger for soft cancellation, will return
partially compacted state if cancelled; may only be set if {@code
after.equals(onto)}, implementations should validate the arguments
+ * @return a fully-compacted or partially-compacted node
state, or {@code null} if hard-cancelled
+ * @throws IOException will throw exception if any errors occur during
compaction
+ */
+ private @Nullable CompactedNodeState doCompact(
@NotNull NodeState before,
@NotNull NodeState after,
+ @NotNull NodeState onto,
@NotNull Canceller hardCanceller,
- @NotNull Canceller softCanceller
+ @Nullable Canceller softCanceller
) throws IOException {
- Iterator<Entry<String, NodeState>> iterator = collectRoots(before,
after).entrySet().iterator();
- Entry<String, NodeState> entry = iterator.next();
- String path = entry.getKey();
-
- // could already be in cache if compactor is reused
- CompactedNodeState compacted = cpCache.get(entry.getValue());
- gcListener.info("compacting {}.", path);
- if (compacted == null) {
- compacted = compactDownWithDelegate(getRoot(before),
entry.getValue(), hardCanceller, softCanceller);
- if (compacted == null) {
- return null;
- }
- }
+ Validate.checkArgument(softCanceller == null || Objects.equals(after,
onto),
+ "softCanceller is only supported for compactDown, i.e. when
Objects.equals(after, onto)");
- NodeBuilder builder = after.builder();
+ Set<String> superRoots = collectSuperRootPaths(before, after);
Buffer stableIdBytes =
requireNonNull(CompactorUtils.getStableIdBytes(after));
- getChild(builder, getParentPath(path)).setChildNode(getName(path),
compacted);
+ NodeBuilder rootBuilder = onto.builder();
+ CompactedNodeState compacted = null;
+ for (String path : superRoots) {
+ NodeBuilder builder = getDescendant(rootBuilder, path,
NodeBuilder::child);
+ NodeState afterSuperRoot = getDescendant(after, path,
NodeState::getChildNode);
- if (compacted.isComplete()) {
- cpCache.put(entry.getValue(), compacted);
- } else {
- return compactor.writeNodeState(builder.getNodeState(),
stableIdBytes, false);
- }
+ NodeState baseRoot = requireNonNullElseGet(compacted, () ->
getRoot(getDescendant(before, path, NodeState::getChildNode)));
+ NodeState ontoRoot = requireNonNullElseGet(compacted, () ->
getRoot(getDescendant(onto, path, NodeState::getChildNode)));
- before = entry.getValue();
-
- while (iterator.hasNext()) {
- entry = iterator.next();
- path = entry.getKey();
- gcListener.info("compacting {}.", path);
-
- compacted = compactWithCache(before, entry.getValue(), compacted,
hardCanceller);
+ compacted = compactRootState(baseRoot, getRoot(afterSuperRoot),
ontoRoot, hardCanceller, softCanceller);
if (compacted == null) {
+ // only happens for hard cancellation
return null;
}
- before = entry.getValue();
- Validate.checkState(compacted.isComplete());
- getChild(builder, getParentPath(path)).setChildNode(getName(path),
compacted);
+ Validate.checkState(compacted.isComplete() ||
isCancelled(softCanceller),
+ "compaction must be complete unless cancelled");
- if (softCanceller.check().isCancelled()) {
- return compactor.writeNodeState(builder.getNodeState(),
stableIdBytes, false);
+ builder.setChildNode(ROOT, compacted);
+ if (path.startsWith(CHECKPOINTS + '/')) {
+ compactCheckpointMetadata(builder, afterSuperRoot);
+ }
+
+ if (isCancelled(softCanceller)) {
+ break;
}
}
- return compactor.writeNodeState(builder.getNodeState(), stableIdBytes,
true);
+ return compactor.writeNodeState(rootBuilder.getNodeState(),
stableIdBytes, !isCancelled(softCanceller));
}
- @Override
- public @Nullable CompactedNodeState compact(
- @NotNull NodeState before,
- @NotNull NodeState after,
- @NotNull NodeState onto,
- @NotNull Canceller canceller
- ) throws IOException {
- LinkedHashMap<String, NodeState> roots = collectRoots(before, after);
-
- NodeBuilder builder = after.builder();
- Buffer stableIdBytes =
requireNonNull(CompactorUtils.getStableIdBytes(after));
-
- before = getRoot(before);
- onto = getRoot(onto);
+ private @Nullable CompactedNodeState compactRootState(@NotNull NodeState
baseRoot, @NotNull NodeState afterRoot, @NotNull NodeState ontoRoot, @NotNull
Canceller hardCanceller, @Nullable Canceller softCanceller) throws IOException {
+ if (Objects.equals(ontoRoot, afterRoot)) {
+ // down compaction only affects the first iteration, when
compacted == null and ontoRoot.equals(afterRoot).
+ return compactWithCache(baseRoot, afterRoot, ontoRoot,
hardCanceller, softCanceller);
+ } else {
+ // for subsequent iterations, ontoRoot == compacted and thus
ontoRoot.equals(afterRoot) no longer holds true.
+ return compactWithCache(baseRoot, afterRoot, ontoRoot,
hardCanceller, null);
+ }
+ }
- for (Entry<String, NodeState> entry : roots.entrySet()) {
- String path = entry.getKey();
- after = entry.getValue();
- CompactedNodeState compacted = compactWithCache(before, after,
onto, canceller);
- if (compacted == null) {
- return null;
- }
- Validate.checkState(compacted.isComplete());
- getChild(builder, getParentPath(path)).setChildNode(getName(path),
compacted);
- before = after;
- onto = compacted;
+ private void compactCheckpointMetadata(NodeBuilder builder, NodeState
afterSuperRoot) {
+ // copy checkpoint "properties" child node
+ NodeBuilder props = builder.setChildNode("properties");
+ for (PropertyState properties :
afterSuperRoot.getChildNode("properties").getProperties()) {
+ props.setProperty(compactor.compact(properties));
+ }
+ // copy checkpoint properties (on the parent of the root node)
+ for (PropertyState property : afterSuperRoot.getProperties()) {
+ builder.setProperty(compactor.compact(property));
}
+ }
- return compactor.writeNodeState(builder.getNodeState(), stableIdBytes,
true);
+ private static boolean isCancelled(@Nullable Canceller softCanceller) {
+ return softCanceller != null && softCanceller.check().isCancelled();
}
private @Nullable CompactedNodeState compactWithCache(
@NotNull NodeState before,
@NotNull NodeState after,
@NotNull NodeState onto,
- @NotNull Canceller canceller
+ @NotNull Canceller hardCanceller,
+ @Nullable Canceller softCanceller
) throws IOException {
CompactedNodeState compacted = cpCache.get(after);
if (compacted == null) {
- compacted = compactWithDelegate(before, after, onto, canceller);
- if (compacted != null) {
+ compacted = compactor.compact(before, after, onto, hardCanceller,
softCanceller);
+ if (compacted != null && compacted.isComplete()) {
cpCache.put(after, compacted);
}
} else {
@@ -188,7 +205,7 @@ public class CheckpointCompactor extends Compactor {
* state from a {@code superRoot}. This list consists of all checkpoints
followed by
* the root.
*/
- private @NotNull LinkedHashMap<String, NodeState> collectRoots(
+ private @NotNull LinkedHashSet<String> collectSuperRootPaths(
@NotNull NodeState superRootBefore,
@NotNull NodeState superRootAfter) {
List<ChildNodeEntry> checkpoints = new ArrayList<>();
@@ -208,48 +225,27 @@ public class CheckpointCompactor extends Compactor {
return Long.compare(c1, c2);
});
- LinkedHashMap<String, NodeState> roots = new LinkedHashMap<>();
+ LinkedHashSet<String> roots = new LinkedHashSet<>();
for (ChildNodeEntry checkpoint : checkpoints) {
String name = checkpoint.getName();
NodeState node = checkpoint.getNodeState();
gcListener.info("found checkpoint {} created on {}.",
name, new Date(node.getLong("created")));
- roots.put("checkpoints/" + name + "/root",
node.getChildNode("root"));
+ roots.add("checkpoints/" + name);
}
- roots.put("root", superRootAfter.getChildNode("root"));
+ roots.add("");
return roots;
}
private static @NotNull NodeState getRoot(@NotNull NodeState node) {
- return node.hasChildNode("root") ? node.getChildNode("root") :
EMPTY_NODE;
+ return node.hasChildNode(ROOT) ? node.getChildNode(ROOT) : EMPTY_NODE;
}
- private static @NotNull NodeBuilder getChild(NodeBuilder builder, String
path) {
+ private static <T> T getDescendant(T t, String path, BiFunction<T, String,
T> getChild) {
for (String name : elements(path)) {
- builder = builder.getChildNode(name);
+ t = getChild.apply(t, name);
}
- return builder;
- }
-
- /**
- * Delegate compaction to another, usually simpler, implementation.
- */
- private @Nullable CompactedNodeState compactDownWithDelegate(
- @NotNull NodeState before,
- @NotNull NodeState after,
- @NotNull Canceller hardCanceller,
- @NotNull Canceller softCanceller
- ) throws IOException {
- return compactor.compactDown(before, after, hardCanceller,
softCanceller);
- }
-
- private @Nullable CompactedNodeState compactWithDelegate(
- @NotNull NodeState before,
- @NotNull NodeState after,
- @NotNull NodeState onto,
- @NotNull Canceller canceller
- ) throws IOException {
- return compactor.compact(before, after, onto, canceller);
+ return t;
}
}
diff --git
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/Compactor.java
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/Compactor.java
index e0dc1037b6..5e08b8496d 100644
---
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/Compactor.java
+++
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/Compactor.java
@@ -29,7 +29,59 @@ import java.io.IOException;
import static
org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
+/**
+ * A compactor compacts the differences between two {@link NodeState}s
+ * ({@code before} and {@code after}) on top of a third {@link NodeState}
+ * ({@code onto}).
+ * <p>
+ * The compaction can be done in two ways:
+ * <ul>
+ * <li>Compacting down, where the differences between {@code before} and
+ * {@code after} are compacted on top of the {@code after} state. This is
+ * useful, because it allows for partial compactions. I.e. compactions that
+ * are "soft-cancelled" before the full compaction process completes.
+ * <p>
+ * During "down compaction", parts of the tree are compacted and the
respective
+ * node states are substituted with their compacted counterparts. This
results
+ * in an {@link #equals(Object) equal} node state at any moment in the
process,
+ * which means that if the compaction is cancelled, a valid, albeit only
+ * partially compacted state can be returned.
+ * <p>
+ * After a partially completed "down compaction", the segments referenced
by the
+ * root record can be of different generations. However, the stable
identifier
+ * of the compacted records remain unchanged.
+ * </li>
+ * <li>Compacting up, where the differences between {@code before} and
+ * {@code after} are compacted on top of {@code before}. This is useful to
+ * create a new {@link NodeState} that contains the changes from
+ * {@code after} but is based on {@code before}, often on an empty
node-state.
+ * <p>
+ * "Up compaction" results in a fully compacted state, but it can only be
"hard-cancelled",
+ * i.e. either the full compaction is done, or nothing at all ({@code
null} is returned
+ * if cancelled).
+ * <p>
+ * Generally, "up compaction" is more thorough than "down compaction",
because
+ * the entire content tree is rewritten. Furthermore, it guarantees that
after
+ * compaction all reachable segments are of the same, new generation.
+ * </li>
+ * </ul>
+ * <p>
+ * The compaction process can be cancelled through a {@link Canceller}. If
+ * cancellation is requested, the compaction will either abort completely or,
+ * in case of compacting down, return a partially compacted state.
+ */
public abstract class Compactor {
+
+ /**
+ * Convenience method to run {@link #compactDown(NodeState, NodeState,
Canceller, Canceller)},
+ * where the {@code before} state is the empty node state.
+ *
+ * @param state the (after) node state to compact
+ * @param hardCanceller the trigger for hard cancellation, will abandon
compaction if cancelled
+ * @param softCanceller the trigger for soft cancellation, will return
partially compacted state if cancelled
+ * @return the fully or partially compacted node state, or
{@code null} if hard-cancelled
+ * @throws IOException will throw exception if any errors occur during
compaction
+ */
public final @Nullable CompactedNodeState compactDown(
@NotNull NodeState state,
@NotNull Canceller hardCanceller,
@@ -54,6 +106,15 @@ public abstract class Compactor {
@NotNull Canceller softCanceller
) throws IOException;
+ /**
+ * Convenience method to run {@link #compactUp(NodeState, NodeState,
Canceller)},
+ * where the {@code before} state is the empty node state.
+ *
+ * @param state the (after) node state to compact
+ * @param canceller the trigger for hard cancellation, will abandon
compaction if cancelled
+ * @return the fully compacted node state, or {@code null} if
hard-cancelled
+ * @throws IOException will throw exception if any errors occur during
compaction
+ */
public final @Nullable CompactedNodeState compactUp(
@NotNull NodeState state,
@NotNull Canceller canceller
diff --git
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/CheckpointCompactor.java
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/LegacyCheckpointCompactor.java
similarity index 90%
copy from
oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/CheckpointCompactor.java
copy to
oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/LegacyCheckpointCompactor.java
index 2385075503..897c46c16f 100644
---
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/CheckpointCompactor.java
+++
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/LegacyCheckpointCompactor.java
@@ -48,6 +48,15 @@ import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
+ * <b>
+ * Note: this implementation is a copy of the code before the fix for
OAK-11895. This
+ * implementation is left here in case of new issues introduced with
OAK-11895. In
+ * {@code
org.apache.jackrabbit.oak.segment.file.AbstractCompactionStrategy} a system
+ * property based toggle ("oak.compaction.legacy", boolean, default value
"false")
+ * is implemented to switch between {@code CheckpointCompactor} and
+ * {@code LegacyCheckpointCompactor}.
+ * </b>
+ *
* This compactor implementation is aware of the checkpoints in the repository.
* It uses this information to further optimise the compaction result by
* <ul>
@@ -57,8 +66,16 @@ import org.jetbrains.annotations.Nullable;
* <li>Caching the compacted checkpoints and root states for deduplication
should
* the same checkpoint or root state occur again in a later compaction
retry cycle.
* </ul>
+ *
+ * @see CheckpointCompactor
+ * @deprecated Use {@link
org.apache.jackrabbit.oak.segment.CheckpointCompactor} instead.
+ * @since 1.86.0 (OAK-11895)
*/
-public class CheckpointCompactor extends Compactor {
+@Deprecated(since = "1.88.0")
+public class LegacyCheckpointCompactor extends Compactor {
+
+ private static final String CREATED = "created";
+
protected final @NotNull GCMonitor gcListener;
private final @NotNull Map<NodeState, CompactedNodeState> cpCache = new
HashMap<>();
@@ -71,7 +88,7 @@ public class CheckpointCompactor extends Compactor {
* @param gcListener listener receiving notifications about the
garbage collection process
* @param compactor the delegate compactor to use for the actual
compaction work
*/
- public CheckpointCompactor(
+ public LegacyCheckpointCompactor(
@NotNull GCMonitor gcListener,
@NotNull ClassicCompactor compactor) {
this.gcListener = gcListener;
@@ -203,8 +220,8 @@ public class CheckpointCompactor extends Compactor {
);
checkpoints.sort((cne1, cne2) -> {
- long c1 = cne1.getNodeState().getLong("created");
- long c2 = cne2.getNodeState().getLong("created");
+ long c1 = cne1.getNodeState().getLong(CREATED);
+ long c2 = cne2.getNodeState().getLong(CREATED);
return Long.compare(c1, c2);
});
@@ -213,7 +230,7 @@ public class CheckpointCompactor extends Compactor {
String name = checkpoint.getName();
NodeState node = checkpoint.getNodeState();
gcListener.info("found checkpoint {} created on {}.",
- name, new Date(node.getLong("created")));
+ name, new Date(node.getLong(CREATED)));
roots.put("checkpoints/" + name + "/root",
node.getChildNode("root"));
}
roots.put("root", superRootAfter.getChildNode("root"));
diff --git
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/AbstractCompactionStrategy.java
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/AbstractCompactionStrategy.java
index 19df646df4..87f0894fc1 100644
---
a/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/AbstractCompactionStrategy.java
+++
b/oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/AbstractCompactionStrategy.java
@@ -27,9 +27,11 @@ import static
org.apache.jackrabbit.oak.segment.compaction.SegmentGCStatus.COMPA
import static
org.apache.jackrabbit.oak.segment.file.TarRevisions.EXPEDITE_OPTION;
import static org.apache.jackrabbit.oak.segment.file.TarRevisions.timeout;
+import org.apache.jackrabbit.oak.commons.properties.SystemPropertySupplier;
import org.apache.jackrabbit.oak.segment.CheckpointCompactor;
import org.apache.jackrabbit.oak.segment.ClassicCompactor;
import org.apache.jackrabbit.oak.segment.Compactor;
+import org.apache.jackrabbit.oak.segment.LegacyCheckpointCompactor;
import org.apache.jackrabbit.oak.segment.ParallelCompactor;
import org.apache.jackrabbit.oak.segment.RecordId;
import org.apache.jackrabbit.oak.segment.SegmentNodeState;
@@ -42,6 +44,7 @@ import org.apache.jackrabbit.oak.spi.state.NodeState;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.BooleanSupplier;
abstract class AbstractCompactionStrategy implements CompactionStrategy {
@@ -290,14 +293,26 @@ abstract class AbstractCompactionStrategy implements
CompactionStrategy {
}
}
- private Compactor newCompactor(Context context, CompactionWriter writer) {
+ @SuppressWarnings("deprecation")
+ private static Compactor newCompactor(Context context, CompactionWriter
writer) {
CompactorType compactorType =
context.getGCOptions().getCompactorType();
+ BooleanSupplier useLegacyImpl = SystemPropertySupplier
+ .create("oak.compaction.legacy", Boolean.FALSE)::get;
switch (compactorType) {
case PARALLEL_COMPACTOR:
+ if (useLegacyImpl.getAsBoolean()) {
+ return new
LegacyCheckpointCompactor(context.getGCListener(),
+ new ParallelCompactor(context.getGCListener(),
writer, context.getCompactionMonitor(),
+ context.getGCOptions().getConcurrency()));
+ }
return new CheckpointCompactor(context.getGCListener(),
new ParallelCompactor(context.getGCListener(), writer,
context.getCompactionMonitor(),
context.getGCOptions().getConcurrency()));
case CHECKPOINT_COMPACTOR:
+ if (useLegacyImpl.getAsBoolean()) {
+ return new
LegacyCheckpointCompactor(context.getGCListener(),
+ new ClassicCompactor(writer,
context.getCompactionMonitor()));
+ }
return new CheckpointCompactor(context.getGCListener(),
new ClassicCompactor(writer,
context.getCompactionMonitor()));
case CLASSIC_COMPACTOR:
diff --git
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/AbstractCompactorExternalBlobTest.java
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/AbstractCompactorExternalBlobTest.java
index a6aac0da21..0172b5f6a2 100644
---
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/AbstractCompactorExternalBlobTest.java
+++
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/AbstractCompactorExternalBlobTest.java
@@ -19,7 +19,6 @@
package org.apache.jackrabbit.oak.segment;
import static java.util.concurrent.TimeUnit.DAYS;
-import static
org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
import static
org.apache.jackrabbit.oak.segment.CompactorTestUtils.SimpleCompactor;
import static
org.apache.jackrabbit.oak.segment.CompactorTestUtils.SimpleCompactorFactory;
import static
org.apache.jackrabbit.oak.segment.CompactorTestUtils.addTestContent;
@@ -27,7 +26,8 @@ import static
org.apache.jackrabbit.oak.segment.CompactorTestUtils.assertSameRec
import static
org.apache.jackrabbit.oak.segment.CompactorTestUtils.assertSameStableId;
import static
org.apache.jackrabbit.oak.segment.CompactorTestUtils.checkGeneration;
import static org.apache.jackrabbit.oak.segment.CompactorTestUtils.createBlob;
-import static
org.apache.jackrabbit.oak.segment.CompactorTestUtils.getCheckpoint;
+import static
org.apache.jackrabbit.oak.segment.CompactorTestUtils.getCheckpointRoot;
+import static org.apache.jackrabbit.oak.segment.SegmentNodeStore.ROOT;
import static
org.apache.jackrabbit.oak.segment.file.FileStoreBuilder.fileStoreBuilder;
import static
org.apache.jackrabbit.oak.segment.file.tar.GCGeneration.newGCGeneration;
import static org.junit.Assert.assertNotNull;
@@ -56,8 +56,6 @@ import org.junit.runners.Parameterized;
import java.io.File;
import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
@RunWith(Parameterized.class)
public abstract class AbstractCompactorExternalBlobTest {
@@ -80,16 +78,12 @@ public abstract class AbstractCompactorExternalBlobTest {
public RuleChain rules = RuleChain.outerRule(folder)
.around(temporaryBlobStore);
- @Parameterized.Parameters
- public static List<SimpleCompactorFactory> compactorFactories() {
- return Arrays.asList(
- compactor -> compactor::compactUp,
- compactor -> (node, canceller) -> compactor.compactDown(node,
canceller, canceller),
- compactor -> (node, canceller) ->
compactor.compact(EMPTY_NODE, node, EMPTY_NODE, canceller)
- );
+ @Parameterized.Parameters(name = "{index}: {0}")
+ public static Iterable<Object[]> compactorFactories() {
+ return AbstractCompactorTest.compactorFactories();
}
- public AbstractCompactorExternalBlobTest(@NotNull SimpleCompactorFactory
compactorFactory) {
+ public AbstractCompactorExternalBlobTest(@SuppressWarnings({"unused",
"java:S1172"}) String name, @NotNull SimpleCompactorFactory compactorFactory) {
this.compactorFactory = compactorFactory;
}
@@ -146,12 +140,12 @@ public abstract class AbstractCompactorExternalBlobTest {
checkGeneration(compacted1, compactedGeneration);
assertSameStableId(uncompacted1, compacted1);
- assertSameStableId(getCheckpoint(uncompacted1, cp1),
getCheckpoint(compacted1, cp1));
- assertSameStableId(getCheckpoint(uncompacted1, cp2),
getCheckpoint(compacted1, cp2));
- assertSameStableId(getCheckpoint(uncompacted1, cp3),
getCheckpoint(compacted1, cp3));
- assertSameStableId(getCheckpoint(uncompacted1, cp4),
getCheckpoint(compacted1, cp4));
- assertSameStableId(getCheckpoint(uncompacted1, cp5),
getCheckpoint(compacted1, cp5));
- assertSameRecord(getCheckpoint(compacted1, cp5),
compacted1.getChildNode("root"));
+ assertSameStableId(getCheckpointRoot(uncompacted1, cp1),
getCheckpointRoot(compacted1, cp1));
+ assertSameStableId(getCheckpointRoot(uncompacted1, cp2),
getCheckpointRoot(compacted1, cp2));
+ assertSameStableId(getCheckpointRoot(uncompacted1, cp3),
getCheckpointRoot(compacted1, cp3));
+ assertSameStableId(getCheckpointRoot(uncompacted1, cp4),
getCheckpointRoot(compacted1, cp4));
+ assertSameStableId(getCheckpointRoot(uncompacted1, cp5),
getCheckpointRoot(compacted1, cp5));
+ assertSameRecord(getCheckpointRoot(compacted1, cp5),
compacted1.getChildNode(ROOT));
}
private static void updateTestContent(@NotNull String parent, @NotNull
NodeStore nodeStore)
@@ -162,4 +156,4 @@ public abstract class AbstractCompactorExternalBlobTest {
nodeStore.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
}
-}
\ No newline at end of file
+}
diff --git
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/AbstractCompactorTest.java
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/AbstractCompactorTest.java
index 298318e362..c029240fc2 100644
---
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/AbstractCompactorTest.java
+++
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/AbstractCompactorTest.java
@@ -19,14 +19,15 @@
package org.apache.jackrabbit.oak.segment;
import static java.util.concurrent.TimeUnit.DAYS;
-import static
org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
import static
org.apache.jackrabbit.oak.segment.CompactorTestUtils.SimpleCompactor;
import static
org.apache.jackrabbit.oak.segment.CompactorTestUtils.SimpleCompactorFactory;
import static
org.apache.jackrabbit.oak.segment.CompactorTestUtils.addTestContent;
import static
org.apache.jackrabbit.oak.segment.CompactorTestUtils.assertSameRecord;
import static
org.apache.jackrabbit.oak.segment.CompactorTestUtils.assertSameStableId;
import static
org.apache.jackrabbit.oak.segment.CompactorTestUtils.checkGeneration;
-import static
org.apache.jackrabbit.oak.segment.CompactorTestUtils.getCheckpoint;
+import static
org.apache.jackrabbit.oak.segment.CompactorTestUtils.getCheckpointRoot;
+import static
org.apache.jackrabbit.oak.segment.CompactorTestUtils.getCheckpointSuperRoot;
+import static org.apache.jackrabbit.oak.segment.SegmentNodeStore.ROOT;
import static
org.apache.jackrabbit.oak.segment.file.FileStoreBuilder.fileStoreBuilder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -39,7 +40,6 @@ import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
-import java.util.List;
import org.apache.jackrabbit.oak.segment.file.CompactedNodeState;
import org.apache.jackrabbit.oak.segment.file.FileStore;
@@ -78,16 +78,15 @@ public abstract class AbstractCompactorTest {
private GCGeneration partialGeneration;
private GCGeneration targetGeneration;
- @Parameterized.Parameters
- public static List<SimpleCompactorFactory> compactorFactories() {
+ @Parameterized.Parameters(name = "{index}: {0}")
+ public static Iterable<Object[]> compactorFactories() {
return Arrays.asList(
- compactor -> compactor::compactUp,
- compactor -> (node, canceller) -> compactor.compactDown(node,
canceller, canceller),
- compactor -> (node, canceller) ->
compactor.compact(EMPTY_NODE, node, EMPTY_NODE, canceller)
+ new Object[] {"compactUp", ((SimpleCompactorFactory)compactor
-> compactor::compactUp)},
+ new Object[] {"compactDown",
((SimpleCompactorFactory)compactor -> (node, canceller) ->
compactor.compactDown(node, canceller, canceller))}
);
}
- public AbstractCompactorTest(@NotNull SimpleCompactorFactory
compactorFactory) {
+ public AbstractCompactorTest(@SuppressWarnings({"unused", "java:S1172"})
String name, @NotNull SimpleCompactorFactory compactorFactory) {
this.compactorFactory = compactorFactory;
}
@@ -130,9 +129,9 @@ public abstract class AbstractCompactorTest {
checkGeneration(compacted1, targetGeneration);
assertSameStableId(uncompacted1, compacted1);
- assertSameStableId(getCheckpoint(uncompacted1, cp1),
getCheckpoint(compacted1, cp1));
- assertSameStableId(getCheckpoint(uncompacted1, cp2),
getCheckpoint(compacted1, cp2));
- assertSameRecord(getCheckpoint(compacted1, cp2),
compacted1.getChildNode("root"));
+ assertSameStableId(getCheckpointRoot(uncompacted1, cp1),
getCheckpointRoot(compacted1, cp1));
+ assertSameStableId(getCheckpointRoot(uncompacted1, cp2),
getCheckpointRoot(compacted1, cp2));
+ assertSameRecord(getCheckpointRoot(compacted1, cp2),
compacted1.getChildNode(ROOT));
// Simulate a 2nd compaction cycle
addTestContent("cp3", nodeStore, 42);
@@ -141,22 +140,39 @@ public abstract class AbstractCompactorTest {
String cp4 = nodeStore.checkpoint(DAYS.toMillis(1));
SegmentNodeState uncompacted2 = fileStore.getHead();
- SegmentNodeState compacted2 = compactor.compact(uncompacted1,
uncompacted2, compacted1, Canceller.newCanceller());
+
+ // A new generation is needed for compaction, because the previous
compaction was fully-compacted.
+ // If the previous compaction had been partial, the same target
generation could be used.
+ GCGeneration secondTargetGeneration = targetGeneration.nextFull();
+ compactor = createCompactor(fileStore, new
GCIncrement(targetGeneration, targetGeneration.nextPartial(),
secondTargetGeneration), compactionMonitor);
+ SegmentNodeState compacted2 = compactor.compact(compacted1,
uncompacted2, compacted1, Canceller.newCanceller());
+
assertNotNull(compacted2);
assertNotSame(uncompacted2, compacted2);
- checkGeneration(compacted2, targetGeneration);
+ checkGeneration(compacted2, secondTargetGeneration);
assertTrue(fileStore.getRevisions().setHead(uncompacted2.getRecordId(),
compacted2.getRecordId()));
+ // verify that all checkpoints, including the checkpoint-properties,
are copied over correctly
+ assertEquals(getCheckpointSuperRoot(uncompacted1, cp1),
getCheckpointSuperRoot(compacted2, cp1));
+ assertEquals(getCheckpointSuperRoot(uncompacted1, cp2),
getCheckpointSuperRoot(compacted2, cp2));
+ assertEquals(getCheckpointSuperRoot(uncompacted2, cp3),
getCheckpointSuperRoot(compacted2, cp3));
+ assertEquals(getCheckpointSuperRoot(uncompacted2, cp4),
getCheckpointSuperRoot(compacted2, cp4));
+
+ // verify that the entire tree has the same content
assertEquals(uncompacted2, compacted2);
+
+ // the super root and all root nodes should have the same stable-id
assertSameStableId(uncompacted2, compacted2);
- assertSameStableId(getCheckpoint(uncompacted2, cp1),
getCheckpoint(compacted2, cp1));
- assertSameStableId(getCheckpoint(uncompacted2, cp2),
getCheckpoint(compacted2, cp2));
- assertSameStableId(getCheckpoint(uncompacted2, cp3),
getCheckpoint(compacted2, cp3));
- assertSameStableId(getCheckpoint(uncompacted2, cp4),
getCheckpoint(compacted2, cp4));
- assertSameRecord(getCheckpoint(compacted1, cp1),
getCheckpoint(compacted2, cp1));
- assertSameRecord(getCheckpoint(compacted1, cp2),
getCheckpoint(compacted2, cp2));
- assertSameRecord(getCheckpoint(compacted2, cp4),
compacted2.getChildNode("root"));
+ assertSameStableId(uncompacted2.getChildNode(ROOT),
compacted2.getChildNode(ROOT));
+ assertSameStableId(getCheckpointRoot(uncompacted2, cp1),
getCheckpointRoot(compacted2, cp1));
+ assertSameStableId(getCheckpointRoot(uncompacted2, cp2),
getCheckpointRoot(compacted2, cp2));
+ assertSameStableId(getCheckpointRoot(uncompacted2, cp3),
getCheckpointRoot(compacted2, cp3));
+ assertSameStableId(getCheckpointRoot(uncompacted2, cp4),
getCheckpointRoot(compacted2, cp4));
+
+ // The root of checkpoint 4 and the current root should be
deduplicated,
+ // as there were no changes after the checkpoint was taken.
+ assertSameRecord(getCheckpointRoot(compacted2, cp4),
compacted2.getChildNode(ROOT));
}
@Test
@@ -199,5 +215,6 @@ public abstract class AbstractCompactorTest {
assertNotNull(compacted2);
assertTrue(compacted2.isComplete());
checkGeneration(compacted2, targetGeneration);
+ assertEquals(uncompacted1, compacted2);
}
}
diff --git
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CheckpointCompactorExternalBlobTest.java
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CheckpointCompactorExternalBlobTest.java
index 955eaa2952..ac3d880819 100644
---
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CheckpointCompactorExternalBlobTest.java
+++
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CheckpointCompactorExternalBlobTest.java
@@ -32,8 +32,8 @@ import static
org.apache.jackrabbit.oak.segment.CompactorTestUtils.SimpleCompact
@RunWith(Parameterized.class)
public class CheckpointCompactorExternalBlobTest extends
AbstractCompactorExternalBlobTest {
- public CheckpointCompactorExternalBlobTest(@NotNull SimpleCompactorFactory
compactorFactory) {
- super(compactorFactory);
+ public CheckpointCompactorExternalBlobTest(String name, @NotNull
SimpleCompactorFactory compactorFactory) {
+ super(name, compactorFactory);
}
@Override
diff --git
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CheckpointCompactorTest.java
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CheckpointCompactorTest.java
index 80b18a8be0..34a68869f6 100644
---
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CheckpointCompactorTest.java
+++
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CheckpointCompactorTest.java
@@ -32,8 +32,8 @@ import static
org.apache.jackrabbit.oak.segment.CompactorTestUtils.SimpleCompact
@RunWith(Parameterized.class)
public class CheckpointCompactorTest extends AbstractCompactorTest {
- public CheckpointCompactorTest(@NotNull SimpleCompactorFactory
compactorFactory) {
- super(compactorFactory);
+ public CheckpointCompactorTest(String name, @NotNull
SimpleCompactorFactory compactorFactory) {
+ super(name, compactorFactory);
}
@Override
diff --git
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CompactToDifferentNodeStoreTest.java
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CompactToDifferentNodeStoreTest.java
new file mode 100644
index 0000000000..939aeb767e
--- /dev/null
+++
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CompactToDifferentNodeStoreTest.java
@@ -0,0 +1,145 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.jackrabbit.oak.segment;
+
+import org.apache.commons.lang3.RandomStringUtils;
+import org.apache.jackrabbit.oak.api.CommitFailedException;
+import org.apache.jackrabbit.oak.api.Type;
+import org.apache.jackrabbit.oak.plugins.memory.BinaryPropertyState;
+import org.apache.jackrabbit.oak.segment.file.CompactedNodeState;
+import org.apache.jackrabbit.oak.segment.file.CompactionWriter;
+import org.apache.jackrabbit.oak.segment.file.FileStore;
+import org.apache.jackrabbit.oak.segment.file.GCNodeWriteMonitor;
+import org.apache.jackrabbit.oak.segment.file.InvalidFileStoreVersionException;
+import org.apache.jackrabbit.oak.segment.file.ReadOnlyFileStore;
+import org.apache.jackrabbit.oak.segment.file.cancel.Canceller;
+import org.apache.jackrabbit.oak.segment.file.tar.GCGeneration;
+import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
+import org.apache.jackrabbit.oak.spi.commit.EmptyHook;
+import org.apache.jackrabbit.oak.spi.gc.GCMonitor;
+import org.apache.jackrabbit.oak.spi.gc.LoggingGCMonitor;
+import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.util.Arrays;
+
+import static org.apache.jackrabbit.oak.segment.SegmentNodeStore.ROOT;
+import static
org.apache.jackrabbit.oak.segment.file.FileStoreBuilder.fileStoreBuilder;
+import static org.junit.Assert.assertEquals;
+
+@RunWith(Parameterized.class)
+public class CompactToDifferentNodeStoreTest {
+
+ private static final Logger LOG =
LoggerFactory.getLogger(CompactToDifferentNodeStoreTest.class);
+
+ private final CompactorFactory compactorFactory;
+
+ @Rule
+ public TemporaryFolder folder = new TemporaryFolder(new File("target"));
+
+ public CompactToDifferentNodeStoreTest(@SuppressWarnings({"unused",
"java:S1172"}) String name, CompactorFactory compactorFactory) {
+ this.compactorFactory = compactorFactory;
+ }
+
+ public interface CompactorFactory {
+ Compactor createCompactor(GCMonitor gcListener, CompactionWriter
writer, GCNodeWriteMonitor compactionMonitor);
+ }
+
+ @Parameterized.Parameters(name = "{0}")
+ public static Iterable<Object[]> compactorFactories() {
+ return Arrays.asList(
+ new Object[] { "ClassicCompactor", (CompactorFactory)
(gcListener, writer, compactionMonitor) -> new ClassicCompactor(writer,
compactionMonitor) },
+ new Object[] { "CheckpointCompactor", (CompactorFactory)
(gcListener, writer, compactionMonitor) -> new CheckpointCompactor(gcListener,
new ClassicCompactor(writer, compactionMonitor)) },
+ new Object[] { "CheckpointCompactor-parallel",
(CompactorFactory) (gcListener, writer, compactionMonitor) -> new
CheckpointCompactor(gcListener, new ParallelCompactor(gcListener, writer,
compactionMonitor, 4)) }
+ );
+ }
+
+ @Test
+ public void testCompactCopy() throws InvalidFileStoreVersionException,
IOException, CommitFailedException {
+ File sourceFolder = folder.newFolder("source");
+ createGarbage(sourceFolder);
+
+ File targetFolder = folder.newFolder("target");
+
+ try (ReadOnlyFileStore compactionSource =
fileStoreBuilder(sourceFolder).buildReadOnly()) {
+
+ CompactedNodeState compactedNodeState;
+ try (FileStore compactionTarget =
fileStoreBuilder(targetFolder).build()) {
+ compactedNodeState = compact(compactionSource,
compactionTarget);
+ }
+
+ try (ReadOnlyFileStore result =
fileStoreBuilder(targetFolder).buildReadOnly()) {
+ SegmentNodeStore s =
SegmentNodeStoreBuilders.builder(compactionSource).build();
+ SegmentNodeStore t =
SegmentNodeStoreBuilders.builder(result).build();
+
+ assertEquals("value100",
compactedNodeState.getChildNode(ROOT).getChildNode("test").getProperty("property").getValue(Type.STRING));
+ assertEquals(
+
s.getRoot().getChildNode("garbage").getProperty("binary"),
+
compactedNodeState.getChildNode(ROOT).getChildNode("garbage").getProperty("binary"));
+ assertEquals(
+
s.getRoot().getChildNode("garbage").getProperty("binary"),
+
t.getRoot().getChildNode("garbage").getProperty("binary"));
+ }
+ }
+ }
+
+ private CompactedNodeState compact(ReadOnlyFileStore source, FileStore
target) {
+ GCGeneration headGeneration =
target.getHead().getRecordId().getSegment().getGcGeneration();
+ CompactionWriter writer = new CompactionWriter(target.getReader(),
target.getBlobStore(), headGeneration, target.getWriter());
+ GCMonitor gcListener = new LoggingGCMonitor(LOG);
+ GCNodeWriteMonitor compactionMonitor = new GCNodeWriteMonitor(1,
gcListener);
+ Compactor compactor = compactorFactory.createCompactor(gcListener,
writer, compactionMonitor);
+ try {
+ CompactedNodeState compactedNodeState =
compactor.compactUp(target.getHead(), source.getHead(),
Canceller.newCanceller());
+ if (compactedNodeState != null) {
+ target.getRevisions().setHead(target.getHead().getRecordId(),
compactedNodeState.getRecordId());
+ }
+ return compactedNodeState;
+ } catch (IOException e) {
+ throw new UncheckedIOException(e);
+ }
+ }
+
+ private static void createGarbage(File source) throws
InvalidFileStoreVersionException, IOException, CommitFailedException {
+ try (FileStore store = fileStoreBuilder(source).build()) {
+ SegmentNodeStore nodeStore =
SegmentNodeStoreBuilders.builder(store).build();
+ for (int i = 0; i <= 100; i++) {
+ NodeBuilder builder = nodeStore.getRoot().builder();
+ builder.child("test").setProperty("property", "value" + i);
+ if (builder.hasChildNode("garbage")) {
+ builder.getChildNode("garbage").remove();
+ } else {
+
builder.child("garbage").setProperty(BinaryPropertyState.binaryProperty("binary",
RandomStringUtils.insecure().nextPrint(10_000)));
+ }
+ nodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
+ store.flush();
+ }
+ }
+ }
+
+}
diff --git
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CompactorTestUtils.java
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CompactorTestUtils.java
index 30d804f657..88e1ff74b3 100644
---
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CompactorTestUtils.java
+++
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CompactorTestUtils.java
@@ -18,6 +18,8 @@
package org.apache.jackrabbit.oak.segment;
import static
org.apache.jackrabbit.oak.plugins.memory.MultiBinaryPropertyState.binaryPropertyFromBlob;
+import static org.apache.jackrabbit.oak.segment.SegmentNodeStore.CHECKPOINTS;
+import static org.apache.jackrabbit.oak.segment.SegmentNodeStore.ROOT;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@@ -61,11 +63,17 @@ public class CompactorTestUtils {
}
}
- public static NodeState getCheckpoint(NodeState superRoot, String name) {
+ public static NodeState getCheckpointSuperRoot(NodeState superRoot, String
name) {
NodeState checkpoint = superRoot
- .getChildNode("checkpoints")
- .getChildNode(name)
- .getChildNode("root");
+ .getChildNode(CHECKPOINTS)
+ .getChildNode(name);
+ assertTrue(checkpoint.exists());
+ return checkpoint;
+ }
+
+ public static NodeState getCheckpointRoot(NodeState superRoot, String
name) {
+ NodeState checkpoint = getCheckpointSuperRoot(superRoot, name)
+ .getChildNode(ROOT);
assertTrue(checkpoint.exists());
return checkpoint;
}
diff --git
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CheckpointCompactorTest.java
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/LegacyCheckpointCompactorTest.java
similarity index 83%
copy from
oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CheckpointCompactorTest.java
copy to
oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/LegacyCheckpointCompactorTest.java
index 80b18a8be0..75c9f41fbf 100644
---
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/CheckpointCompactorTest.java
+++
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/LegacyCheckpointCompactorTest.java
@@ -18,26 +18,26 @@
package org.apache.jackrabbit.oak.segment;
+import org.apache.jackrabbit.oak.segment.file.CompactionWriter;
import org.apache.jackrabbit.oak.segment.file.FileStore;
import org.apache.jackrabbit.oak.segment.file.GCIncrement;
import org.apache.jackrabbit.oak.segment.file.GCNodeWriteMonitor;
-import org.apache.jackrabbit.oak.segment.file.CompactionWriter;
import org.apache.jackrabbit.oak.spi.gc.GCMonitor;
import org.jetbrains.annotations.NotNull;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
-import static
org.apache.jackrabbit.oak.segment.DefaultSegmentWriterBuilder.defaultSegmentWriterBuilder;
import static
org.apache.jackrabbit.oak.segment.CompactorTestUtils.SimpleCompactorFactory;
+import static
org.apache.jackrabbit.oak.segment.DefaultSegmentWriterBuilder.defaultSegmentWriterBuilder;
@RunWith(Parameterized.class)
-public class CheckpointCompactorTest extends AbstractCompactorTest {
- public CheckpointCompactorTest(@NotNull SimpleCompactorFactory
compactorFactory) {
- super(compactorFactory);
+public class LegacyCheckpointCompactorTest extends AbstractCompactorTest {
+ public LegacyCheckpointCompactorTest(String name, @NotNull
SimpleCompactorFactory compactorFactory) {
+ super(name, compactorFactory);
}
@Override
- protected CheckpointCompactor createCompactor(
+ protected Compactor createCompactor(
@NotNull FileStore fileStore,
@NotNull GCIncrement increment,
@NotNull GCNodeWriteMonitor compactionMonitor
@@ -46,6 +46,6 @@ public class CheckpointCompactorTest extends
AbstractCompactorTest {
.withGeneration(generation)
.build(fileStore);
CompactionWriter compactionWriter = new
CompactionWriter(fileStore.getReader(), fileStore.getBlobStore(), increment,
writerFactory);
- return new CheckpointCompactor(GCMonitor.EMPTY, new
ClassicCompactor(compactionWriter, compactionMonitor));
+ return new LegacyCheckpointCompactor(GCMonitor.EMPTY, new
ClassicCompactor(compactionWriter, compactionMonitor));
}
}
diff --git
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/ParallelCompactorExternalBlobTest.java
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/ParallelCompactorExternalBlobTest.java
index 06a26bb3f9..8346363815 100644
---
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/ParallelCompactorExternalBlobTest.java
+++
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/ParallelCompactorExternalBlobTest.java
@@ -27,7 +27,6 @@ import org.jetbrains.annotations.NotNull;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
-import java.util.ArrayList;
import java.util.List;
import static
org.apache.jackrabbit.oak.segment.DefaultSegmentWriterBuilder.defaultSegmentWriterBuilder;
@@ -38,21 +37,13 @@ public class ParallelCompactorExternalBlobTest extends
AbstractCompactorExternal
private final int concurrency;
- @Parameterized.Parameters
+ @Parameterized.Parameters(name = "{index}: {0} concurrency={2}")
public static List<Object[]> parameters() {
- Integer[] concurrencyLevels = {1, 2, 4, 8, 16};
-
- List<Object[]> parameters = new ArrayList<>();
- for (SimpleCompactorFactory factory :
AbstractCompactorExternalBlobTest.compactorFactories()) {
- for (int concurrency : concurrencyLevels) {
- parameters.add(new Object[]{factory, concurrency});
- }
- }
- return parameters;
+ return ParallelCompactorTest.parameters();
}
- public ParallelCompactorExternalBlobTest(@NotNull SimpleCompactorFactory
compactorFactory, int concurrency) {
- super(compactorFactory);
+ public ParallelCompactorExternalBlobTest(String name, @NotNull
SimpleCompactorFactory compactorFactory, int concurrency) {
+ super(name, compactorFactory);
this.concurrency = concurrency;
}
diff --git
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/ParallelCompactorTest.java
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/ParallelCompactorTest.java
index f4e4d81ae8..28062d34b2 100644
---
a/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/ParallelCompactorTest.java
+++
b/oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/ParallelCompactorTest.java
@@ -38,21 +38,21 @@ public class ParallelCompactorTest extends
AbstractCompactorTest {
private final int concurrency;
- @Parameterized.Parameters
+ @Parameterized.Parameters(name = "{index}: {0} concurrency={2}")
public static List<Object[]> parameters() {
Integer[] concurrencyLevels = {1, 2, 4, 8, 16};
List<Object[]> parameters = new ArrayList<>();
- for (SimpleCompactorFactory factory :
AbstractCompactorExternalBlobTest.compactorFactories()) {
+ for (Object[] args : AbstractCompactorTest.compactorFactories()) {
for (int concurrency : concurrencyLevels) {
- parameters.add(new Object[]{factory, concurrency});
+ parameters.add(new Object[]{args[0], args[1], concurrency});
}
}
return parameters;
}
- public ParallelCompactorTest(@NotNull SimpleCompactorFactory
compactorFactory, int concurrency) {
- super(compactorFactory);
+ public ParallelCompactorTest(String name, @NotNull SimpleCompactorFactory
compactorFactory, int concurrency) {
+ super(name, compactorFactory);
this.concurrency = concurrency;
}