This is an automated email from the ASF dual-hosted git repository.
thomasm pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/jackrabbit-oak.git
The following commit(s) were added to refs/heads/trunk by this push:
new 0178047ee9 OAK-11478 Node store statistics: support the tree store (#2071)
0178047ee9 is described below
commit 0178047ee95c8d71e83e8e1cb7afd22cba772414
Author: Thomas Mueller <[email protected]>
AuthorDate: Mon Mar 24 08:03:11 2025 +0100
OAK-11478 Node store statistics: support the tree store (#2071)
* OAK-11478 Node store statistics: support the tree store
* OAK-11478 Node store statistics: support the tree store
* OAK-11478 Node store statistics: support the tree store
---
.../apache/jackrabbit/oak/commons/Profiler.java | 9 +-
.../document/flatfile/analysis/StatsBuilder.java | 64 ++++----
.../analysis/modules/DistinctBinarySize.java | 8 +-
.../analysis/stream/NodeStreamConverter.java | 103 -------------
.../stream/NodeStreamConverterCompressed.java | 134 -----------------
.../flatfile/analysis/stream/NodeStreamReader.java | 166 ---------------------
.../stream/NodeStreamReaderCompressed.java | 155 -------------------
...odeLineReader.java => NodeTreeStoreReader.java} | 130 +++++++---------
.../flatfile/analysis/stream/NodeStreamTest.java | 98 ------------
.../flatfile/analysis/stream/VarIntTest.java | 45 ------
.../oak/index/merge/IndexStoreCommand.java | 4 +-
.../oak/index/merge/IndexStoreStatsCommand.java | 75 ++++++++++
.../apache/jackrabbit/oak/run/AvailableModes.java | 4 +-
13 files changed, 175 insertions(+), 820 deletions(-)
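The change removes the --stream/--compressedStream readers in favor of a single tree-store reader and exposes the statistics run as StatsBuilder.buildStats(fileName, nodeNameFilter, profiler). As a hedged sketch (not part of the commit; the tree store path and filter value below are illustrative), the new entry point can be driven directly:

    import org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.StatsBuilder;

    public class TreeStoreStatsExample {
        public static void main(String... args) throws Exception {
            // fileName: tree store location (illustrative path)
            // nodeNameFilter: node name filter for binary statistics; null or empty disables it
            // profiler: also run the sampling profiler while collecting
            StatsBuilder.buildStats("/tmp/tree-store", "jcr:content", false);
        }
    }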
diff --git a/oak-commons/src/main/java/org/apache/jackrabbit/oak/commons/Profiler.java b/oak-commons/src/main/java/org/apache/jackrabbit/oak/commons/Profiler.java
index bd5044d5d1..f8e131306d 100644
--- a/oak-commons/src/main/java/org/apache/jackrabbit/oak/commons/Profiler.java
+++ b/oak-commons/src/main/java/org/apache/jackrabbit/oak/commons/Profiler.java
@@ -57,6 +57,7 @@ public class Profiler implements Runnable {
"sun," +
"com.sun.," +
"com.mongodb.," +
+ "org.apache.jackrabbit.oak.commons.Profiler.," +
"org.bson.,"
).split(",");
private final String[] ignorePackages = (
@@ -380,7 +381,11 @@ public class Profiler implements Runnable {
private void processList(List<Object[]> list) {
for (Object[] dump : list) {
- if (startsWithAny(dump[0].toString(), ignoreThreads)) {
+ String el = dump[0].toString();
+ if (el.startsWith("app//")) {
+ el = el.substring("app//".length());
+ }
+ if (startsWithAny(el, ignoreThreads)) {
continue;
}
StringBuilder buff = new StringBuilder();
@@ -388,7 +393,7 @@ public class Profiler implements Runnable {
String last = null;
boolean packageCounts = false;
for (int j = 0, i = 0; i < dump.length && j < depth; i++) {
- String el = dump[i].toString();
+ el = dump[i].toString();
if (el.startsWith("app//")) {
el = el.substring("app//".length());
}
diff --git a/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/StatsBuilder.java b/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/StatsBuilder.java
index f87216d249..018670e97d 100644
--- a/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/StatsBuilder.java
+++ b/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/StatsBuilder.java
@@ -34,54 +34,52 @@ import org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.module
import org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.modules.TopLargestBinaries;
import org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.stream.NodeData;
import org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.stream.NodeDataReader;
-import org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.stream.NodeLineReader;
-import org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.stream.NodeStreamReader;
-import org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.stream.NodeStreamReaderCompressed;
+import org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.stream.NodeTreeStoreReader;
/**
* Builder for commonly used statistics for flat file stores.
*/
public class StatsBuilder {
- private static final boolean ONLY_READ = false;
-
- /**
- * Read a flat file store and build statistics.
- *
- * @param args the file name
- */
public static void main(String... args) throws Exception {
String fileName = null;
String nodeNameFilter = null;
- boolean stream = false;
- boolean compressedStream = false;
+ boolean profiler = false;
for(int i = 0; i<args.length; i++) {
String a = args[i];
if (a.equals("--fileName")) {
fileName = args[++i];
} else if (a.equals("--nodeNameFilter")) {
nodeNameFilter = args[++i];
- } else if (a.endsWith("--stream")) {
- stream = true;
- } else if (a.equals("--compressedStream")) {
- compressedStream = true;
+ } else if (a.equals("--profiler")) {
+ profiler = true;
}
}
if (fileName == null) {
System.out.println("Command line arguments:");
System.out.println(" --fileName <file name> (flat file store
file name; mandatory)");
System.out.println(" --nodeNameFilter <filter> (node name filter
for binaries; optional)");
- System.out.println(" --stream (use a stream
file; optional)");
- System.out.println(" --compressedStream (use a compressed
stream file; optional)");
+ System.out.println(" --profiler (enable the
build-in profiler; optional)");
return;
}
+ buildStats(fileName, nodeNameFilter, profiler);
+ }
+
+ /**
+ * Read a flat file store and build statistics.
+ *
+ * @param fileName the file name
+ * @param nodeNameFilter the node names to filter
+ * @param profiler also run the profiler
+ */
+ public static void buildStats(String fileName, String nodeNameFilter, boolean profiler) throws IOException {
System.out.println("Processing " + fileName);
ListCollector collectors = new ListCollector();
collectors.add(new NodeCount(1000, 1));
PropertyStats ps = new PropertyStats(false, 1);
collectors.add(ps);
collectors.add(new NodeTypeCount());
- if (nodeNameFilter != null) {
+ if (nodeNameFilter != null && !nodeNameFilter.isEmpty()) {
collectors.add(new NodeNameFilter(nodeNameFilter, new BinarySize(false, 1)));
collectors.add(new NodeNameFilter(nodeNameFilter, new BinarySize(true, 1)));
collectors.add(new NodeNameFilter(nodeNameFilter, new BinarySizeHistogram(1)));
@@ -94,18 +92,16 @@ public class StatsBuilder {
collectors.add(new DistinctBinarySizeHistogram(1));
collectors.add(new DistinctBinarySize(16, 16));
- Profiler prof = new Profiler().startCollecting();
- NodeDataReader reader;
- if (compressedStream) {
- reader = NodeStreamReaderCompressed.open(fileName);
- } else if (stream) {
- reader = NodeStreamReader.open(fileName);
- } else {
- reader = NodeLineReader.open(fileName);
+ Profiler prof = null;
+ if (profiler) {
+ prof = new Profiler().startCollecting();
}
+ NodeDataReader reader = NodeTreeStoreReader.open(fileName);
collect(reader, collectors);
- System.out.println(prof.getTop(10));
+ if (profiler) {
+ System.out.println(prof.getTop(10));
+ }
System.out.println();
System.out.println("Results");
System.out.println();
@@ -123,11 +119,13 @@ public class StatsBuilder {
if (node == null) {
break;
}
- if (++lineCount % 1000000 == 0) {
- System.out.println(lineCount + " lines; " + reader.getProgressPercent() + "%");
- }
- if (ONLY_READ) {
- continue;
+ if (++lineCount % 1_000_000 == 0) {
+ String msg = lineCount + " entries";
+ int progressPercent = reader.getProgressPercent();
+ if (progressPercent != 0) {
+ msg += "; " + progressPercent + "%";
+ }
+ System.out.println(msg);
}
if (last != null) {
while (last != null && last.getPathElements().size() >= node.getPathElements().size()) {
diff --git a/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/modules/DistinctBinarySize.java b/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/modules/DistinctBinarySize.java
index 7f6738b08e..d1583007c7 100644
--- a/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/modules/DistinctBinarySize.java
+++ b/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/modules/DistinctBinarySize.java
@@ -106,7 +106,7 @@ public class DistinctBinarySize implements StatsCollector {
referenceCount += list.size();
for(BinaryId id : list) {
referenceSize += id.getLength();
- if (largeBinariesCountMax > 0 && id.getLength() >= largeBinarySizeThreshold) {
+ if (largeBinariesCountMax > 0 && id.getLength() > largeBinarySizeThreshold) {
largeBinaries.add(id);
truncateLargeBinariesSet();
} else {
@@ -132,15 +132,15 @@ public class DistinctBinarySize implements StatsCollector {
}
long[] lengths = new long[largeBinaries.size()];
int i = 0;
- for(BinaryId id : largeBinaries) {
+ for (BinaryId id : largeBinaries) {
lengths[i++] = id.getLength();
}
Arrays.sort(lengths);
// the new threshold is the median of all the lengths
largeBinarySizeThreshold = lengths[largeBinariesCountMax];
- for(Iterator<BinaryId> it = largeBinaries.iterator(); it.hasNext();) {
+ for (Iterator<BinaryId> it = largeBinaries.iterator(); it.hasNext();) {
BinaryId id = it.next();
- if (id.getLength() < largeBinarySizeThreshold) {
+ if (id.getLength() <= largeBinarySizeThreshold) {
addToBloomFilter(id);
it.remove();
}
diff --git a/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/NodeStreamConverter.java b/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/NodeStreamConverter.java
deleted file mode 100644
index 6e812f6222..0000000000
--- a/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/NodeStreamConverter.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.stream;
-
-import java.io.BufferedOutputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.charset.StandardCharsets;
-
-import net.jpountz.lz4.LZ4FrameOutputStream;
-
-/**
- * Allows to convert a flat-file store to a node stream.
- */
-public class NodeStreamConverter {
-
- public static void main(String... args) throws IOException {
- String sourceFileName = args[0];
- String targetFileName = args[1];
- convert(sourceFileName, targetFileName);
- }
-
- public static void convert(String sourceFileName, String targetFileName) throws IOException {
- try (NodeLineReader in = NodeLineReader.open(sourceFileName)) {
- try (OutputStream fileOut = new BufferedOutputStream(new FileOutputStream(targetFileName))) {
- try (OutputStream out = new LZ4FrameOutputStream(fileOut)) {
- while (true) {
- NodeData node = in.readNode();
- if (node == null) {
- break;
- }
- writeNode(out, node);
- }
- }
- }
- }
- }
-
- private static void writeNode(OutputStream out, NodeData node) throws IOException {
- writeVarInt(out, node.getPathElements().size());
- for(String s : node.getPathElements()) {
- writeString(out, s);
- }
- writeVarInt(out, node.getProperties().size());
- for (NodeProperty p : node.getProperties()) {
- writeString(out, p.getName());
- out.write(p.getType().getOrdinal());
- if (p.isMultiple()) {
- out.write(1);
- writeVarInt(out, p.getValues().length);
- for (String s : p.getValues()) {
- writeString(out, s);
- }
- } else {
- out.write(0);
- writeString(out, p.getValues()[0]);
- }
- }
- }
-
- private static void writeString(OutputStream out, String s) throws IOException {
- if (s == null) {
- writeVarInt(out, -1);
- } else {
- byte[] utf8 = s.getBytes(StandardCharsets.UTF_8);
- writeVarInt(out, utf8.length);
- out.write(utf8);
- }
- }
-
- /**
- * Write a variable size int.
- *
- * @param out the output stream
- * @param x the value
- * @throws IOException if some data could not be written
- */
- public static void writeVarInt(OutputStream out, int x) throws IOException {
- while ((x & ~0x7f) != 0) {
- out.write((byte) (x | 0x80));
- x >>>= 7;
- }
- out.write((byte) x);
- }
-
-}
diff --git a/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/NodeStreamConverterCompressed.java b/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/NodeStreamConverterCompressed.java
deleted file mode 100644
index a8f7cbfd8f..0000000000
--- a/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/NodeStreamConverterCompressed.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.stream;
-
-import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.stream.NodeStreamConverter.writeVarInt;
-
-import java.io.BufferedOutputStream;
-import java.io.Closeable;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.charset.StandardCharsets;
-
-import net.jpountz.lz4.LZ4FrameOutputStream;
-
-/**
- * Allows to to convert a flat file store to a compressed stream of nodes.
- */
-public class NodeStreamConverterCompressed implements Closeable {
-
- private static final int WINDOW_SIZE = 1024;
- private static final int CACHE_SIZE = 8 * 1024;
- private static final int MAX_LENGTH = 1024;
-
- private final OutputStream out;
- private final long[] cacheId = new long[CACHE_SIZE];
- private final String[] cache = new String[CACHE_SIZE];
- private long currentId;
-
- public static void main(String... args) throws IOException {
- String sourceFileName = args[0];
- String targetFileName = args[1];
- convert(sourceFileName, targetFileName);
- }
-
- private NodeStreamConverterCompressed(OutputStream out) {
- this.out = out;
- }
-
- public static void convert(String sourceFileName, String targetFileName) throws IOException {
- try (NodeLineReader in = NodeLineReader.open(sourceFileName)) {
- try (OutputStream fileOut = new BufferedOutputStream(new FileOutputStream(targetFileName))) {
- try (OutputStream out = new LZ4FrameOutputStream(fileOut)) {
- try (NodeStreamConverterCompressed writer = new NodeStreamConverterCompressed(out)) {
- int count = 0;
- while (true) {
- NodeData node = in.readNode();
- if (node == null) {
- break;
- }
- writer.writeNode(node);
- if (++count % 1000000 == 0) {
- System.out.println(count + " lines converted");
- }
- }
- }
- }
- }
- }
- }
-
- private void writeNode(NodeData node) throws IOException {
- writeVarInt(out, node.getPathElements().size());
- for(String s : node.getPathElements()) {
- writeString(s);
- }
- writeVarInt(out, node.getProperties().size());
- for (NodeProperty p : node.getProperties()) {
- writeString(p.getName());
- out.write(p.getType().getOrdinal());
- if (p.isMultiple()) {
- out.write(1);
- writeVarInt(out, p.getValues().length);
- for (String s : p.getValues()) {
- writeString(s);
- }
- } else {
- out.write(0);
- writeString(p.getValues()[0]);
- }
- }
- }
-
- private void writeString(String s) throws IOException {
- if (s == null) {
- NodeStreamConverter.writeVarInt(out, 0);
- return;
- }
- int len = s.length();
- if (len < MAX_LENGTH) {
- if (len == 0) {
- NodeStreamConverter.writeVarInt(out, 1);
- return;
- }
- int index = s.hashCode() & (CACHE_SIZE - 1);
- String old = cache[index];
- if (old != null && old.equals(s)) {
- long offset = currentId - cacheId[index];
- if (offset < WINDOW_SIZE) {
- cacheId[index] = currentId++;
- NodeStreamConverter.writeVarInt(out, (int) ((offset << 1) | 1));
- return;
- }
- }
- cacheId[index] = currentId++;
- cache[index] = s;
- }
- byte[] utf8 = s.getBytes(StandardCharsets.UTF_8);
- writeVarInt(out, utf8.length << 1);
- out.write(utf8);
- }
-
- @Override
- public void close() throws IOException {
- out.close();
- }
-
-}
diff --git a/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/NodeStreamReader.java b/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/NodeStreamReader.java
deleted file mode 100644
index c4d6ef5f23..0000000000
--- a/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/NodeStreamReader.java
+++ /dev/null
@@ -1,166 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.stream;
-
-import java.io.EOFException;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-
-import org.apache.commons.io.input.CountingInputStream;
-import org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.stream.NodeProperty.ValueType;
-
-import net.jpountz.lz4.LZ4FrameInputStream;
-
-/**
- * A node stream reader.
- */
-public class NodeStreamReader implements NodeDataReader {
-
- private final CountingInputStream countIn;
- private final InputStream in;
- private final long fileSize;
-
- private byte[] buffer = new byte[1024 * 1024];
-
- private NodeStreamReader(CountingInputStream countIn, InputStream in, long fileSize) {
- this.countIn = countIn;
- this.in = in;
- this.fileSize = fileSize;
- }
-
- public int getProgressPercent() {
- return (int) (100 * countIn.getByteCount() / Math.max(1, fileSize));
- }
-
- public static NodeStreamReader open(String fileName) throws IOException {
- long fileSize = new File(fileName).length();
- InputStream fileIn = new FileInputStream(fileName);
- CountingInputStream countIn = new CountingInputStream(fileIn);
- try {
- InputStream in;
- if (fileName.endsWith(".lz4")) {
- in = new LZ4FrameInputStream(countIn); //NOSONAR
- } else {
- in = countIn;
- }
- return new NodeStreamReader(countIn, in, fileSize);
- } catch (IOException e) {
- countIn.close();
- throw e;
- }
- }
-
- /**
- * Read a variable size int.
- *
- * @return the value
- * @throws IOException
- */
- public static int readVarInt(InputStream in) throws IOException {
- int b = in.read();
- if ((b & 0x80) == 0) {
- return b;
- }
- // a separate function so that this one can be inlined
- return readVarIntRest(in, b);
- }
-
- private static int readVarIntRest(InputStream in, int b) throws IOException {
- int x = b & 0x7f;
- b = in.read();
- if ((b & 0x80) == 0) {
- return x | (b << 7);
- }
- x |= (b & 0x7f) << 7;
- b = in.read();
- if ((b & 0x80) == 0) {
- return x | (b << 14);
- }
- x |= (b & 0x7f) << 14;
- b = in.read();
- if ((b & 0x80) == 0) {
- return x | b << 21;
- }
- x |= ((b & 0x7f) << 21) | (in.read() << 28);
- return x;
- }
-
- public NodeData readNode() throws IOException {
- int size = readVarInt(in);
- if (size < 0) {
- close();
- return null;
- }
- ArrayList<String> pathElements = new ArrayList<>(size);
- for (int i = 0; i < size; i++) {
- pathElements.add(readString(in));
- }
- int propertyCount = readVarInt(in);
- ArrayList<NodeProperty> properties = new ArrayList<>(propertyCount);
- for (int i = 0; i < propertyCount; i++) {
- NodeProperty p;
- String name = readString(in);
- ValueType type = ValueType.byOrdinal(in.read());
- if (in.read() == 1) {
- int count = readVarInt(in);
- String[] values = new String[count];
- for (int j = 0; j < count; j++) {
- values[j] = readString(in);
- }
- p = new NodeProperty(name, type, values, true);
- } else {
- String value = readString(in);
- p = new NodeProperty(name, type, value);
- }
- properties.add(p);
- }
- return new NodeData(pathElements, properties);
- }
-
- private String readString(InputStream in) throws IOException {
- int len = readVarInt(in);
- if (len == -1) {
- return null;
- }
- byte[] buff = buffer;
- if (len > buff.length) {
- buff = buffer = new byte[len];
- }
- int read = in.readNBytes(buff, 0, len);
- if (read != len) {
- throw new EOFException();
- }
- return new String(buff, 0, len, StandardCharsets.UTF_8);
- }
-
- @Override
- public long getFileSize() {
- return fileSize;
- }
-
- @Override
- public void close() throws IOException {
- in.close();
- }
-
-}
diff --git a/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/NodeStreamReaderCompressed.java b/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/NodeStreamReaderCompressed.java
deleted file mode 100644
index 1ed8c4a641..0000000000
--- a/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/NodeStreamReaderCompressed.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.stream;
-
-import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.stream.NodeStreamReader.readVarInt;
-
-import java.io.EOFException;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-
-import org.apache.commons.io.input.CountingInputStream;
-import org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.stream.NodeProperty.ValueType;
-
-import net.jpountz.lz4.LZ4FrameInputStream;
-
-/**
- * A node stream reader with compression for repeated strings.
- */
-public class NodeStreamReaderCompressed implements NodeDataReader {
-
- private static final int MAX_LENGTH = 1024;
- private static final int WINDOW_SIZE = 1024;
-
- private final CountingInputStream countIn;
- private final InputStream in;
- private final long fileSize;
- private final String[] lastStrings = new String[WINDOW_SIZE];
-
- private long currentId;
- private byte[] buffer = new byte[1024 * 1024];
-
- private NodeStreamReaderCompressed(CountingInputStream countIn, InputStream in, long fileSize) {
- this.countIn = countIn;
- this.in = in;
- this.fileSize = fileSize;
- }
-
- public int getProgressPercent() {
- return (int) (100 * countIn.getByteCount() / Math.max(1, fileSize));
- }
-
- public static NodeStreamReaderCompressed open(String fileName) throws IOException {
- long fileSize = new File(fileName).length();
- InputStream fileIn = new FileInputStream(fileName);
- CountingInputStream countIn = new CountingInputStream(fileIn);
- try {
- InputStream in;
- if (fileName.endsWith(".lz4")) {
- in = new LZ4FrameInputStream(countIn); //NOSONAR
- } else {
- in = countIn;
- }
- return new NodeStreamReaderCompressed(countIn, in, fileSize);
- } catch (IOException e) {
- countIn.close();
- throw e;
- }
- }
-
- public NodeData readNode() throws IOException {
- int size = readVarInt(in);
- if (size < 0) {
- close();
- return null;
- }
- ArrayList<String> pathElements = new ArrayList<>(size);
- for (int i = 0; i < size; i++) {
- pathElements.add(readString(in));
- }
- int propertyCount = readVarInt(in);
- ArrayList<NodeProperty> properties = new ArrayList<>(propertyCount);
- for (int i = 0; i < propertyCount; i++) {
- NodeProperty p;
- String name = readString(in);
- ValueType type = ValueType.byOrdinal(in.read());
- if (in.read() == 1) {
- int count = readVarInt(in);
- String[] values = new String[count];
- for (int j = 0; j < count; j++) {
- values[j] = readString(in);
- }
- p = new NodeProperty(name, type, values, true);
- } else {
- String value = readString(in);
- p = new NodeProperty(name, type, value);
- }
- properties.add(p);
- }
- return new NodeData(pathElements, properties);
- }
-
- private String readString(InputStream in) throws IOException {
- int len = readVarInt(in);
- if (len < 2) {
- if (len == 0) {
- return null;
- } else if (len == 1) {
- return "";
- }
- }
- if ((len & 1) == 1) {
- int offset = len >>> 1;
- String s = lastStrings[(int) (currentId - offset) & (WINDOW_SIZE - 1)];
- lastStrings[(int) currentId & (WINDOW_SIZE - 1)] = s;
- currentId++;
- return s;
- }
- len = len >>> 1;
- byte[] buff = buffer;
- if (len > buff.length) {
- buff = buffer = new byte[len];
- }
- int read = in.readNBytes(buff, 0, len);
- if (read != len) {
- throw new EOFException();
- }
- String s = new String(buff, 0, len, StandardCharsets.UTF_8);
- if (s.length() < MAX_LENGTH) {
- lastStrings[(int) currentId & (WINDOW_SIZE - 1)] = s;
- currentId++;
- }
- return s;
- }
-
- @Override
- public long getFileSize() {
- return fileSize;
- }
-
- @Override
- public void close() throws IOException {
- in.close();
- }
-
-}
diff --git a/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/NodeLineReader.java b/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/NodeTreeStoreReader.java
similarity index 78%
rename from oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/NodeLineReader.java
rename to oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/NodeTreeStoreReader.java
index 6a45bc52f8..b1222097de 100644
--- a/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/NodeLineReader.java
+++ b/oak-run-commons/src/main/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/NodeTreeStoreReader.java
@@ -18,86 +18,62 @@
*/
package org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.stream;
-import java.io.BufferedInputStream;
-import java.io.Closeable;
import java.io.File;
-import java.io.FileInputStream;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.LineNumberReader;
-import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
+import java.util.Iterator;
import java.util.List;
import java.util.Map.Entry;
import javax.jcr.PropertyType;
-import org.apache.commons.io.input.CountingInputStream;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.commons.json.JsonObject;
import org.apache.jackrabbit.oak.commons.json.JsopReader;
import org.apache.jackrabbit.oak.commons.json.JsopTokenizer;
+import org.apache.jackrabbit.oak.index.indexer.document.flatfile.NodeStateEntryReader;
import org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.stream.NodeProperty.PropertyValue;
import org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.stream.NodeProperty.ValueType;
-
-import net.jpountz.lz4.LZ4FrameInputStream;
+import org.apache.jackrabbit.oak.index.indexer.document.tree.TreeStore;
+import org.apache.jackrabbit.oak.spi.blob.BlobStore;
/**
- * A reader for flat file stores.
+ * A reader for tree store files.
*/
-public class NodeLineReader implements NodeDataReader, Closeable {
+public class NodeTreeStoreReader implements NodeDataReader {
- private final CountingInputStream countIn;
- private final LineNumberReader reader;
+ private final TreeStore treeStore;
+ private final Iterator<String> pathIterator;
private final long fileSize;
- private NodeLineReader(CountingInputStream countIn, LineNumberReader reader, long fileSize) {
- this.countIn = countIn;
- this.reader = reader;
- this.fileSize = fileSize;
+ public static NodeDataReader open(String fileName) {
+ BlobStore blobStore = null;
+ NodeStateEntryReader entryReader = new NodeStateEntryReader(blobStore);
+ File file = new File(fileName);
+ TreeStore treeStore = new TreeStore("reader", file, entryReader, 32);
+ return new NodeTreeStoreReader(treeStore, file.length());
}
- public int getProgressPercent() {
- if (fileSize == 0) {
- return 100;
- }
- return (int) (100 * countIn.getByteCount() / fileSize);
+ private NodeTreeStoreReader(TreeStore treeStore, long fileSize) {
+ this.treeStore = treeStore;
+ this.fileSize = fileSize;
+ this.pathIterator = treeStore.iteratorOverPaths();
}
- public static NodeLineReader open(String fileName) throws IOException {
- long fileSize = new File(fileName).length();
- InputStream fileIn = new BufferedInputStream(new FileInputStream(fileName));
- CountingInputStream countIn = new CountingInputStream(fileIn);
- try {
- InputStream in;
- if (fileName.endsWith(".lz4")) {
- in = new LZ4FrameInputStream(countIn);
- } else {
- in = countIn;
- }
- LineNumberReader reader = new LineNumberReader(new InputStreamReader(in, StandardCharsets.UTF_8));
- return new NodeLineReader(countIn, reader, fileSize);
- } catch (IOException e) {
- countIn.close();
- throw e;
- }
+ @Override
+ public void close() throws IOException {
+ treeStore.close();
}
+ @Override
public NodeData readNode() throws IOException {
- String line = reader.readLine();
- if (line == null) {
- close();
+ if (!pathIterator.hasNext()) {
return null;
}
- int pipeIndex = line.indexOf('|');
- if (pipeIndex < 0) {
- throw new IllegalArgumentException("Error: no pipe: " + line);
- }
- String path = line.substring(0, pipeIndex);
+ String path = pathIterator.next();
List<String> pathElements = new ArrayList<>();
PathUtils.elements(path).forEach(pathElements::add);
- String nodeJson = line.substring(pipeIndex + 1);
+ String nodeJson = treeStore.getSession().get(path);
return new NodeData(pathElements, parse(nodeJson));
}
@@ -135,6 +111,32 @@ public class NodeLineReader implements NodeDataReader, Closeable {
return properties;
}
+ public static NodeProperty fromJsonArray(String key, String json) {
+ ArrayList<String> result = new ArrayList<>();
+ ValueType type = null;
+ JsopTokenizer tokenizer = new JsopTokenizer(json);
+ tokenizer.read('[');
+ if (!tokenizer.matches(']')) {
+ do {
+ String r = tokenizer.readRawValue();
+ PropertyValue v = getValue(r);
+ if (v == null) {
+ throw new IllegalArgumentException("Array of empty arrays:
" + json);
+ } else if (type != null && v.type != type) {
+ throw new IllegalArgumentException("Unsupported mixed
type: " + json);
+ }
+ result.add(v.value);
+ type = v.type;
+ } while (tokenizer.matches(','));
+ tokenizer.read(']');
+ }
+ tokenizer.read(JsopReader.END);
+ if (type == null) {
+ type = ValueType.STRING;
+ }
+ return new NodeProperty(key, type, result.toArray(new String[result.size()]), true);
+ }
+
/**
* Convert to a value if possible
*
@@ -188,40 +190,14 @@ public class NodeLineReader implements NodeDataReader, Closeable {
}
}
- public static NodeProperty fromJsonArray(String key, String json) {
- ArrayList<String> result = new ArrayList<>();
- ValueType type = null;
- JsopTokenizer tokenizer = new JsopTokenizer(json);
- tokenizer.read('[');
- if (!tokenizer.matches(']')) {
- do {
- String r = tokenizer.readRawValue();
- PropertyValue v = getValue(r);
- if (v == null) {
- throw new IllegalArgumentException("Array of empty arrays:
" + json);
- } else if (type != null && v.type != type) {
- throw new IllegalArgumentException("Unsupported mixed
type: " + json);
- }
- result.add(v.value);
- type = v.type;
- } while (tokenizer.matches(','));
- tokenizer.read(']');
- }
- tokenizer.read(JsopReader.END);
- if (type == null) {
- type = ValueType.STRING;
- }
- return new NodeProperty(key, type, result.toArray(new String[result.size()]), true);
- }
-
@Override
public long getFileSize() {
return fileSize;
}
@Override
- public void close() throws IOException {
- reader.close();
+ public int getProgressPercent() {
+ return 0;
}
}
diff --git a/oak-run-commons/src/test/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/NodeStreamTest.java b/oak-run-commons/src/test/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/NodeStreamTest.java
deleted file mode 100644
index 3f2818ef16..0000000000
--- a/oak-run-commons/src/test/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/NodeStreamTest.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.stream;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
-
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
-
-public class NodeStreamTest {
-
- @ClassRule
- public static TemporaryFolder temporaryFolder = new TemporaryFolder(new File("target"));
-
- @Test
- public void test() throws IOException {
- File f = temporaryFolder.getRoot();
- File flatFile = new File(f, "flatFile.txt");
- BufferedWriter w = new BufferedWriter(new FileWriter(flatFile));
- for (int i = 0; i < 10; i++) {
- StringBuilder buff = new StringBuilder();
- for (int j = 0; j < 16; j++) {
- if (j > 0) {
- buff.append(",");
- }
- String value = Integer.toHexString(j).repeat(1 << j);
- buff.append("\"x" + j + "\":\"" + value + "\"");
- }
- buff.append(",\"a\":true");
- buff.append(",\"b\":false");
- buff.append(",\"c\":\":blobId:0x12\"");
- buff.append(",\"d\":\"str:1\"");
- buff.append(",\"e\":\"nam:2\"");
- buff.append(",\"f\":\"ref:3\"");
- buff.append(",\"g\":\"dat:4\"");
- buff.append(",\"h\":\"dec:5\"");
- buff.append(",\"i\":\"dou:6\"");
- buff.append(",\"j\":\"wea:7\"");
- buff.append(",\"k\":\"uri:8\"");
- buff.append(",\"l\":\"pat:9\"");
- buff.append(",\"m\":\"[0]:Name\"");
- w.write("/n" + i + "|{" + buff.toString() +
- ",\"n\":null,\"x\":[\"1\"]}\n");
- }
- w.close();
-
- File streamFile = new File(f, "streamFile.lz4");
- File compressedStreamFile = new File(f, "compressedStreamFile.lz4");
-
- NodeStreamConverter.convert(flatFile.getAbsolutePath(), streamFile.getAbsolutePath());
- NodeStreamConverterCompressed.convert(flatFile.getAbsolutePath(), compressedStreamFile.getAbsolutePath());
-
- NodeDataReader flatReader = NodeLineReader.open(flatFile.getAbsolutePath());
- long fileSize1 = flatReader.getFileSize();
- NodeDataReader nodeStream = NodeStreamReader.open(streamFile.getAbsolutePath());
- long fileSize2 = nodeStream.getFileSize();
- NodeDataReader compressedStream = NodeStreamReaderCompressed.open(compressedStreamFile.getAbsolutePath());
- long fileSize3 = compressedStream.getFileSize();
- assertTrue(fileSize3 < fileSize2);
- assertTrue(fileSize2 < fileSize1);
- while (true) {
- NodeData n1 = flatReader.readNode();
- NodeData n2 = nodeStream.readNode();
- NodeData n3 = compressedStream.readNode();
- if (n1 == null) {
- assertNull(n2);
- assertNull(n3);
- break;
- }
- assertEquals(n1.toString(), n2.toString());
- assertEquals(n1.toString(), n3.toString());
- }
- }
-}
diff --git a/oak-run-commons/src/test/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/VarIntTest.java b/oak-run-commons/src/test/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/VarIntTest.java
deleted file mode 100644
index 77f0417ba6..0000000000
--- a/oak-run-commons/src/test/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/analysis/stream/VarIntTest.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.stream;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-
-import org.junit.Test;
-
-/**
- * Tests for variable size integer streaming.
- */
-public class VarIntTest {
-
- @Test
- public void test() throws IOException {
- for (int i = 1; i > 0; i *= 2) {
- ByteArrayOutputStream out = new ByteArrayOutputStream();
- NodeStreamConverter.writeVarInt(out, i);
- ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
- int test = NodeStreamReader.readVarInt(in);
- assertEquals(test, i);
- assertEquals(-1, in.read());
- }
- }
-}
diff --git a/oak-run/src/main/java/org/apache/jackrabbit/oak/index/merge/IndexStoreCommand.java b/oak-run/src/main/java/org/apache/jackrabbit/oak/index/merge/IndexStoreCommand.java
index c28a0e8a6d..2699eca881 100644
--- a/oak-run/src/main/java/org/apache/jackrabbit/oak/index/merge/IndexStoreCommand.java
+++ b/oak-run/src/main/java/org/apache/jackrabbit/oak/index/merge/IndexStoreCommand.java
@@ -39,7 +39,7 @@ import joptsimple.OptionSpec;
public class IndexStoreCommand implements Command {
- public final static String INDEX_STORE = "index-store";
+ public final static String NAME = "index-store";
@SuppressWarnings("unchecked")
@Override
@@ -52,7 +52,7 @@ public class IndexStoreCommand implements Command {
"An index store file").ofType(File.class);
if (options.has(helpSpec)
|| options.nonOptionArguments().isEmpty()) {
- System.out.println("Mode: " + INDEX_STORE);
+ System.out.println("Mode: " + NAME);
System.out.println();
parser.printHelpOn(System.out);
return;
diff --git a/oak-run/src/main/java/org/apache/jackrabbit/oak/index/merge/IndexStoreStatsCommand.java b/oak-run/src/main/java/org/apache/jackrabbit/oak/index/merge/IndexStoreStatsCommand.java
new file mode 100644
index 0000000000..57a6ba6f20
--- /dev/null
+++ b/oak-run/src/main/java/org/apache/jackrabbit/oak/index/merge/IndexStoreStatsCommand.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.jackrabbit.oak.index.merge;
+
+import static java.util.Arrays.asList;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.StatsBuilder;
+import org.apache.jackrabbit.oak.run.commons.Command;
+
+import joptsimple.OptionParser;
+import joptsimple.OptionSet;
+import joptsimple.OptionSpec;
+
+public class IndexStoreStatsCommand implements Command {
+
+ public final static String NAME = "index-store-stats";
+
+ @Override
+ public void execute(String... args) throws IOException {
+ OptionParser parser = new OptionParser();
+ OptionSpec<String> nodeNameFilterOption = parser
+ .accepts("nodeNameFilter", "The node name to filter, if
any").withOptionalArg()
+ .defaultsTo("");
+ OptionSpec<Boolean> profilerOption = parser
+ .accepts("profiler", "Use the profiler (default:
disabled)").withOptionalArg()
+ .ofType(Boolean.class).defaultsTo(false);
+ OptionSpec<?> helpSpec = parser.acceptsAll(
+ asList("h", "?", "help"), "Prints help and exits").forHelp();
+ OptionSet options = parser.parse(args);
+ parser.nonOptions(
+ "An index store file").ofType(File.class);
+
+ if (options.has(helpSpec)
+ || options.nonOptionArguments().isEmpty()) {
+ System.out.println("Mode: " + NAME);
+ System.out.println("Calculate statistics (node count, binary
size,...) of a tree store");
+ System.out.println();
+ parser.printHelpOn(System.out);
+ return;
+ }
+ String nodeNameFilter = nodeNameFilterOption.value(options);
+ boolean profiler = profilerOption.value(options);
+
+ if (options.nonOptionArguments().size() < 1) {
+ System.err.println("This command requires a file name");
+ System.exit(1);
+ }
+ File file = new File(options.nonOptionArguments().get(0).toString());
+ if (!file.exists()) {
+ System.out.println("File not found: " + file.getAbsolutePath());
+ return;
+ }
+ StatsBuilder.buildStats(file.getAbsolutePath(), nodeNameFilter, profiler);
+ }
+
+}
diff --git a/oak-run/src/main/java/org/apache/jackrabbit/oak/run/AvailableModes.java b/oak-run/src/main/java/org/apache/jackrabbit/oak/run/AvailableModes.java
index bd4ecb9cf7..87a37a46a8 100644
--- a/oak-run/src/main/java/org/apache/jackrabbit/oak/run/AvailableModes.java
+++ b/oak-run/src/main/java/org/apache/jackrabbit/oak/run/AvailableModes.java
@@ -22,6 +22,7 @@ import org.apache.jackrabbit.oak.exporter.NodeStateExportCommand;
import org.apache.jackrabbit.oak.index.IndexCommand;
import org.apache.jackrabbit.oak.index.merge.IndexDiffCommand;
import org.apache.jackrabbit.oak.index.merge.IndexStoreCommand;
+import org.apache.jackrabbit.oak.index.merge.IndexStoreStatsCommand;
import org.apache.jackrabbit.oak.run.commons.Command;
import org.apache.jackrabbit.oak.run.commons.Modes;
@@ -58,7 +59,8 @@ public final class AvailableModes {
builder.put("history", new HistoryCommand());
builder.put("index-diff", new IndexDiffCommand());
builder.put("index-merge", new IndexMergeCommand());
- builder.put(IndexStoreCommand.INDEX_STORE, new IndexStoreCommand());
+ builder.put(IndexStoreCommand.NAME, new IndexStoreCommand());
+ builder.put(IndexStoreStatsCommand.NAME, new IndexStoreStatsCommand());
builder.put(IndexCommand.NAME, new IndexCommand());
builder.put(IOTraceCommand.NAME, new IOTraceCommand());
builder.put(JsonIndexCommand.INDEX, new JsonIndexCommand());
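The renamed NodeTreeStoreReader keeps the NodeDataReader contract used by collect(): readNode() returns one NodeData per path and null once the path iterator is exhausted. A hedged sketch of consuming a reader that way (not part of the commit; the store path is illustrative):

    import java.io.IOException;

    import org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.stream.NodeData;
    import org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.stream.NodeDataReader;
    import org.apache.jackrabbit.oak.index.indexer.document.flatfile.analysis.stream.NodeTreeStoreReader;

    public class ReadTreeStoreExample {
        public static void main(String... args) throws IOException {
            // open(...) wraps the tree store in a NodeDataReader (illustrative path)
            NodeDataReader reader = NodeTreeStoreReader.open("/tmp/tree-store");
            long count = 0;
            try {
                while (true) {
                    NodeData node = reader.readNode(); // null at the end of the store
                    if (node == null) {
                        break;
                    }
                    count++;
                }
            } finally {
                reader.close();
            }
            System.out.println(count + " entries");
        }
    }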