adelapena commented on code in PR #2409:
URL: https://github.com/apache/cassandra/pull/2409#discussion_r1229704196


##########
src/java/org/apache/cassandra/index/sai/disk/v1/bbtree/NumericIndexWriter.java:
##########
@@ -0,0 +1,178 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.cassandra.index.sai.disk.v1.bbtree;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.MoreObjects;
+
+import org.apache.cassandra.index.sai.IndexContext;
+import org.apache.cassandra.index.sai.disk.format.IndexComponent;
+import org.apache.cassandra.index.sai.disk.format.IndexDescriptor;
+import org.apache.cassandra.index.sai.disk.v1.segment.SegmentMetadata;
+import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.util.packed.PackedInts;
+import org.apache.lucene.util.packed.PackedLongValues;
+
+import static com.google.common.base.Preconditions.checkArgument;
+
+/**
+ * Specialized writer for point values, that builds them into a {@link BlockBalancedTreeWriter} with auxiliary
+ * posting lists on eligible tree levels.
+ * <p>
+ * Given a sorted input {@link IntersectingPointValues}, the flush process is optimised because we don't need to
+ * buffer all point values to sort them.
+ */
+public class NumericIndexWriter
+{
+    public static final int MAX_POINTS_IN_LEAF_NODE = BlockBalancedTreeWriter.DEFAULT_MAX_POINTS_IN_LEAF_NODE;
+
+    private final BlockBalancedTreeWriter writer;
+    private final IndexDescriptor indexDescriptor;
+    private final IndexContext indexContext;
+    private final int bytesPerValue;
+
+    /**
+     * @param maxSegmentRowId maximum possible segment row ID, used to create `maxDoc` for the balanced tree
+     */
+    public NumericIndexWriter(IndexDescriptor indexDescriptor,
+                              IndexContext indexContext,
+                              int bytesPerValue,
+                              long maxSegmentRowId)
+    {
+        this(indexDescriptor, indexContext, MAX_POINTS_IN_LEAF_NODE, bytesPerValue, maxSegmentRowId);
+    }
+
+    @VisibleForTesting
+    public NumericIndexWriter(IndexDescriptor indexDescriptor,
+                              IndexContext indexContext,
+                              int maxPointsInLeafNode,
+                              int bytesPerValue,
+                              long maxSegmentRowId)
+    {
+        checkArgument(maxSegmentRowId >= 0, "[%s] maxRowId must be non-negative value, but got %s", indexContext.getIndexName(), maxSegmentRowId);
+
+        this.indexDescriptor = indexDescriptor;
+        this.indexContext = indexContext;
+        this.bytesPerValue = bytesPerValue;
+        this.writer = new BlockBalancedTreeWriter(maxSegmentRowId + 1, bytesPerValue, maxPointsInLeafNode);
+    }
+
+    @Override
+    public String toString()
+    {
+        return MoreObjects.toStringHelper(this)
+                          .add("bytesPerDim", bytesPerValue)
+                          .add("bufferedPoints", writer.getPointCount())
+                          .toString();
+    }
+
+    public static class LeafCallback implements BlockBalancedTreeWriter.Callback
+    {
+        final List<PackedLongValues> postings = new ArrayList<>();
+
+        public int numLeaves()
+        {
+            return postings.size();
+        }
+
+        @Override
+        public void writeLeafDocs(BlockBalancedTreeWriter.RowIDAndIndex[] sortedByRowID, int offset, int count)
+        {
+            PackedLongValues.Builder builder = PackedLongValues.monotonicBuilder(PackedInts.COMPACT);
+
+            for (int i = offset; i < count; ++i)
+            {
+                builder.add(sortedByRowID[i].rowID);
+            }
+            postings.add(builder.build());
+        }
+    }
+
+    /**
+     * Writes a balanced tree and posting lists from a {@link org.apache.lucene.codecs.MutablePointValues}.
+     *
+     * @param values points to write
+     *
+     * @return metadata describing the location and size of this balanced tree in the overall SSTable balanced tree component file
+     */
+    public SegmentMetadata.ComponentMetadataMap writeAll(IntersectingPointValues values) throws IOException
+    {
+        long treePosition;
+        SegmentMetadata.ComponentMetadataMap components = new SegmentMetadata.ComponentMetadataMap();
+
+        LeafCallback leafCallback = new LeafCallback();
+
+        try (IndexOutput treeOutput = indexDescriptor.openPerIndexOutput(IndexComponent.BALANCED_TREE, indexContext, true))
+        {
+            // The SSTable balanced tree component file is opened in append mode, so our offset is the current file pointer.
+            long treeOffset = treeOutput.getFilePointer();
+
+            treePosition = writer.writeField(treeOutput, values, leafCallback);
+
+            // If the treePosition is less than 0 then we didn't write any values out
+            // and the index is empty

Review Comment:
   Nit: no need to split the line
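   
   For illustration only (not a required change), the unsplit comment might simply read:
   
       // If the treePosition is less than 0 then we didn't write any values out and the index is empty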



##########
src/java/org/apache/cassandra/index/sai/disk/v1/bbtree/NumericIndexWriter.java:
##########
@@ -0,0 +1,178 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.cassandra.index.sai.disk.v1.bbtree;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.MoreObjects;
+
+import org.apache.cassandra.index.sai.IndexContext;
+import org.apache.cassandra.index.sai.disk.format.IndexComponent;
+import org.apache.cassandra.index.sai.disk.format.IndexDescriptor;
+import org.apache.cassandra.index.sai.disk.v1.segment.SegmentMetadata;
+import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.util.packed.PackedInts;
+import org.apache.lucene.util.packed.PackedLongValues;
+
+import static com.google.common.base.Preconditions.checkArgument;
+
+/**
+ * Specialized writer for point values, that builds them into a {@link BlockBalancedTreeWriter} with auxiliary
+ * posting lists on eligible tree levels.
+ * <p>
+ * Given a sorted input {@link IntersectingPointValues}, the flush process is optimised because we don't need to
+ * buffer all point values to sort them.
+ */
+public class NumericIndexWriter
+{
+    public static final int MAX_POINTS_IN_LEAF_NODE = BlockBalancedTreeWriter.DEFAULT_MAX_POINTS_IN_LEAF_NODE;
+
+    private final BlockBalancedTreeWriter writer;
+    private final IndexDescriptor indexDescriptor;
+    private final IndexContext indexContext;
+    private final int bytesPerValue;
+
+    /**
+     * @param maxSegmentRowId maximum possible segment row ID, used to create `maxDoc` for the balanced tree
+     */
+    public NumericIndexWriter(IndexDescriptor indexDescriptor,
+                              IndexContext indexContext,
+                              int bytesPerValue,
+                              long maxSegmentRowId)
+    {
+        this(indexDescriptor, indexContext, MAX_POINTS_IN_LEAF_NODE, bytesPerValue, maxSegmentRowId);
+    }
+
+    @VisibleForTesting
+    public NumericIndexWriter(IndexDescriptor indexDescriptor,
+                              IndexContext indexContext,
+                              int maxPointsInLeafNode,
+                              int bytesPerValue,
+                              long maxSegmentRowId)
+    {
+        checkArgument(maxSegmentRowId >= 0, "[%s] maxRowId must be non-negative value, but got %s", indexContext.getIndexName(), maxSegmentRowId);
+
+        this.indexDescriptor = indexDescriptor;
+        this.indexContext = indexContext;
+        this.bytesPerValue = bytesPerValue;
+        this.writer = new BlockBalancedTreeWriter(maxSegmentRowId + 1, bytesPerValue, maxPointsInLeafNode);
+    }
+
+    @Override
+    public String toString()
+    {
+        return MoreObjects.toStringHelper(this)
+                          .add("bytesPerDim", bytesPerValue)
+                          .add("bufferedPoints", writer.getPointCount())
+                          .toString();
+    }
+
+    public static class LeafCallback implements BlockBalancedTreeWriter.Callback
+    {
+        final List<PackedLongValues> postings = new ArrayList<>();
+
+        public int numLeaves()
+        {
+            return postings.size();
+        }
+
+        @Override
+        public void writeLeafDocs(BlockBalancedTreeWriter.RowIDAndIndex[] sortedByRowID, int offset, int count)
+        {
+            PackedLongValues.Builder builder = PackedLongValues.monotonicBuilder(PackedInts.COMPACT);
+
+            for (int i = offset; i < count; ++i)
+            {
+                builder.add(sortedByRowID[i].rowID);
+            }
+            postings.add(builder.build());
+        }
+    }
+
+    /**
+     * Writes a balanced tree and posting lists from a {@link org.apache.lucene.codecs.MutablePointValues}.
+     *
+     * @param values points to write
+     *
+     * @return metadata describing the location and size of this balanced tree in the overall SSTable balanced tree component file
+     */
+    public SegmentMetadata.ComponentMetadataMap writeAll(IntersectingPointValues values) throws IOException
+    {
+        long treePosition;
+        SegmentMetadata.ComponentMetadataMap components = new SegmentMetadata.ComponentMetadataMap();
+
+        LeafCallback leafCallback = new LeafCallback();
+
+        try (IndexOutput treeOutput = indexDescriptor.openPerIndexOutput(IndexComponent.BALANCED_TREE, indexContext, true))
+        {
+            // The SSTable balanced tree component file is opened in append mode, so our offset is the current file pointer.
+            long treeOffset = treeOutput.getFilePointer();
+
+            treePosition = writer.writeField(treeOutput, values, leafCallback);
+
+            // If the treePosition is less than 0 then we didn't write any values out
+            // and the index is empty
+            if (treePosition < 0)
+                return components;
+
+            long treeLength = treeOutput.getFilePointer() - treeOffset;
+
+            Map<String, String> attributes = new LinkedHashMap<>();
+            attributes.put("max_points_in_leaf_node", 
Integer.toString(writer.getMaxPointsInLeafNode()));
+            attributes.put("num_leaves", 
Integer.toString(leafCallback.numLeaves()));
+            attributes.put("num_points", 
Long.toString(writer.getPointCount()));
+            attributes.put("bytes_per_dim", 
Long.toString(writer.getBytesPerValue()));

Review Comment:
   Could this be named "bytes_per_value", like the getter, since there is only one dimension?
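   
   Just as a sketch of the suggestion (the surrounding lines stay as they are):
   
       attributes.put("bytes_per_value", Long.toString(writer.getBytesPerValue()));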
   



##########
src/java/org/apache/cassandra/index/sai/disk/v1/bbtree/IntersectingPointValues.java:
##########
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.cassandra.index.sai.disk.v1.bbtree;
+
+import java.io.IOException;
+
+import org.apache.lucene.codecs.MutablePointValues;
+import org.apache.lucene.index.PointValues;
+import org.apache.lucene.util.BytesRef;
+
+public abstract class IntersectingPointValues extends MutablePointValues

Review Comment:
   This could use some class JavaDoc
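   
   A rough sketch of what it might say, based only on how `NumericIndexWriter` describes its input; the exact wording is of course up to you:
   
       /**
        * A {@link MutablePointValues} that exposes its point values in sorted order, allowing
        * {@link NumericIndexWriter} to flush them straight to a {@link BlockBalancedTreeWriter}
        * without having to buffer and re-sort them first.
        */
       public abstract class IntersectingPointValues extends MutablePointValues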



##########
src/java/org/apache/cassandra/index/sai/disk/v1/bbtree/TraversingBlockBalancedTreeReader.java:
##########
@@ -0,0 +1,314 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.cassandra.index.sai.disk.v1.bbtree;
+
+import java.io.Closeable;
+
+import org.agrona.collections.IntArrayList;
+import org.apache.cassandra.index.sai.disk.io.IndexInputReader;
+import org.apache.cassandra.index.sai.disk.v1.SAICodecUtils;
+import org.apache.cassandra.io.util.FileHandle;
+import org.apache.cassandra.io.util.FileUtils;
+import org.apache.cassandra.io.util.RandomAccessReader;
+import org.apache.cassandra.utils.ObjectSizes;
+import org.apache.cassandra.utils.Throwables;
+import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.store.ByteArrayDataInput;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.FutureArrays;
+import org.apache.lucene.util.MathUtil;
+
+/**
+ * Base reader for a block balanced tree previously written with {@link BlockBalancedTreeWriter}.
+ * <p>
+ * Holds the index tree on heap and enables its traversal via {@link #traverse(IndexTreeTraversalCallback)}.
+ */
+public class TraversingBlockBalancedTreeReader implements Closeable
+{
+    final FileHandle treeIndexFile;
+    final int bytesPerValue;
+    final int numLeaves;
+    final byte[] minPackedValue;
+    final byte[] maxPackedValue;
+    // Packed array of byte[] holding all split values in the full binary tree:
+    final byte[] packedIndex;
+    final long pointCount;
+    final int maxPointsInLeafNode;
+    final int packedBytesLength;
+
+    TraversingBlockBalancedTreeReader(FileHandle treeIndexFile, long treeIndexRoot)
+    {
+        this.treeIndexFile = treeIndexFile;
+
+        try (RandomAccessReader reader = treeIndexFile.createReader();
+             IndexInputReader in = IndexInputReader.create(reader))
+        {
+            SAICodecUtils.validate(in);
+            in.seek(treeIndexRoot);
+
+            maxPointsInLeafNode = in.readVInt();
+            bytesPerValue = in.readVInt();
+            packedBytesLength = bytesPerValue;
+
+            // Read index:
+            numLeaves = in.readVInt();
+            assert numLeaves > 0;
+
+            minPackedValue = new byte[packedBytesLength];
+            maxPackedValue = new byte[packedBytesLength];
+
+            in.readBytes(minPackedValue, 0, packedBytesLength);
+            in.readBytes(maxPackedValue, 0, packedBytesLength);
+
+            if (FutureArrays.compareUnsigned(minPackedValue, 0, bytesPerValue, maxPackedValue, 0, bytesPerValue) > 0)
+            {
+                String message = String.format("Min packed value %s is > max packed value %s.",
+                                               new BytesRef(minPackedValue), new BytesRef(maxPackedValue));
+                throw new CorruptIndexException(message, in);
+            }
+
+            pointCount = in.readVLong();
+
+            int numBytes = in.readVInt();
+            packedIndex = new byte[numBytes];
+            in.readBytes(packedIndex, 0, numBytes);
+        }
+        catch (Throwable t)
+        {
+            FileUtils.closeQuietly(treeIndexFile);
+            throw Throwables.unchecked(t);
+        }
+    }
+
+    public long memoryUsage()
+    {
+        return ObjectSizes.sizeOfArray(packedIndex)
+               + ObjectSizes.sizeOfArray(minPackedValue)
+               + ObjectSizes.sizeOfArray(maxPackedValue);
+    }
+
+    @Override
+    public void close()
+    {
+        FileUtils.closeQuietly(treeIndexFile);
+    }
+
+    void traverse(IndexTreeTraversalCallback callback)
+    {
+        traverse(callback, new PackedIndexTree(), new IntArrayList());
+    }
+
+    private void traverse(IndexTreeTraversalCallback callback, PackedIndexTree index, IntArrayList pathToRoot)
+    {
+        if (index.isLeafNode())
+        {
+            // In the unbalanced case it's possible the left most node only has one child:
+            if (index.nodeExists())
+            {
+                callback.onLeaf(index.getNodeID(), index.getLeafBlockFP(), pathToRoot);
+            }
+        }
+        else
+        {
+            int nodeID = index.getNodeID();
+            IntArrayList currentPath = new IntArrayList();
+            currentPath.addAll(pathToRoot);
+            currentPath.add(nodeID);
+
+            index.pushLeft();
+            traverse(callback, index, currentPath);
+            index.pop();
+
+            index.pushRight();
+            traverse(callback, index, currentPath);
+            index.pop();
+        }
+    }
+
+    private int getTreeDepth()
+    {
+        // First +1 because all the non-leave nodes makes another power
+        // of 2; e.g. to have a fully balanced tree with 4 leaves you
+        // need a depth=3 tree:
+
+        // Second +1 because MathUtil.log computes floor of the logarithm; e.g.
+        // with 5 leaves you need a depth=4 tree:
+        return MathUtil.log(numLeaves, 2) + 2;
+    }
+
+    interface IndexTreeTraversalCallback

Review Comment:
   Maybe this could be named just `TraverseCallback`
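   
   Purely as a sketch of the rename (the `onLeaf` parameter types here are only a guess from how the callback is invoked in `traverse`):
   
       interface TraverseCallback
       {
           void onLeaf(int leafNodeID, long leafBlockFP, IntArrayList pathToRoot);
       }
   
       void traverse(TraverseCallback callback)
       {
           traverse(callback, new PackedIndexTree(), new IntArrayList());
       }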



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

