RussellSpitzer commented on a change in pull request #3557:
URL: https://github.com/apache/iceberg/pull/3557#discussion_r768187084
##########
File path: spark/v3.2/spark/src/main/java/org/apache/iceberg/spark/data/vectorized/ColumnarBatchReader.java
##########
@@ -61,80 +62,154 @@ public void setDeleteFilter(DeleteFilter<InternalRow> deleteFilter) {
   @Override
   public final ColumnarBatch read(ColumnarBatch reuse, int numRowsToRead) {
-    Preconditions.checkArgument(numRowsToRead > 0, "Invalid number of rows to read: %s", numRowsToRead);
-    ColumnVector[] arrowColumnVectors = new ColumnVector[readers.length];
-
     if (reuse == null) {
       closeVectors();
     }
-    Pair<int[], Integer> rowIdMapping = rowIdMapping(numRowsToRead);
+    ColumnBatchLoader batchLoader = new ColumnBatchLoader(numRowsToRead);
+    rowStartPosInBatch += numRowsToRead;
+    return batchLoader.columnarBatch;
+  }
-    for (int i = 0; i < readers.length; i += 1) {
-      vectorHolders[i] = readers[i].read(vectorHolders[i], numRowsToRead);
-      int numRowsInVector = vectorHolders[i].numValues();
-      Preconditions.checkState(
-          numRowsInVector == numRowsToRead,
-          "Number of rows in the vector %s didn't match expected %s ", numRowsInVector,
-          numRowsToRead);
+  private class ColumnBatchLoader {
+    private int[] rowIdMapping; // the rowId mapping to skip deleted rows for all column vectors inside a batch
+    private int numRows;
+    private ColumnarBatch columnarBatch;
-      if (rowIdMapping == null) {
-        arrowColumnVectors[i] = IcebergArrowColumnVector.forHolder(vectorHolders[i], numRowsInVector);
-      } else {
-        int[] rowIdMap = rowIdMapping.first();
-        Integer numRows = rowIdMapping.second();
-        arrowColumnVectors[i] = ColumnVectorWithFilter.forHolder(vectorHolders[i], rowIdMap, numRows);
+    ColumnBatchLoader(int numRowsToRead) {
+      initRowIdMapping(numRowsToRead);
+      loadDataToColumnBatch(numRowsToRead);
+    }
+
+    ColumnarBatch loadDataToColumnBatch(int numRowsToRead) {
+      Preconditions.checkArgument(numRowsToRead > 0, "Invalid number of rows to read: %s", numRowsToRead);
+      ColumnVector[] arrowColumnVectors = readDataToColumnVectors(numRowsToRead);
+
+      columnarBatch = new ColumnarBatch(arrowColumnVectors);
+      columnarBatch.setNumRows(numRows);
+
+      if (hasEqDeletes()) {
+        applyEqDelete();
       }
+      return columnarBatch;
     }
-    rowStartPosInBatch += numRowsToRead;
-    ColumnarBatch batch = new ColumnarBatch(arrowColumnVectors);
+    ColumnVector[] readDataToColumnVectors(int numRowsToRead) {
Review comment:
This could be private too
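For illustration only, a minimal, self-contained sketch of the pattern the reviewer is pointing at: a helper method that is called only from inside an inner loader class can be declared private rather than being left package-private. The names below (BatchReaderSketch, BatchLoader, loadBatch) are hypothetical and are not taken from the PR.

    public class BatchReaderSketch {

      public int[] read(int numRowsToRead) {
        // The loader does all of its work in the constructor, mirroring ColumnBatchLoader.
        return new BatchLoader(numRowsToRead).batch;
      }

      private class BatchLoader {
        private final int[] batch;

        BatchLoader(int numRowsToRead) {
          batch = loadBatch(numRowsToRead);
        }

        // Only the constructor above calls this helper, so nothing is lost by
        // making it private; package-private exposes it more widely than needed.
        private int[] loadBatch(int numRowsToRead) {
          int[] rows = new int[numRowsToRead];
          for (int i = 0; i < numRowsToRead; i++) {
            rows[i] = i;
          }
          return rows;
        }
      }
    }

In the hunk above, readDataToColumnVectors appears to be invoked only from loadDataToColumnBatch, so the same reasoning would apply.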
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]