vvivekiyer commented on code in PR #9678:
URL: https://github.com/apache/pinot/pull/9678#discussion_r1011006867
##########
pinot-segment-local/src/main/java/org/apache/pinot/segment/local/segment/index/loader/ForwardIndexHandler.java:
##########
@@ -271,118 +306,293 @@ private void rewriteRawSVForwardIndex(String column,
ColumnMetadata existingColM
.forForwardIndex(newCompressionType,
_indexLoadingConfig.getColumnProperties());
try (ForwardIndexCreator creator =
indexCreatorProvider.newForwardIndexCreator(context)) {
- // If creator stored type and the reader stored type do not match,
throw an exception.
if (!reader.getStoredType().equals(creator.getValueType())) {
+ // Creator stored type should match reader stored type for raw
columns. We do not support changing datatypes.
String failureMsg =
"Unsupported operation to change datatype for column=" + column
+ " from " + reader.getStoredType()
.toString() + " to " + creator.getValueType().toString();
throw new UnsupportedOperationException(failureMsg);
}
int numDocs = existingColMetadata.getTotalDocs();
- forwardIndexWriterHelper(column, reader, creator, numDocs);
+ forwardIndexWriterHelper(column, existingColMetadata, reader, creator,
numDocs, null);
}
}
}
- private void forwardIndexWriterHelper(String column, ForwardIndexReader
reader, ForwardIndexCreator creator,
- int numDocs) {
- // If creator stored type should match reader stored type. We do not
support changing datatypes.
- if (!reader.getStoredType().equals(creator.getValueType())) {
- String failureMsg =
- "Unsupported operation to change datatype for column=" + column + "
from " + reader.getStoredType().toString()
- + " to " + creator.getValueType().toString();
- throw new UnsupportedOperationException(failureMsg);
- }
-
+ private void forwardIndexWriterHelper(String column, ColumnMetadata
existingColumnMetadata, ForwardIndexReader reader,
+ ForwardIndexCreator creator, int numDocs, @Nullable
SegmentDictionaryCreator dictionaryCreator) {
ForwardIndexReaderContext readerContext = reader.createContext();
boolean isSVColumn = reader.isSingleValue();
- switch (reader.getStoredType()) {
- // JSON fields are either stored as string or bytes. No special handling
is needed because we make this
- // decision based on the storedType of the reader.
- case INT: {
- for (int i = 0; i < numDocs; i++) {
- if (isSVColumn) {
- int val = reader.getInt(i, readerContext);
- creator.putInt(val);
- } else {
- int[] ints = reader.getIntMV(i, readerContext);
- creator.putIntMV(ints);
- }
+ if (dictionaryCreator != null) {
+ int maxNumValuesPerEntry =
existingColumnMetadata.getMaxNumberOfMultiValues();
+ PinotSegmentColumnReader columnReader = new
PinotSegmentColumnReader(reader, null, null, maxNumValuesPerEntry);
+
+ for (int i = 0; i < numDocs; i++) {
+ Object obj = columnReader.getValue(i);
+
+ if (isSVColumn) {
+ int dictId = dictionaryCreator.indexOfSV(obj);
+ creator.putDictId(dictId);
+ } else {
+ int[] dictIds = dictionaryCreator.indexOfMV(obj);
+ creator.putDictIdMV(dictIds);
}
- break;
}
- case LONG: {
- for (int i = 0; i < numDocs; i++) {
- if (isSVColumn) {
- long val = reader.getLong(i, readerContext);
- creator.putLong(val);
- } else {
- long[] longs = reader.getLongMV(i, readerContext);
- creator.putLongMV(longs);
+ } else {
+ switch (reader.getStoredType()) {
+ // JSON fields are either stored as string or bytes. No special
handling is needed because we make this
+ // decision based on the storedType of the reader.
+ case INT: {
+ for (int i = 0; i < numDocs; i++) {
+ if (isSVColumn) {
+ int val = reader.getInt(i, readerContext);
+ creator.putInt(val);
+ } else {
+ int[] ints = reader.getIntMV(i, readerContext);
+ creator.putIntMV(ints);
+ }
}
+ break;
}
- break;
- }
- case FLOAT: {
- for (int i = 0; i < numDocs; i++) {
- if (isSVColumn) {
- float val = reader.getFloat(i, readerContext);
- creator.putFloat(val);
- } else {
- float[] floats = reader.getFloatMV(i, readerContext);
- creator.putFloatMV(floats);
+ case LONG: {
+ for (int i = 0; i < numDocs; i++) {
+ if (isSVColumn) {
+ long val = reader.getLong(i, readerContext);
+ creator.putLong(val);
+ } else {
+ long[] longs = reader.getLongMV(i, readerContext);
+ creator.putLongMV(longs);
+ }
}
+ break;
}
- break;
- }
- case DOUBLE: {
- for (int i = 0; i < numDocs; i++) {
- if (isSVColumn) {
- double val = reader.getDouble(i, readerContext);
- creator.putDouble(val);
- } else {
- double[] doubles = reader.getDoubleMV(i, readerContext);
- creator.putDoubleMV(doubles);
+ case FLOAT: {
+ for (int i = 0; i < numDocs; i++) {
+ if (isSVColumn) {
+ float val = reader.getFloat(i, readerContext);
+ creator.putFloat(val);
+ } else {
+ float[] floats = reader.getFloatMV(i, readerContext);
+ creator.putFloatMV(floats);
+ }
}
+ break;
}
- break;
- }
- case STRING: {
- for (int i = 0; i < numDocs; i++) {
- if (isSVColumn) {
- String val = reader.getString(i, readerContext);
- creator.putString(val);
- } else {
- String[] strings = reader.getStringMV(i, readerContext);
- creator.putStringMV(strings);
+ case DOUBLE: {
+ for (int i = 0; i < numDocs; i++) {
+ if (isSVColumn) {
+ double val = reader.getDouble(i, readerContext);
+ creator.putDouble(val);
+ } else {
+ double[] doubles = reader.getDoubleMV(i, readerContext);
+ creator.putDoubleMV(doubles);
+ }
}
+ break;
}
- break;
- }
- case BYTES: {
- for (int i = 0; i < numDocs; i++) {
- if (isSVColumn) {
- byte[] val = reader.getBytes(i, readerContext);
- creator.putBytes(val);
- } else {
- byte[][] bytesArray = reader.getBytesMV(i, readerContext);
- creator.putBytesMV(bytesArray);
+ case STRING: {
+ for (int i = 0; i < numDocs; i++) {
+ if (isSVColumn) {
+ String val = reader.getString(i, readerContext);
+ creator.putString(val);
+ } else {
+ String[] strings = reader.getStringMV(i, readerContext);
+ creator.putStringMV(strings);
+ }
}
+ break;
}
- break;
- }
- case BIG_DECIMAL: {
- for (int i = 0; i < numDocs; i++) {
+ case BYTES: {
+ for (int i = 0; i < numDocs; i++) {
+ if (isSVColumn) {
+ byte[] val = reader.getBytes(i, readerContext);
+ creator.putBytes(val);
+ } else {
+ byte[][] bytesArray = reader.getBytesMV(i, readerContext);
+ creator.putBytesMV(bytesArray);
+ }
+ }
+ break;
+ }
+ case BIG_DECIMAL: {
Preconditions.checkState(isSVColumn, "BigDecimal is not supported
for MV columns");
- BigDecimal val = reader.getBigDecimal(i, readerContext);
- creator.putBigDecimal(val);
+ for (int i = 0; i < numDocs; i++) {
+ BigDecimal val = reader.getBigDecimal(i, readerContext);
+ creator.putBigDecimal(val);
+ }
+ break;
}
- break;
+ default:
+ throw new IllegalStateException();
+ }
+ }
+ }
+
+ private void enableDictionary(String column, SegmentDirectory.Writer
segmentWriter,
+ IndexCreatorProvider indexCreatorProvider)
+ throws Exception {
+ Preconditions.checkState(_segmentMetadata.getVersion() ==
SegmentVersion.v3);
Review Comment:
We already have an "if" condition where we skip computing a valid operation
when the segment version is < v3.
I have these checks only in the entry functions: `rewriteRawForwardIndex`,
`enableDictionary` and `disableDictionary`. My reasoning for keeping them is
twofold:
1. Looking at the check, it is immediately evident to the reader that this
function works only for v3.
2. If someone decides to write a test and call this function directly, this
check would fire immediately.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]