Jackie-Jiang commented on a change in pull request #8335:
URL: https://github.com/apache/pinot/pull/8335#discussion_r826468594
##########
File path: pinot-core/src/main/java/org/apache/pinot/core/data/manager/realtime/TransformPipeline.java
##########
@@ -0,0 +1,149 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pinot.core.data.manager.realtime;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import org.apache.pinot.segment.local.recordtransformer.ComplexTypeTransformer;
+import org.apache.pinot.segment.local.recordtransformer.CompositeTransformer;
+import org.apache.pinot.segment.local.recordtransformer.RecordTransformer;
+import org.apache.pinot.segment.local.utils.IngestionUtils;
+import org.apache.pinot.spi.config.table.TableConfig;
+import org.apache.pinot.spi.data.Schema;
+import org.apache.pinot.spi.data.readers.GenericRow;
+
+
+/**
+ * The class for transforming and validating GenericRow data against the table schema and table config.
+ * It is used mainly, but not only, by RealTimeDataManager for each row that is going to be indexed into Pinot.
+ */
+public class TransformPipeline {
+  private final RecordTransformer _recordTransformer;
+  private final ComplexTypeTransformer _complexTypeTransformer;
+
+  /**
+   * Constructs a transform pipeline based on the TableConfig and table schema.
+   * @param tableConfig the config for the table
+   * @param schema the table schema
+   */
+  public TransformPipeline(TableConfig tableConfig, Schema schema) {
+    // Create record transformer
+    _recordTransformer = CompositeTransformer.getDefaultTransformer(tableConfig, schema);
+
+    // Create complex type transformer
+    _complexTypeTransformer = ComplexTypeTransformer.getComplexTypeTransformer(tableConfig);
+  }
+
+  /**
+   * Processes and validates the decoded row against the schema.
+   * @param decodedRow the row data to pass in
+   * @param reusedResult the reused result so we can reduce objects created for each row
+   * @return both processed rows and failed rows in a struct
+   * @throws TransformException when data has issues like schema validation. Fetch the partialResult from the exception
+   */
+  public Result processRow(GenericRow decodedRow, Result reusedResult) throws TransformException {

Review comment:
   (code format) Please reformat with [Pinot Style](https://docs.pinot.apache.org/developers/developers-and-contributors/code-setup#intellij)

##########
File path: pinot-core/src/main/java/org/apache/pinot/core/data/manager/realtime/TransformPipeline.java
##########
+  public Result processRow(GenericRow decodedRow, Result reusedResult) throws TransformException {
+    // to keep track and add to "failedRows" when exception happens
+    if (reusedResult == null) {

Review comment:
   `reusedResult` should never be `null`. Going over the code again, I think we can just make the return type `void` and always ask the caller to pass in the result buffer. This way we don't need to explicitly try-catch the exception, but let the caller decide how to handle the exception with the intermediate result in the result buffer. It is more flexible, and we can achieve different exception-handling logic on the caller side instead of fixing it in this class.
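   For illustration, a minimal sketch of what the suggested `void` contract could look like, with the per-row logic from the hunk above factored into a hypothetical `processPlainRow` helper (the helper name and exact shape are assumptions, not the merged code):

   ```java
   // Sketch only: the caller always supplies the Result buffer. No try-catch here;
   // exceptions propagate to the caller with any already-transformed rows left in
   // reusedResult for the caller to inspect. Whether processRow or the caller
   // resets the buffer is a design choice left open in this sketch.
   public void processRow(GenericRow decodedRow, Result reusedResult) throws Exception {
     if (_complexTypeTransformer != null) {
       // TODO: consolidate complex type transformer into composite type transformer
       decodedRow = _complexTypeTransformer.transform(decodedRow);
     }
     Collection<GenericRow> rows = (Collection<GenericRow>) decodedRow.getValue(GenericRow.MULTIPLE_RECORDS_KEY);
     if (rows != null) {
       for (GenericRow row : rows) {
         processPlainRow(row, reusedResult);
       }
     } else {
       processPlainRow(decodedRow, reusedResult);
     }
   }

   // Hypothetical helper: transforms a single row and records the outcome in the buffer.
   private void processPlainRow(GenericRow row, Result reusedResult) {
     GenericRow transformedRow = _recordTransformer.transform(row);
     if (transformedRow != null && IngestionUtils.shouldIngestRow(transformedRow)) {
       reusedResult.addTransformedRows(transformedRow);
     } else {
       reusedResult.incSkippedRowCount(1);
     }
   }
   ```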
##########
File path: pinot-core/src/main/java/org/apache/pinot/core/data/manager/realtime/TransformPipeline.java
##########
+  public Result processRow(GenericRow decodedRow, Result reusedResult) throws TransformException {
+    // to keep track and add to "failedRows" when exception happens

Review comment:
   Remove this comment as it doesn't apply

##########
File path: pinot-core/src/main/java/org/apache/pinot/core/data/manager/realtime/TransformPipeline.java
##########
+  public Result processRow(GenericRow decodedRow, Result reusedResult) throws TransformException {
+    // to keep track and add to "failedRows" when exception happens
+    if (reusedResult == null) {
+      reusedResult = new Result();
+    } else {
+      reusedResult.reset();
+    }
+    try {
+      if (_complexTypeTransformer != null) {
+        // TODO: consolidate complex type transformer into composite type transformer
+        decodedRow = _complexTypeTransformer.transform(decodedRow);
+      }
+      Collection<GenericRow> rows = (Collection<GenericRow>) decodedRow.getValue(GenericRow.MULTIPLE_RECORDS_KEY);
+      if (rows != null) {
+        for (GenericRow row : rows) {
+          GenericRow transformedRow = _recordTransformer.transform(row);
+          if (transformedRow != null && IngestionUtils.shouldIngestRow(transformedRow)) {
+            reusedResult.addTransformedRows(transformedRow);
+          } else {
+            reusedResult.incSkippedRowCount(1);
+          }
+        }
+      } else {
+        GenericRow transformedRow = _recordTransformer.transform(decodedRow);
+        if (transformedRow != null && IngestionUtils.shouldIngestRow(transformedRow)) {
+          reusedResult.addTransformedRows(transformedRow);
+        } else {
+          reusedResult.incSkippedRowCount(1);
+        }
+      }
+      return reusedResult;
+    } catch (Exception ex) {
+      // when exception happens, we abandon the transformed row record, but keep the failed count properly
+      int skippedCount = reusedResult.getSkippedRowCount();
+      reusedResult.reset();
+      reusedResult.incSkippedRowCount(skippedCount + 1);
+      throw new TransformException("Encountered error while processing row", reusedResult, ex);
+    }
+  }
+
+  /**
+   * Wrapper for transforming results. For efficiency, right now the failed rows have only a counter
+   */
+  public static class Result {
+    private final List<GenericRow> _transformedRows = new ArrayList<>();
+    private int _skippedRowCount = 0;
+
+    public List<GenericRow> getTransformedRows() {
+      return _transformedRows;
+    }
+
+    public int getSkippedRowCount() {
+      return _skippedRowCount;
+    }
+
+    public void addTransformedRows(GenericRow row) {
+      _transformedRows.add(row);
+    }
+
+    public void incSkippedRowCount(int increment) {

Review comment:
   Remove the argument and always `++`
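   In other words, something along these lines (a trivial sketch of the suggested change):

   ```java
   // Rows are skipped one at a time, so a plain increment suffices.
   public void incSkippedRowCount() {
     _skippedRowCount++;
   }
   ```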
##########
File path: pinot-core/src/main/java/org/apache/pinot/core/data/manager/realtime/LLRealtimeSegmentDataManager.java
##########
@@ -510,42 +506,24 @@ private void processStreamEvents(MessageBatch messagesAndOffsets, long idlePipeS
           .decode(messagesAndOffsets.getMessageAtIndex(index), messagesAndOffsets.getMessageOffsetAtIndex(index),
               messagesAndOffsets.getMessageLengthAtIndex(index), reuse);
       if (decodedRow != null) {
-        List<GenericRow> transformedRows = new ArrayList<>();
         try {
-          if (_complexTypeTransformer != null) {
-            // TODO: consolidate complex type transformer into composite type transformer
-            decodedRow = _complexTypeTransformer.transform(decodedRow);
-          }
-          Collection<GenericRow> rows = (Collection<GenericRow>) decodedRow.getValue(GenericRow.MULTIPLE_RECORDS_KEY);
-          if (rows != null) {
-            for (GenericRow row : rows) {
-              GenericRow transformedRow = _recordTransformer.transform(row);
-              if (transformedRow != null && IngestionUtils.shouldIngestRow(row)) {
-                transformedRows.add(transformedRow);
-              } else {
-                realtimeRowsDroppedMeter =
-                    _serverMetrics.addMeteredTableValue(_metricKeyName, ServerMeter.INVALID_REALTIME_ROWS_DROPPED, 1,
-                        realtimeRowsDroppedMeter);
-              }
-            }
-          } else {
-            GenericRow transformedRow = _recordTransformer.transform(decodedRow);
-            if (transformedRow != null && IngestionUtils.shouldIngestRow(transformedRow)) {
-              transformedRows.add(transformedRow);
-            } else {
-              realtimeRowsDroppedMeter =
-                  _serverMetrics.addMeteredTableValue(_metricKeyName, ServerMeter.INVALID_REALTIME_ROWS_DROPPED, 1,
-                      realtimeRowsDroppedMeter);
-            }
-          }
-        } catch (Exception e) {
+          reusedResult = _transformPipeline.processRow(decodedRow, reusedResult);
+        } catch (TransformPipeline.TransformException e) {
           _numRowsErrored++;
           String errorMessage = String.format("Caught exception while transforming the record: %s", decodedRow);
           _segmentLogger.error(errorMessage, e);
           _realtimeTableDataManager.addSegmentError(_segmentNameStr,
               new SegmentErrorInfo(System.currentTimeMillis(), errorMessage, e));
+          // for a row with multiple records (multi rows), if we encounter exception in the middle,
+          // there could be some rows that are processed successfully. We still wish to process them.
+          reusedResult = e.getPartialResult();

Review comment:
   Remove this
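   If `processRow` becomes `void` as suggested in the earlier comment, the partially filled buffer is already in `reusedResult` when the exception surfaces, so the caller would not need to fetch anything off the exception. A rough sketch of the resulting catch block, under that assumption (not the merged code):

   ```java
   try {
     _transformPipeline.processRow(decodedRow, reusedResult);
   } catch (Exception e) {
     _numRowsErrored++;
     String errorMessage = String.format("Caught exception while transforming the record: %s", decodedRow);
     _segmentLogger.error(errorMessage, e);
     _realtimeTableDataManager.addSegmentError(_segmentNameStr,
         new SegmentErrorInfo(System.currentTimeMillis(), errorMessage, e));
     // No e.getPartialResult() needed: rows transformed before the failure are
     // already sitting in reusedResult.
   }
   ```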
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]