pvary commented on code in PR #12298:
URL: https://github.com/apache/iceberg/pull/12298#discussion_r2552407075
##########
parquet/src/main/java/org/apache/iceberg/parquet/ParquetFormatModel.java:
##########
@@ -0,0 +1,394 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.iceberg.parquet;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Map;
+import java.util.function.Function;
+import org.apache.iceberg.FileContent;
+import org.apache.iceberg.FileFormat;
+import org.apache.iceberg.MetricsConfig;
+import org.apache.iceberg.Schema;
+import org.apache.iceberg.data.parquet.GenericParquetWriter;
+import org.apache.iceberg.encryption.EncryptedOutputFile;
+import org.apache.iceberg.expressions.Expression;
+import org.apache.iceberg.formats.FormatModel;
+import org.apache.iceberg.formats.ReadBuilder;
+import org.apache.iceberg.formats.WriteBuilder;
+import org.apache.iceberg.io.CloseableIterable;
+import org.apache.iceberg.io.DeleteSchemaUtil;
+import org.apache.iceberg.io.FileAppender;
+import org.apache.iceberg.io.InputFile;
+import org.apache.iceberg.mapping.NameMapping;
+import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
+import org.apache.iceberg.relocated.com.google.common.collect.Maps;
+import org.apache.parquet.column.ParquetProperties;
+import org.apache.parquet.schema.MessageType;
+
+public class ParquetFormatModel<D, S, F> implements FormatModel<D, S> {
+  public static final String WRITER_VERSION_KEY = "parquet.writer.version";
+
+  private final Class<? extends D> registerType;
+  private final Class<S> schemaType;
+  private final ReaderFunction<D> readerFunction;
+  private final BatchReaderFunction<D, F> batchReaderFunction;
+  private final WriterFunction<S> writerFunction;
+
+  private ParquetFormatModel(
+      Class<? extends D> registerType,
+      Class<S> schemaType,
+      ReaderFunction<D> readerFunction,
+      BatchReaderFunction<D, F> batchReaderFunction,
+      WriterFunction<S> writerFunction) {
+    this.registerType = registerType;
+    this.schemaType = schemaType;
+    this.readerFunction = readerFunction;
+    this.batchReaderFunction = batchReaderFunction;
+    this.writerFunction = writerFunction;
+  }
+
+  public ParquetFormatModel(Class<D> type) {
+    this(type, null, null, null, null);
+  }
+
+  public ParquetFormatModel(
+      Class<D> type,
+      Class<S> schemaType,
+      ReaderFunction<D> readerFunction,
+      WriterFunction<S> writerFunction) {
+    this(type, schemaType, readerFunction, null, writerFunction);
+  }
+
+  public ParquetFormatModel(
+      Class<? extends D> returnType,
+      Class<S> schemaType,
+      BatchReaderFunction<D, F> batchReaderFunction) {
+    this(returnType, schemaType, null, (BatchReaderFunction<D, F>) batchReaderFunction, null);
+  }
+
+  @Override
+  public FileFormat format() {
+    return FileFormat.PARQUET;
+  }
+
+  @Override
+  public Class<D> type() {
+    return (Class<D>) registerType;
+  }
+
+  @Override
+  public Class<S> schemaType() {
+    return schemaType;
+  }
+
+  @Override
+  public WriteBuilder<D, S> writeBuilder(EncryptedOutputFile outputFile) {
+    return new WriteBuilderWrapper<>(outputFile, writerFunction);
+  }
+
+  @Override
+  public ReadBuilder<D, S> readBuilder(InputFile inputFile) {
+    if (readerFunction != null) {
+      return new NonBatchReaderWrapper<>(inputFile, readerFunction);
+    } else if (batchReaderFunction != null) {
+      return new BatchReaderWrapper<>(inputFile, batchReaderFunction);
+    } else {
+      throw new IllegalStateException("Either readerFunction or batchReaderFunction must be set");
+    }
+  }
+
+  @FunctionalInterface
+  public interface ReaderFunction<D> {
+    ParquetValueReader<D> read(
+        Schema schema, MessageType messageType, Map<Integer, ?> constantValues);
+  }
+
+  @FunctionalInterface
+  public interface BatchReaderFunction<D, F> {
+    VectorizedReader<D> read(
+        Schema schema,
+        MessageType messageType,
+        Map<Integer, ?> constantValues,
+        F deleteFilter,
+        Map<String, String> config);
+  }
+
+  @FunctionalInterface
+  public interface WriterFunction<S> {
+    ParquetValueWriter<?> write(Schema icebergSchema, MessageType messageType, S engineSchema);
+  }
+
+  public interface SupportsDeleteFilter<F> {
+    void deleteFilter(F deleteFilter);
+  }
+
+  private static class WriteBuilderWrapper<D, S> implements WriteBuilder<D, S> {
+    private final Parquet.WriteBuilder internal;
+    private final WriterFunction<S> writerFunction;
+    private S inputSchema;
+
+    private WriteBuilderWrapper(EncryptedOutputFile outputFile, WriterFunction<S> writerFunction) {
+      this.internal = Parquet.write(outputFile);
+      this.writerFunction = writerFunction;
+    }
+
+    @Override
+    public WriteBuilder<D, S> schema(Schema schema) {
+      internal.schema(schema);
+      return this;
+    }
+
+    @Override
+    public WriteBuilder<D, S> inputSchema(S schema) {
+      this.inputSchema = schema;
+      return this;
+    }
+
+    @Override
+    public WriteBuilder<D, S> set(String property, String value) {
+      if (WRITER_VERSION_KEY.equals(property)) {
+        internal.writerVersion(ParquetProperties.WriterVersion.valueOf(value));
+      }
+
+      internal.set(property, value);
+      return this;
+    }
+
+    @Override
+    public WriteBuilder<D, S> setAll(Map<String, String> properties) {
+      internal.setAll(properties);
+      return this;
+    }
+
+    @Override
+    public WriteBuilder<D, S> meta(String property, String value) {
+      internal.meta(property, value);
+      return this;
+    }
+
+    @Override
+    public WriteBuilder<D, S> meta(Map<String, String> properties) {
+      internal.meta(properties);
+      return this;
+    }
+
+    @Override
+    public WriteBuilder<D, S> content(FileContent content) {
+      switch (content) {
+        case DATA:
+          internal.createContextFunc(Parquet.WriteBuilder.Context::dataContext);
+          internal.createWriterFunc(
+              (icebergSchema, messageType) ->
+                  writerFunction.write(icebergSchema, messageType, inputSchema));
+          break;
+        case EQUALITY_DELETES:
+          internal.createContextFunc(Parquet.WriteBuilder.Context::deleteContext);
+          internal.createWriterFunc(
+              (icebergSchema, messageType) ->
+                  writerFunction.write(icebergSchema, messageType, inputSchema));
+          break;
+        case POSITION_DELETES:
+          internal.createContextFunc(Parquet.WriteBuilder.Context::deleteContext);
+          internal.createWriterFunc(
+              (icebergSchema, messageType) ->
+                  new ParquetValueWriters.PositionDeleteStructWriter<D>(
+                      (ParquetValueWriters.StructWriter<?>)
+                          GenericParquetWriter.create(icebergSchema, messageType),
+                      Function.identity()));
+          internal.schema(DeleteSchemaUtil.pathPosSchema());
+          break;
+        default:
+          throw new IllegalArgumentException("Unknown file content: " + content);
+      }
+
+      return this;
+    }
+
+    @Override
+    public WriteBuilder<D, S> metricsConfig(MetricsConfig metricsConfig) {
+      internal.metricsConfig(metricsConfig);
+      return this;
+    }
+
+    @Override
+    public WriteBuilder<D, S> overwrite() {
+      internal.overwrite();
+      return this;
+    }
+
+    @Override
+    public WriteBuilder<D, S> withFileEncryptionKey(ByteBuffer encryptionKey) {
+      internal.withFileEncryptionKey(encryptionKey);
+      return this;
+    }
+
+    @Override
+    public WriteBuilder<D, S> withAADPrefix(ByteBuffer aadPrefix) {
+      internal.withAADPrefix(aadPrefix);
+      return this;
+    }
+
+    @Override
+    public FileAppender<D> build() throws IOException {
+      return internal.build();
+    }
+  }
+
+  private abstract static class ReadBuilderWrapper<D, S, F> implements ReadBuilder<D, S> {

Review Comment:
Most of this became irrelevant after the DeleteFilter change (#14065).

These points might still be worth discussing:

> This doesn't need to track the Iceberg schema. Instead, this should register a binary reader function with Parquet so that Parquet is responsible for passing the Iceberg schema.

Are you suggesting updating the underlying `Parquet` class to add a method like `ReadBuilder.createBatchedReaderFunc(BiFunction<Schema, MessageType, VectorizedReader<?>> newReaderFunction)`? I've made this change, but it turned out to be a bit more involved to keep it consistent with `Parquet.BinaryReaderFunction`. Please review!

> Also, shouldn't this account for the DF schema?

I originally included it, but several reviewers noted that it's not currently used, so I was asked to remove it for now and add it back when needed. I'm open to either approach.
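To make the first point concrete, here is a minimal sketch (illustrative only, not the actual implementation) of how a batch read path could register the reader function, assuming the proposed `BiFunction` overload of `createBatchedReaderFunc` exists. `BatchReadSketch` and its fields are hypothetical stand-ins for the wrapper's state:

```java
import java.util.Map;
import org.apache.iceberg.Schema;
import org.apache.iceberg.io.CloseableIterable;
import org.apache.iceberg.io.InputFile;
import org.apache.iceberg.parquet.Parquet;
import org.apache.iceberg.parquet.ParquetFormatModel;
import org.apache.parquet.schema.MessageType;

// Hypothetical wrapper: illustrates the call shape, not the real BatchReaderWrapper.
class BatchReadSketch<D, F> {
  private final ParquetFormatModel.BatchReaderFunction<D, F> batchReaderFunction;
  private final Map<Integer, ?> constantValues;
  private final Map<String, String> config;
  private F deleteFilter; // would be set later via SupportsDeleteFilter

  BatchReadSketch(
      ParquetFormatModel.BatchReaderFunction<D, F> batchReaderFunction,
      Map<Integer, ?> constantValues,
      Map<String, String> config) {
    this.batchReaderFunction = batchReaderFunction;
    this.constantValues = constantValues;
    this.config = config;
  }

  CloseableIterable<D> read(InputFile inputFile, Schema projection) {
    return Parquet.read(inputFile)
        .project(projection)
        // Proposed overload under discussion: Parquet passes the Iceberg schema
        // alongside the Parquet file schema, so the wrapper no longer has to
        // track the Iceberg schema itself.
        .createBatchedReaderFunc(
            (Schema icebergSchema, MessageType fileSchema) ->
                batchReaderFunction.read(
                    icebergSchema, fileSchema, constantValues, deleteFilter, config))
        .build();
  }
}
```

This mirrors the existing two-argument `createReaderFunc` variant for row-based readers, which already receives both schemas from `Parquet`.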
