liurenjie1024 commented on code in PR #12298:
URL: https://github.com/apache/iceberg/pull/12298#discussion_r1995602495


##########
core/src/main/java/org/apache/iceberg/io/datafile/AppenderBuilder.java:
##########
@@ -0,0 +1,121 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.iceberg.io.datafile;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import org.apache.iceberg.MetricsConfig;
+import org.apache.iceberg.Schema;
+import org.apache.iceberg.Table;
+import org.apache.iceberg.io.FileAppender;
+
+/**
+ * Interface which should be implemented by the data file format 
implementations.
+ *
+ * @param <A> type returned by builder API to allow chained calls
+ * @param <E> type for the engine specific schema
+ */
+public interface AppenderBuilder<A extends AppenderBuilder<A, E>, E> {
+  /**
+   * Sets the appender configurations coming from the table like {@link 
#schema(Schema)}, {@link
+   * #set(String, String)} and {@link #metricsConfig(MetricsConfig)}.
+   */
+  A forTable(Table table);
+
+  /** Set the file schema. */
+  A schema(Schema newSchema);
+
+  /** Set the file schema's root name. */
+  default A named(String newName) {
+    throw new UnsupportedOperationException("Not supported");
+  }
+
+  /**
+   * Set a writer configuration property.
+   *
+   * <p>Write configuration affects writer behavior. To add file metadata 
properties, use {@link
+   * #meta(String, String)}.
+   *
+   * @param property a writer config property name
+   * @param value config value
+   * @return this for method chaining
+   */
+  A set(String property, String value);
+
+  /**
+   * Set a file metadata property.
+   *
+   * <p>Metadata properties are written into file metadata. To alter a writer 
configuration
+   * property, use {@link #set(String, String)}.
+   *
+   * @param property a file metadata property name
+   * @param value config value
+   * @return this for method chaining
+   */
+  A meta(String property, String value);
+
+  /** Sets the metrics configuration used for collecting column metrics for 
the created file. */
+  A metricsConfig(MetricsConfig newMetricsConfig);
+
+  /** Overwrite the file if it already exists. The default value is 
<code>false</code>. */
+  A overwrite(boolean enabled);
+
+  /**
+   * Sets the encryption key used for writing the file. If encryption is not
+   * supported by the writer, then an exception should be thrown.
+   */
+  default A fileEncryptionKey(ByteBuffer encryptionKey) {
+    throw new UnsupportedOperationException("Not supported");
+  }
+
+  /**
+   * Sets the additional authentication data (AAD) prefix used for writing the
+   * file. If encryption is not supported by the writer, then an exception
+   * should be thrown.
+   */
+  default A aADPrefix(ByteBuffer aadPrefix) {
+    throw new UnsupportedOperationException("Not supported");
+  }
+
+  /** Sets the engine native schema for the appender. */
+  E engineSchema(E newEngineSchema);

Review Comment:
   This part is a little confusing — could we add an example?



##########
core/src/main/java/org/apache/iceberg/io/datafile/ReadBuilder.java:
##########
@@ -0,0 +1,139 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.iceberg.io.datafile;
+
+import java.nio.ByteBuffer;
+import java.util.Map;
+import org.apache.iceberg.Schema;
+import org.apache.iceberg.expressions.Expression;
+import org.apache.iceberg.io.CloseableIterable;
+import org.apache.iceberg.mapping.NameMapping;
+
+/**
+ * Builder API for reading Iceberg data files.
+ *
+ * @param <R> type of the reader
+ * @param <F> type of the records which are filtered by {@link DeleteFilter}
+ */
+public interface ReadBuilder<R extends ReadBuilder<R, F>, F> {
+  /**
+   * Restricts the read to the given range: [start, start + length).
+   *
+   * @param newStart the start position for this read
+   * @param newLength the length of the range this read should scan
+   */
+  R split(long newStart, long newLength);
+
+  /** Read only the given columns. */
+  R project(Schema newSchema);
+
+  /**
+   * Sets whether the reader is case-sensitive when matching column names.
+   * Readers might decide not to
+   * implement this feature. The default behavior is case-sensitive.
+   */
+  default R caseInsensitive() {
+    return caseSensitive(false);
+  }
+
+  default R caseSensitive(boolean newCaseSensitive) {
+    // Just ignore case sensitivity if not available
+    return (R) this;
+  }
+
+  /**
+   * Enables record filtering. Some readers might not be able to do reader 
side filtering. In this
+   * case the reader might decide on returning every row. It is the caller's 
responsibility to apply
+   * the filter again.
+   */
+  default R filterRecords(boolean newFilterRecords) {
+    // Skip filtering if not available
+    return (R) this;
+  }
+
+  /**
+   * Pushes down the {@link Expression} filter for the reader to prevent 
reading unnecessary
+   * records. Some readers might not be able to apply some parts of the
+   * expression. In this case the
+   * reader might return unfiltered or partially filtered rows. It is the 
caller's responsibility to
+   * apply the filter again.
+   */
+  default R filter(Expression newFilter) {
+    // Skip filtering if not available
+    return (R) this;
+  }
+
+  /**
+   * Sets configuration key/value pairs for the reader. Reader builders could 
ignore configuration
+   * keys not known for them.
+   */
+  R set(String key, String value);
+
+  /** Enables reusing the containers returned by the reader. Decreases 
pressure on GC. */
+  default R reuseContainers() {
+    return reuseContainers(true);
+  }
+
+  /**
+   * Reusing the containers returned by the reader decreases pressure on GC.
+   * Readers could decide to
+   * ignore the user-provided setting if it is not supported by them.
+   */
+  R reuseContainers(boolean newReuseContainers);
+
+  /** Sets the batch size for vectorized readers. */
+  default R recordsPerBatch(int numRowsPerBatch) {
+    throw new UnsupportedOperationException("Not supported");
+  }
+
+  /**
+   * Accessors for constant field values. Used for calculating values in the
+   * result which come
+   * from metadata, and not from the data files themselves.
+   */
+  R idToConstant(Map<Integer, ?> newIdConstant);
+
+  /**
+   * Used for filtering out deleted records on the reader level. If delete 
filtering is not
+   * supported by the reader then the delete filter is ignored, and unfiltered 
results are returned.
+   * It is the caller's responsibility to apply the filter again.
+   */
+  default R withDeleteFilter(DeleteFilter<F> newDeleteFilter) {

Review Comment:
   I'm not sure if we should pass this down to a file format reader.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: issues-unsubscr...@iceberg.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscr...@iceberg.apache.org
For additional commands, e-mail: issues-h...@iceberg.apache.org

Reply via email to