lirui-apache commented on a change in pull request #3186:
URL: https://github.com/apache/iceberg/pull/3186#discussion_r716679524



##########
File path: spark/src/main/java/org/apache/iceberg/spark/source/BatchDataReader.java
##########
@@ -59,6 +66,80 @@
     this.batchSize = size;
   }
 
+  protected Map<Integer, ?> findMoreCompoundConstants(Schema tableSchema, Map<Integer, ?> idToConstant) {
+    Map<Integer, Object> compoundTypeConstants = new HashMap<>();

Review comment:
       Why do we need a new map here?
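
       For illustration only (not code from this PR): if the result map were seeded from `idToConstant` up front, the separate accumulator and the trailing `putAll` would not be needed. Rough sketch, assuming the visitor is changed to write into the map it is given:

```java
Map<Integer, Object> constants = new HashMap<>(idToConstant);
for (Types.NestedField nestedField : tableSchema.columns()) {
  visitCompoundTypesByDFS(nestedField, constants);  // visitor adds nested-type constants in place
}
return constants;
```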

##########
File path: spark/src/main/java/org/apache/iceberg/spark/source/BatchDataReader.java
##########
@@ -59,6 +66,80 @@
     this.batchSize = size;
   }
 
+  protected Map<Integer, ?> findMoreCompoundConstants(Schema tableSchema, Map<Integer, ?> idToConstant) {
+    Map<Integer, Object> compoundTypeConstants = new HashMap<>();
+    List<Types.NestedField> columns = tableSchema.columns();
+
+    for (Types.NestedField nestedField: columns) {
+      compoundTypeConstants = visitCompoundTypesByDFS(nestedField, idToConstant);
+    }
+    compoundTypeConstants.putAll(idToConstant);
+    return compoundTypeConstants;
+  }
+
+  protected boolean containsCompoundType(Schema tableSchema) {
+    List<Types.NestedField> columns = tableSchema.columns();
+    for(Types.NestedField nestedField: columns) {
+      if (nestedField.type().isStructType() || nestedField.type().isMapType() || nestedField.type().isListType()) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  private final Map<Integer, Object> moreConstants = new HashMap<>();
+
+  protected Map<Integer, Object> visitCompoundTypesByDFS(Types.NestedField nestedField, Map<Integer, ?> idToConstant) {
+    switch (nestedField.type().typeId()) {
+      case STRUCT:
+        List<Types.NestedField> childFieldsInStruct = nestedField.type().asStructType().fields();
+        List<Integer> childFieldID = new ArrayList<>();
+        for (Types.NestedField childField :childFieldsInStruct) {
+          visitCompoundTypesByDFS(childField, idToConstant);
+          childFieldID.add(childField.fieldId());
+        }
+        if (idToConstant.keySet().containsAll(childFieldID)) {
+          Object[] objects = new Object[childFieldID.size()];
+          for (int i =0; i < objects.length; i++) {
+            objects[i] = idToConstant.get(childFieldID.get(i));
+          }
+          moreConstants.put(nestedField.fieldId(), new GenericInternalRow(objects));

Review comment:
       Why not just reuse `idToConstant`?
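
       For illustration only, not an exact suggestion: the STRUCT branch could collect into the map it already reads from, assuming the parameter is widened to a mutable `Map<Integer, Object>`, which would remove the need for the `moreConstants` field:

```java
case STRUCT:
  List<Types.NestedField> children = nestedField.type().asStructType().fields();
  List<Integer> childIds = new ArrayList<>();
  for (Types.NestedField child : children) {
    visitCompoundTypesByDFS(child, idToConstant);  // resolve nested structs bottom-up
    childIds.add(child.fieldId());
  }
  if (idToConstant.keySet().containsAll(childIds)) {
    Object[] values = childIds.stream().map(idToConstant::get).toArray();
    // put the assembled struct constant back into the same map instead of `moreConstants`
    idToConstant.put(nestedField.fieldId(), new GenericInternalRow(values));
  }
  break;
```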

##########
File path: spark/src/main/java/org/apache/iceberg/spark/source/BatchDataReader.java
##########
@@ -59,6 +66,80 @@
     this.batchSize = size;
   }
 
+  protected Map<Integer, ?> findMoreCompoundConstants(Schema tableSchema, Map<Integer, ?> idToConstant) {
+    Map<Integer, Object> compoundTypeConstants = new HashMap<>();
+    List<Types.NestedField> columns = tableSchema.columns();
+
+    for (Types.NestedField nestedField: columns) {
+      compoundTypeConstants = visitCompoundTypesByDFS(nestedField, idToConstant);
+    }
+    compoundTypeConstants.putAll(idToConstant);
+    return compoundTypeConstants;
+  }
+
+  protected boolean containsCompoundType(Schema tableSchema) {

Review comment:
       I don't think we need this. `visitCompoundTypesByDFS` skips primitive 
types anyway.
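
       To illustrate the point (sketch only, not the PR's code): a switch that only has nested-type cases is already a no-op for primitive columns, so the extra pre-check adds nothing:

```java
switch (nestedField.type().typeId()) {
  case STRUCT:
  case MAP:
  case LIST:
    // nested-type handling goes here
    break;
  default:
    // primitive column: nothing to add, so a containsCompoundType() guard is redundant
    break;
}
```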

##########
File path: spark/src/main/java/org/apache/iceberg/spark/source/RowDataReader.java
##########
@@ -78,6 +89,81 @@
     return deletes.filter(open(task, requiredSchema, idToConstant)).iterator();
   }
 
+  protected boolean containsCompoundType(Schema tableSchema) {
+    List<Types.NestedField> columns = tableSchema.columns();
+    for(Types.NestedField nestedField: columns) {
+      if (nestedField.type().isStructType() || nestedField.type().isMapType() || nestedField.type().isListType()) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  protected Map<Integer, ?> findMoreCompoundConstants(Schema tableSchema, Map<Integer, ?> idToConstant) {

Review comment:
       I think we can extract this into some util method. No need to have 
separate implementations for vectorized/non-vectorized readers.
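
       Something along these lines, for illustration (the class and method names below are hypothetical, not existing Iceberg code):

```java
// Hypothetical shared helper; both BatchDataReader and RowDataReader would call it
// instead of keeping their own copies of findMoreCompoundConstants/visitCompoundTypesByDFS.
class NestedConstantsUtil {
  private NestedConstantsUtil() {
  }

  static Map<Integer, Object> withCompoundConstants(Schema schema, Map<Integer, ?> idToConstant) {
    Map<Integer, Object> constants = new HashMap<>(idToConstant);
    for (Types.NestedField field : schema.columns()) {
      visitCompoundTypes(field, constants);  // same traversal as in this PR, shared once
    }
    return constants;
  }

  private static void visitCompoundTypes(Types.NestedField field, Map<Integer, Object> constants) {
    // DFS over struct/map/list types, adding derived constants to `constants`
  }
}
```

       Both readers could then just call something like `NestedConstantsUtil.withCompoundConstants(requiredSchema, idToConstant)`.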

##########
File path: spark/src/main/java/org/apache/iceberg/spark/source/BatchDataReader.java
##########
@@ -59,6 +66,80 @@
     this.batchSize = size;
   }
 
+  protected Map<Integer, ?> findMoreCompoundConstants(Schema tableSchema, Map<Integer, ?> idToConstant) {
+    Map<Integer, Object> compoundTypeConstants = new HashMap<>();
+    List<Types.NestedField> columns = tableSchema.columns();
+
+    for (Types.NestedField nestedField: columns) {
+      compoundTypeConstants = visitCompoundTypesByDFS(nestedField, idToConstant);
+    }
+    compoundTypeConstants.putAll(idToConstant);
+    return compoundTypeConstants;
+  }
+
+  protected boolean containsCompoundType(Schema tableSchema) {
+    List<Types.NestedField> columns = tableSchema.columns();
+    for(Types.NestedField nestedField: columns) {
+      if (nestedField.type().isStructType() || nestedField.type().isMapType() || nestedField.type().isListType()) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  private final Map<Integer, Object> moreConstants = new HashMap<>();
+
+  protected Map<Integer, Object> visitCompoundTypesByDFS(Types.NestedField nestedField, Map<Integer, ?> idToConstant) {
+    switch (nestedField.type().typeId()) {
+      case STRUCT:
+        List<Types.NestedField> childFieldsInStruct = nestedField.type().asStructType().fields();
+        List<Integer> childFieldID = new ArrayList<>();
+        for (Types.NestedField childField :childFieldsInStruct) {
+          visitCompoundTypesByDFS(childField, idToConstant);
+          childFieldID.add(childField.fieldId());
+        }
+        if (idToConstant.keySet().containsAll(childFieldID)) {
+          Object[] objects = new Object[childFieldID.size()];
+          for (int i =0; i < objects.length; i++) {
+            objects[i] = idToConstant.get(childFieldID.get(i));
+          }
+          moreConstants.put(nestedField.fieldId(), new GenericInternalRow(objects));
+        }
+        break;
+      case MAP:
+        int keyId = nestedField.type().asMapType().keyId();
+        int valueId = nestedField.type().asMapType().valueId();
+        visitCompoundTypesByDFS(nestedField, idToConstant);

Review comment:
       Will this lead to infinite recursion?
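
       For illustration only: as written, the MAP case is re-entered with identical arguments. To terminate, the branch presumably needs to descend into the map's key and value fields rather than call itself on the same `nestedField`. A rough sketch:

```java
case MAP:
  // recurse into the key and value fields, not into `nestedField` itself
  for (Types.NestedField child : nestedField.type().asMapType().fields()) {
    visitCompoundTypesByDFS(child, idToConstant);
  }
  break;
```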




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


