danepitkin commented on code in PR #35570:
URL: https://github.com/apache/arrow/pull/35570#discussion_r1317322786


##########
java/dataset/src/test/java/org/apache/arrow/dataset/substrait/TestAceroSubstraitConsumer.java:
##########
@@ -204,4 +205,167 @@ public void testRunBinaryQueryNamedTableNation() throws Exception {
       }
     }
   }
+
+  @Test
+  public void testBaseParquetReadWithExtendedExpressionsFilter() throws Exception {
+    final Schema schema = new Schema(Arrays.asList(
+        Field.nullable("id", new ArrowType.Int(32, true)),
+        Field.nullable("name", new ArrowType.Utf8())
+    ), null);
+    // Substrait Extended Expression: Filter:
+    // Expression 01: WHERE ID < 20
+    String binarySubstraitExpressionFilter = "Ch4IARIaL2Z1bmN0aW9uc19jb21wYXJpc29uLnlhbWwSEhoQCAIQAhoKbHQ6YW55X2F" +
+        "ueRo3ChwaGggCGgQKAhABIggaBhIECgISACIGGgQKAigUGhdmaWx0ZXJfaWRfbG93ZXJfdGhhbl8yMCIaCgJJRAoETkFNRRIOCgQqAhA" +
+        "BCgRiAhABGAI=";
+    byte[] arrayByteSubstraitExpressionFilter = Base64.getDecoder().decode(binarySubstraitExpressionFilter);
+    ByteBuffer substraitExpressionFilter = ByteBuffer.allocateDirect(arrayByteSubstraitExpressionFilter.length);
+    substraitExpressionFilter.put(arrayByteSubstraitExpressionFilter);
+    ParquetWriteSupport writeSupport = ParquetWriteSupport
+        .writeTempFile(AVRO_SCHEMA_USER, TMP.newFolder(), 19, "value_19", 1, "value_1",
+            11, "value_11", 21, "value_21", 45, "value_45");
+    ScanOptions options = new ScanOptions.Builder(/*batchSize*/ 32768)
+        .columns(Optional.empty())
+        .substraitExpressionFilter(substraitExpressionFilter)
+        .build();
+    try (
+        DatasetFactory datasetFactory = new FileSystemDatasetFactory(rootAllocator(), NativeMemoryPool.getDefault(),
+            FileFormat.PARQUET, writeSupport.getOutputURI());
+        Dataset dataset = datasetFactory.finish();
+        Scanner scanner = dataset.newScan(options);
+        ArrowReader reader = scanner.scanBatches()
+    ) {
+      assertEquals(schema.getFields(), reader.getVectorSchemaRoot().getSchema().getFields());
+      int rowcount = 0;
+      while (reader.loadNextBatch()) {
+        rowcount += reader.getVectorSchemaRoot().getRowCount();
+      }
+      assertEquals(3, rowcount);
+    }
+  }
+
+  @Test(expected = RuntimeException.class)
+  public void testBaseParquetReadWithExtendedExpressionsFilterException() throws Exception {
+    final Schema schema = new Schema(Arrays.asList(
+        Field.nullable("id", new ArrowType.Int(32, true)),
+        Field.nullable("name", new ArrowType.Utf8())
+    ), null);
+    // Substrait Extended Expression: Filter:
+    // Expression 01: WHERE ID < 20
+    // Expression 02: WHERE ID < 10
+    String binarySubstraitExpressionFilter = "Ch4IARIaL2Z1bmN0aW9uc19jb21wYXJpc29uLnlhbWwSEhoQCAIQAhoKbHQ6YW5" +
+        "5X2FueRISGhAIAhACGgpsdDphbnlfYW55GjcKHBoaCAIaBAoCEAEiCBoGEgQKAhIAIgYaBAoCKBQaF2ZpbHRlcl9pZF9sb3dlcl9" +
+        "0aGFuXzIwGjcKHBoaCAIaBAoCEAEiCBoGEgQKAhIAIgYaBAoCKAoaF2ZpbHRlcl9pZF9sb3dlcl90aGFuXzEwIhoKAklECgROQU1F" +
+        "Eg4KBCoCEAEKBGICEAEYAg==";
+    byte[] arrayByteSubstraitExpressionFilter = Base64.getDecoder().decode(binarySubstraitExpressionFilter);
+    ByteBuffer substraitExpressionFilter = ByteBuffer.allocateDirect(arrayByteSubstraitExpressionFilter.length);
+    substraitExpressionFilter.put(arrayByteSubstraitExpressionFilter);
+    ParquetWriteSupport writeSupport = ParquetWriteSupport
+        .writeTempFile(AVRO_SCHEMA_USER, TMP.newFolder(), 19, "value_19", 1, "value_1",
+            11, "value_11", 21, "value_21", 45, "value_45");
+    ScanOptions options = new ScanOptions.Builder(/*batchSize*/ 32768)
+        .columns(Optional.empty())
+        .substraitExpressionFilter(substraitExpressionFilter)
+        .build();
+    try (
+        DatasetFactory datasetFactory = new FileSystemDatasetFactory(rootAllocator(), NativeMemoryPool.getDefault(),
+            FileFormat.PARQUET, writeSupport.getOutputURI());
+        Dataset dataset = datasetFactory.finish();
+        Scanner scanner = dataset.newScan(options);
+        ArrowReader reader = scanner.scanBatches()
+    ) {
+      assertEquals(schema.getFields(), reader.getVectorSchemaRoot().getSchema().getFields());
+      int rowcount = 0;
+      while (reader.loadNextBatch()) {
+        rowcount += reader.getVectorSchemaRoot().getRowCount();
+      }
+      assertEquals(3, rowcount);
+    }
+  }
+
+  @Test
+  public void testBaseParquetReadWithExtendedExpressionsProject() throws Exception {
+    final Schema schema = new Schema(Arrays.asList(
+        Field.nullable("add_two_to_column_a", new ArrowType.Int(32, true)),
+        Field.nullable("concat_column_a_and_b", new ArrowType.Utf8())
+    ), null);
+    // Substrait Extended Expression: Project New Column:
+    // Expression ADD: id + 2
+    // Expression CONCAT: name + '-' + name
+    String binarySubstraitExpressionProject = "Ch4IARIaL2Z1bmN0aW9uc19hcml0aG1ldGljLnlhbWwSERoPCAEaC2FkZDppM" +
+        "zJfaTMyEhQaEggCEAEaDGNvbmNhdDp2Y2hhchoxChoaGBoEKgIQASIIGgYSBAoCEgAiBhoECgIoAhoTYWRkX3R3b190b19jb2x1" +
+        "bW5fYRpGCi0aKwgBGgRiAhABIgoaCBIGCgQSAggBIgkaBwoFYgMgLSAiChoIEgYKBBICCAEaFWNvbmNhdF9jb2x1bW5fYV9hbmR" +
+        "fYiIaCgJJRAoETkFNRRIOCgQqAhABCgRiAhABGAI=";
+    byte[] arrayByteSubstraitExpressionProject = Base64.getDecoder().decode(binarySubstraitExpressionProject);
+    ByteBuffer substraitExpressionProject = ByteBuffer.allocateDirect(arrayByteSubstraitExpressionProject.length);
+    substraitExpressionProject.put(arrayByteSubstraitExpressionProject);
+    ParquetWriteSupport writeSupport = ParquetWriteSupport
+        .writeTempFile(AVRO_SCHEMA_USER, TMP.newFolder(), 19, "value_19", 1, "value_1",
+            11, "value_11", 21, "value_21", 45, "value_45");
+    ScanOptions options = new ScanOptions.Builder(/*batchSize*/ 32768)
+        .columns(Optional.empty())
+        .substraitExpressionProjection(substraitExpressionProject)
+        .build();
+    try (
+        DatasetFactory datasetFactory = new FileSystemDatasetFactory(rootAllocator(), NativeMemoryPool.getDefault(),
+            FileFormat.PARQUET, writeSupport.getOutputURI());
+        Dataset dataset = datasetFactory.finish();
+        Scanner scanner = dataset.newScan(options);
+        ArrowReader reader = scanner.scanBatches()
+    ) {
+      assertEquals(schema.getFields(), reader.getVectorSchemaRoot().getSchema().getFields());
+      int rowcount = 0;
+      while (reader.loadNextBatch()) {
+        rowcount += reader.getVectorSchemaRoot().getRowCount();
+      }
+      assertEquals(5, rowcount);
+    }
+  }
+
+  @Test
+  public void testBaseParquetReadWithExtendedExpressionsProjectAndFilter() throws Exception {

Review Comment:
   I see you added a helper function for base64-encoded filters. I was actually thinking of moving most of the test code into that helper, like this (pseudo code):
   
   ```
   private ArrowReader scanParquetFileUsingSubstrait(Optional<String> base64EncodedSubstraitFilter,
       Optional<String> base64EncodedSubstraitProjection) throws Exception {
     final Schema schema = new Schema(Arrays.asList(
         Field.nullable("id", new ArrowType.Int(32, true)),
         Field.nullable("name", new ArrowType.Utf8())
     ), null);
     ByteBuffer substraitExpressionFilter = getByteBuffer(base64EncodedSubstraitFilter);
     ByteBuffer substraitExpressionProjection = getByteBuffer(base64EncodedSubstraitProjection);
     ParquetWriteSupport writeSupport = ParquetWriteSupport
         .writeTempFile(AVRO_SCHEMA_USER, TMP.newFolder(), 19, "value_19", 1, "value_1",
             11, "value_11", 21, "value_21", 45, "value_45");
     ScanOptions options = new ScanOptions.Builder(/*batchSize*/ 32768)
         .columns(Optional.empty())
         .substraitExpressionFilter(substraitExpressionFilter)
         .substraitExpressionProjection(substraitExpressionProjection)
         .build();
     try (
         DatasetFactory datasetFactory = new FileSystemDatasetFactory(rootAllocator(), NativeMemoryPool.getDefault(),
             FileFormat.PARQUET, writeSupport.getOutputURI());
         Dataset dataset = datasetFactory.finish();
         Scanner scanner = dataset.newScan(options)
     ) {
       // The reader is intentionally not a try-with-resources resource here:
       // it is returned open, and the caller closes it.
       ArrowReader reader = scanner.scanBatches();
       assertEquals(schema.getFields(), reader.getVectorSchemaRoot().getSchema().getFields());
       return reader;
     }
   }

   @Test
   public void testBaseParquetReadWithExtendedExpressionsFilter() throws Exception {
     // Substrait Extended Expression: Filter:
     // Expression 01: WHERE ID < 20
     String base64EncodedSubstraitFilter = "Ch4IARIaL2Z1bmN0aW9uc19jb21wYXJpc29uLnlhbWwSEhoQCAIQAhoKbHQ6YW55X2F" +
         "ueRo3ChwaGggCGgQKAhABIggaBhIECgISACIGGgQKAigUGhdmaWx0ZXJfaWRfbG93ZXJfdGhhbl8yMCIaCgJJRAoETkFNRRIOCgQqAhA" +
         "BCgRiAhABGAI=";
     try (
         ArrowReader reader = scanParquetFileUsingSubstrait(Optional.of(base64EncodedSubstraitFilter),
             Optional.empty())
     ) {
       while (reader.loadNextBatch()) {
         assertEquals("[19, 1, 11]", reader.getVectorSchemaRoot().getVector("id").toString());
         assertEquals("[value_19, value_1, value_11]",
             reader.getVectorSchemaRoot().getVector("name").toString());
       }
     }
   }

   @Test
   ...
   ```
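
   For reference, the `getByteBuffer(Optional<String>)` helper called above could follow the same decode/allocateDirect/put pattern the current tests use inline. This is just a sketch; the helper name and the assumption that a null `ByteBuffer` means "no filter/projection set" are illustrative, not the PR's actual code:

   ```
   private ByteBuffer getByteBuffer(Optional<String> base64EncodedExpression) {
     if (!base64EncodedExpression.isPresent()) {
       // Assumption: the ScanOptions builder treats a null buffer as "option not set".
       return null;
     }
     // Decode the base64-encoded Substrait extended expression into a direct buffer.
     byte[] decoded = Base64.getDecoder().decode(base64EncodedExpression.get());
     ByteBuffer buffer = ByteBuffer.allocateDirect(decoded.length);
     buffer.put(decoded);
     return buffer;
   }
   ```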


