liukun4515 commented on code in PR #2960:
URL: https://github.com/apache/arrow-datafusion/pull/2960#discussion_r928940950


##########
datafusion/core/src/physical_optimizer/pruning.rs:
##########
@@ -1418,6 +1452,74 @@ mod tests {
         Ok(())
     }
 
+    #[test]
+    fn prune_decimal_data() {
+        // decimal(9,2)
+        let schema = Arc::new(Schema::new(vec![Field::new(
+            "s1",
+            DataType::Decimal(9, 2),
+            true,
+        )]));
+        // s1 > 5
+        let expr = col("s1").gt(lit(ScalarValue::Decimal128(Some(500), 9, 2)));
+        // If the data was written by Spark, the physical data type in the parquet file is INT32,
+        // so we use INT32 statistics here.
+        let statistics = TestStatistics::new().with(
+            "s1",
+            ContainerStats::new_i32(
+                vec![Some(0), Some(4), None, Some(3)], // min
+                vec![Some(5), Some(6), Some(4), None], // max
+            ),
+        );
+        let p = PruningPredicate::try_new(expr, schema).unwrap();
+        let result = p.prune(&statistics).unwrap();
+        let expected = vec![false, true, false, true];
+        assert_eq!(result, expected);
+
+        // decimal(18,2)
+        let schema = Arc::new(Schema::new(vec![Field::new(
+            "s1",
+            DataType::Decimal(18, 2),
+            true,
+        )]));
+        // s1 > 5
+        let expr = col("s1").gt(lit(ScalarValue::Decimal128(Some(500), 18, 
2)));
+        // If the data was written by Spark, the physical data type in the parquet file is INT64,
+        // so we use INT64 statistics here.
+        let statistics = TestStatistics::new().with(
+            "s1",
+            ContainerStats::new_i64(
+                vec![Some(0), Some(4), None, Some(3)], // min
+                vec![Some(5), Some(6), Some(4), None], // max
+            ),
+        );
+        let p = PruningPredicate::try_new(expr, schema).unwrap();
+        let result = p.prune(&statistics).unwrap();
+        let expected = vec![false, true, false, true];
+        assert_eq!(result, expected);
+
+        // decimal(23,2)
+        let schema = Arc::new(Schema::new(vec![Field::new(
+            "s1",
+            DataType::Decimal(23, 2),
+            true,
+        )]));
+        // s1 > 5
+        let expr = col("s1").gt(lit(ScalarValue::Decimal128(Some(500), 23, 
2)));
+        let statistics = TestStatistics::new().with(
+            "s1",
+            ContainerStats::new_decimal128(
+                vec![Some(0), Some(400), None, Some(300)], // min
+                vec![Some(500), Some(600), Some(400), None], // max
+                23,
+                2,
+            ),
+        );

Review Comment:
   This test case only exercises the pruning logic.
   I will file a follow-up pull request to fix https://github.com/apache/arrow-datafusion/issues/2962 with parquet row group filtering/pruning.
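
   For readers following along, here is a minimal standalone sketch of the min/max check this test exercises. It is not the DataFusion `PruningPredicate` API; `prune_gt` is a hypothetical helper used only to illustrate the idea.

   ```rust
   /// Sketch: for a `col > literal` predicate, a container can only be pruned
   /// when its max value is known to be <= the literal; an unknown max must be
   /// kept because the container might still hold matching rows.
   fn prune_gt(maxes: &[Option<i128>], literal: i128) -> Vec<bool> {
       maxes
           .iter()
           .map(|max| match max {
               // Keep the container only if its max might exceed the literal.
               Some(max) => *max > literal,
               // Unknown max: nothing can be proven, so keep the container.
               None => true,
           })
           .collect()
   }

   fn main() {
       // Same maxima as the decimal(23,2) case above, as unscaled values at
       // scale 2: 5.00, 6.00, 4.00, unknown.
       let maxes = vec![Some(500), Some(600), Some(400), None];
       // `s1 > 5.00` compares against the unscaled value 500.
       assert_eq!(prune_gt(&maxes, 500), vec![false, true, false, true]);
   }
   ```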


