maxdebayser commented on code in PR #7831:
URL: https://github.com/apache/iceberg/pull/7831#discussion_r1286039886


##########
python/tests/io/test_pyarrow.py:
##########
@@ -1345,3 +1374,655 @@ def test_pyarrow_wrap_fsspec(example_task: 
FileScanTask, table_schema_simple: Sc
 bar: [[1,2,3]]
 baz: [[true,false,null]]"""
     )
+
+
def construct_test_table() -> Tuple[Any, Any, Union[TableMetadataV1, TableMetadataV2]]:
    """Write a small in-memory Parquet file and return its raw bytes, its
    Parquet file metadata, and the matching parsed Iceberg table metadata.

    The table has four columns — string, float, list<long>, map<long, long> —
    each with one null row (and a NaN in the float column) so that the
    collected statistics exercise both value and null counts.
    """
    schema_fields = [
        {"id": 1, "name": "strings", "required": False, "type": "string"},
        {"id": 2, "name": "floats", "required": False, "type": "float"},
        {
            "id": 3,
            "name": "list",
            "required": False,
            "type": {"type": "list", "element-id": 5, "element": "long", "element-required": False},
        },
        {
            "id": 4,
            "name": "maps",
            "required": False,
            "type": {
                "type": "map",
                "key-id": 6,
                "key": "long",
                "value-id": 7,
                "value": "long",
                "value-required": False,
            },
        },
    ]
    raw_metadata = {
        "format-version": 2,
        "location": "s3://bucket/test/location",
        "last-column-id": 7,
        "current-schema-id": 0,
        "schemas": [{"type": "struct", "schema-id": 0, "fields": schema_fields}],
        "default-spec-id": 0,
        "partition-specs": [{"spec-id": 0, "fields": []}],
        "properties": {},
    }

    table_metadata = TableMetadataUtil.parse_obj(raw_metadata)
    arrow_schema = schema_to_pyarrow(table_metadata.schemas[0])

    columns: Dict[str, Any] = {
        "strings": ["zzzzzzzzzzzzzzzzzzzz", "rrrrrrrrrrrrrrrrrrrr", None, "aaaaaaaaaaaaaaaaaaaa"],
        "floats": [3.14, math.nan, 1.69, 100],
        "list": [[1, 2, 3], [4, 5, 6], None, [7, 8, 9]],
        "maps": [{1: 2, 3: 4}, None, {5: 6}, {}],
    }
    table = pa.Table.from_pydict(columns, schema=arrow_schema)

    metadata_collector: List[Any] = []
    with pa.BufferOutputStream() as buffer:
        with pq.ParquetWriter(buffer, table.schema, metadata_collector=metadata_collector) as writer:
            writer.write_table(table)

        # The writer's context exit has flushed its footer metadata into
        # metadata_collector; the buffer still holds the full file bytes.
        return buffer.getvalue(), metadata_collector[0], table_metadata
+
+
def test_record_count() -> None:
    """The record count equals the four rows written by construct_test_table."""
    contents, parquet_metadata, table_metadata = construct_test_table()

    data_file = DataFile()
    fill_parquet_file_metadata(data_file, parquet_metadata, len(contents), table_metadata)

    assert data_file.record_count == 4
+
+
def test_file_size() -> None:
    """The file size recorded on the DataFile is the size passed in."""
    contents, parquet_metadata, table_metadata = construct_test_table()

    data_file = DataFile()
    fill_parquet_file_metadata(data_file, parquet_metadata, len(contents), table_metadata)

    assert data_file.file_size_in_bytes == len(contents)
+
+
def test_value_counts() -> None:
    """Each leaf field id reports the number of values written to it."""
    contents, parquet_metadata, table_metadata = construct_test_table()

    data_file = DataFile()
    fill_parquet_file_metadata(data_file, parquet_metadata, len(contents), table_metadata)

    value_counts = data_file.value_counts
    assert len(value_counts) == 5
    assert value_counts[1] == 4  # strings
    assert value_counts[2] == 4  # floats
    assert value_counts[5] == 10  # 3 lists with 3 items and a None value
    assert value_counts[6] == 5  # map keys
    assert value_counts[7] == 5  # map values
+
+
def test_column_sizes() -> None:
    """Per-column byte sizes are collected, keyed by leaf field id."""
    contents, parquet_metadata, table_metadata = construct_test_table()

    data_file = DataFile()
    fill_parquet_file_metadata(data_file, parquet_metadata, len(contents), table_metadata)

    column_sizes = data_file.column_sizes
    assert len(column_sizes) == 5
    # These exact byte counts are an artifact of how write_table encodes the
    # columns; they will change if pyarrow's encoding defaults change.
    assert column_sizes[1] == 116
    assert column_sizes[2] == 89
    assert column_sizes[5] == 151
    assert column_sizes[6] == 117
    assert column_sizes[7] == 117
+
+
def test_null_and_nan_counts() -> None:
    """Null counts come from the parquet column statistics; NaN counts do not."""
    contents, parquet_metadata, table_metadata = construct_test_table()

    data_file = DataFile()
    fill_parquet_file_metadata(data_file, parquet_metadata, len(contents), table_metadata)

    null_counts = data_file.null_value_counts
    assert len(null_counts) == 5
    assert null_counts[1] == 1  # one None string
    assert null_counts[2] == 0  # NaN is not a null
    assert null_counts[5] == 1  # one None list
    assert null_counts[6] == 2  # None map contributes to both key and value
    assert null_counts[7] == 2

    # NOTE: pyarrow does not include NaN counts in the parquet statistics,
    # so nan_value_counts cannot be asserted here:
    # assert len(datafile.nan_value_counts)  == 3
    # assert datafile.nan_value_counts[1]    == 0
    # assert datafile.nan_value_counts[2]    == 1
    # assert datafile.nan_value_counts[3]    == 0
+
+
def test_bounds() -> None:
    """With the default metrics mode (truncate(16)) string bounds are cut to
    16 characters, and the upper bound's last character is incremented
    ('z' -> '{') to keep it an upper bound after truncation."""
    contents, parquet_metadata, table_metadata = construct_test_table()

    data_file = DataFile()
    fill_parquet_file_metadata(data_file, parquet_metadata, len(contents), table_metadata)

    lower = data_file.lower_bounds
    assert len(lower) == 5
    assert lower[1].decode() == "aaaaaaaaaaaaaaaa"
    assert lower[2] == STRUCT_FLOAT.pack(1.69)
    assert lower[5] == STRUCT_INT64.pack(1)
    assert lower[6] == STRUCT_INT64.pack(1)
    assert lower[7] == STRUCT_INT64.pack(2)

    upper = data_file.upper_bounds
    assert len(upper) == 5
    assert upper[1].decode() == "zzzzzzzzzzzzzzz{"
    assert upper[2] == STRUCT_FLOAT.pack(100)
    assert upper[5] == STRUCT_INT64.pack(9)
    assert upper[6] == STRUCT_INT64.pack(5)
    assert upper[7] == STRUCT_INT64.pack(6)
+
+
def test_metrics_mode_parsing() -> None:
    """Metrics mode names parse case-insensitively; malformed or
    non-positive truncate lengths raise ValueError."""
    for raw, expected in [
        ("none", MetricsMode(MetricModeTypes.NONE)),
        ("nOnE", MetricsMode(MetricModeTypes.NONE)),
        ("counts", MetricsMode(MetricModeTypes.COUNTS)),
        ("Counts", MetricsMode(MetricModeTypes.COUNTS)),
        ("full", MetricsMode(MetricModeTypes.FULL)),
        ("FuLl", MetricsMode(MetricModeTypes.FULL)),
        ("truncate(16)", MetricsMode(MetricModeTypes.TRUNCATE, 16)),
        ("trUncatE(16)", MetricsMode(MetricModeTypes.TRUNCATE, 16)),
        ("trUncatE(7)", MetricsMode(MetricModeTypes.TRUNCATE, 7)),
        ("trUncatE(07)", MetricsMode(MetricModeTypes.TRUNCATE, 7)),
    ]:
        assert match_metrics_mode(raw) == expected

    # Leading whitespace is not stripped.
    with pytest.raises(ValueError) as exc_info:
        match_metrics_mode(" Full")
    assert "Unsupported metrics mode:  Full" in str(exc_info.value)

    with pytest.raises(ValueError) as exc_info:
        match_metrics_mode("trUncatE(-7)")
    assert "Malformed truncate: trUncatE(-7)" in str(exc_info.value)

    with pytest.raises(ValueError) as exc_info:
        match_metrics_mode("trUncatE(0)")
    assert "Truncation length must be larger than 0" in str(exc_info.value)
+
+
def test_metrics_mode_none() -> None:
    """Metrics mode "none" suppresses all counts and bounds."""
    contents, parquet_metadata, table_metadata = construct_test_table()

    table_metadata.properties["write.metadata.metrics.default"] = "none"
    data_file = DataFile()
    fill_parquet_file_metadata(data_file, parquet_metadata, len(contents), table_metadata)

    assert len(data_file.value_counts) == 0
    assert len(data_file.null_value_counts) == 0
    assert len(data_file.nan_value_counts) == 0
    assert len(data_file.lower_bounds) == 0
    assert len(data_file.upper_bounds) == 0
+
+
def test_metrics_mode_counts() -> None:
    """Metrics mode "counts" keeps value/null counts but drops bounds."""
    contents, parquet_metadata, table_metadata = construct_test_table()

    table_metadata.properties["write.metadata.metrics.default"] = "counts"
    data_file = DataFile()
    fill_parquet_file_metadata(data_file, parquet_metadata, len(contents), table_metadata)

    assert len(data_file.value_counts) == 5
    assert len(data_file.null_value_counts) == 5
    assert len(data_file.nan_value_counts) == 0
    assert len(data_file.lower_bounds) == 0
    assert len(data_file.upper_bounds) == 0
+
+
def test_metrics_mode_full() -> None:
    """Metrics mode "full" keeps counts and stores untruncated bounds."""
    contents, parquet_metadata, table_metadata = construct_test_table()

    table_metadata.properties["write.metadata.metrics.default"] = "full"
    data_file = DataFile()
    fill_parquet_file_metadata(data_file, parquet_metadata, len(contents), table_metadata)

    assert len(data_file.value_counts) == 5
    assert len(data_file.null_value_counts) == 5
    assert len(data_file.nan_value_counts) == 0

    lower = data_file.lower_bounds
    assert len(lower) == 5
    # Full 20-character strings, no truncation.
    assert lower[1].decode() == "aaaaaaaaaaaaaaaaaaaa"
    assert lower[2] == STRUCT_FLOAT.pack(1.69)
    assert lower[5] == STRUCT_INT64.pack(1)
    assert lower[6] == STRUCT_INT64.pack(1)
    assert lower[7] == STRUCT_INT64.pack(2)

    upper = data_file.upper_bounds
    assert len(upper) == 5
    assert upper[1].decode() == "zzzzzzzzzzzzzzzzzzzz"
    assert upper[2] == STRUCT_FLOAT.pack(100)
    assert upper[5] == STRUCT_INT64.pack(9)
    assert upper[6] == STRUCT_INT64.pack(5)
    assert upper[7] == STRUCT_INT64.pack(6)
+
+
def test_metrics_mode_non_default_trunc() -> None:
    """truncate(2) cuts string bounds to two characters; the upper bound's
    last character is incremented ('z' -> '{') to stay an upper bound."""
    contents, parquet_metadata, table_metadata = construct_test_table()

    table_metadata.properties["write.metadata.metrics.default"] = "truncate(2)"
    data_file = DataFile()
    fill_parquet_file_metadata(data_file, parquet_metadata, len(contents), table_metadata)

    assert len(data_file.value_counts) == 5
    assert len(data_file.null_value_counts) == 5
    assert len(data_file.nan_value_counts) == 0

    lower = data_file.lower_bounds
    assert len(lower) == 5
    assert lower[1].decode() == "aa"
    assert lower[2] == STRUCT_FLOAT.pack(1.69)
    assert lower[5] == STRUCT_INT64.pack(1)
    assert lower[6] == STRUCT_INT64.pack(1)
    assert lower[7] == STRUCT_INT64.pack(2)

    upper = data_file.upper_bounds
    assert len(upper) == 5
    assert upper[1].decode() == "z{"
    assert upper[2] == STRUCT_FLOAT.pack(100)
    assert upper[5] == STRUCT_INT64.pack(9)
    assert upper[6] == STRUCT_INT64.pack(5)
    assert upper[7] == STRUCT_INT64.pack(6)
+
+
def test_column_metrics_mode() -> None:
    # Per-column overrides on top of a truncate(2) default:
    # - "none" for `strings` (field id 1) drops all of its metrics,
    # - "counts" for `list.element` (field id 5) keeps counts but no bounds.
    (file_bytes, metadata, table_metadata) = construct_test_table()

    datafile = DataFile()
    table_metadata.properties["write.metadata.metrics.default"] = "truncate(2)"
    table_metadata.properties["write.metadata.metrics.column.strings"] = "none"
    table_metadata.properties["write.metadata.metrics.column.list.element"] = "counts"
    fill_parquet_file_metadata(
        datafile,
        metadata,
        len(file_bytes),
        table_metadata,
    )

    # Only field id 1 is fully excluded, leaving 4 of the 5 leaf fields.
    assert len(datafile.value_counts) == 4
    assert len(datafile.null_value_counts) == 4
    assert len(datafile.nan_value_counts) == 0

    # Bounds remain for ids 2, 6, 7 only: id 1 is "none", id 5 is "counts".
    assert len(datafile.lower_bounds) == 3
    assert datafile.lower_bounds[2] == STRUCT_FLOAT.pack(1.69)
    assert 5 not in datafile.lower_bounds

Review Comment:
   Wait, there's no integer being truncated here.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to