alamb commented on code in PR #6262:
URL: https://github.com/apache/arrow-rs/pull/6262#discussion_r1719064615


##########
parquet/tests/arrow_reader/bad_data.rs:
##########
@@ -0,0 +1,125 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+//! Tests that reading invalid parquet files returns an error
+
+use arrow::util::test_util::parquet_test_data;
+use parquet::arrow::arrow_reader::ArrowReaderBuilder;
+use parquet::errors::ParquetError;
+use std::collections::HashSet;
+use std::path::PathBuf;
+
+static KNOWN_FILES: &[&str] = &[
+    "PARQUET-1481.parquet",
+    "ARROW-GH-41321.parquet",
+    "ARROW-RS-GH-6229-DICTHEADER.parquet",
+    "ARROW-RS-GH-6229-LEVELS.parquet",
+    "README.md",
+];
+
+/// Returns the path to 'parquet-testing/bad_data'
+fn bad_data_dir() -> PathBuf {
+    // points to parquet-testing/data
+    let parquet_testing_data = parquet_test_data();
+    PathBuf::from(parquet_testing_data)
+        .parent()
+        .expect("was in parquet-testing/data")
+        .join("bad_data")
+}
+
+#[test]
+// Ensure that when a new file is added to bad_data, a corresponding test is added below.
+fn test_invalid_files() {
+    let known_files: HashSet<_> = KNOWN_FILES.iter().cloned().collect();
+    let mut seen_files = HashSet::new();
+
+    let files = std::fs::read_dir(bad_data_dir()).unwrap();
+
+    for file in files {
+        let file_name = file
+            .unwrap()
+            .path()
+            .file_name()
+            .unwrap()
+            .to_str()
+            .unwrap()
+            .to_string();
+        // If you see this error, please add a test for the new file following the model below
+        assert!(
+            known_files.contains(file_name.as_str()),
+            "Found new file in bad_data, please add test: {file_name}"
+        );
+        seen_files.insert(file_name);
+    }
+    for expected_file in known_files {
+        assert!(
+            seen_files.contains(expected_file),
+            "Expected file not found in bad_data directory: {expected_file}"
+        );
+    }
+}
+
+#[test]
+fn test_parquet_1481() {
+    let err = read_file("PARQUET-1481.parquet").unwrap_err();
+    assert_eq!(
+        err.to_string(),
+        "Parquet error: unexpected parquet type: -7"
+    );
+}
+
+#[test]
+#[should_panic(expected = "assertion failed: self.current_value.is_some()")]
+fn test_arrow_gh_41321() {
+    let err = read_file("ARROW-GH-41321.parquet").unwrap_err();
+    assert_eq!(err.to_string(), "TBD (currently panics)");
+}
+
+#[test]
+fn test_arrow_rs_gh_6229_dict_header() {
+    let err = read_file("ARROW-RS-GH-6229-DICTHEADER.parquet").unwrap_err();
+    assert_eq!(
+        err.to_string(),
+        "External: Parquet argument error: EOF: eof decoding byte array"
+    );
+}
+
+#[test]
+fn test_arrow_rs_gh_6229_dict_levels() {

Review Comment:
   Prior to https://github.com/apache/arrow-rs/pull/6232 this test generated an infinite loop.
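   
   For readers following along: the `read_file` helper these tests call is not visible in this hunk. Here is a minimal sketch of what it presumably does, based on the imports above (`ArrowReaderBuilder`, `ParquetError`) and the error strings asserted in the tests; treat the exact body as an assumption rather than the actual implementation:
   
   ```rust
   /// Sketch (not the actual helper): read `name` from bad_data and decode
   /// every record batch, returning the total row count or the first error.
   fn read_file(name: &str) -> Result<usize, ParquetError> {
       let path = bad_data_dir().join(name);
       let file = std::fs::File::open(&path).unwrap();
       // Build a synchronous Arrow record batch reader over the file
       let reader = ArrowReaderBuilder::try_new(file)?.build()?;
       let mut num_rows = 0;
       for batch in reader {
           // Decoding errors (e.g. the EOF / dictionary errors asserted above)
           // surface here and are converted into ParquetError via `?`
           let batch = batch?;
           num_rows += batch.num_rows();
       }
       Ok(num_rows)
   }
   ```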


