devinjdangelo commented on code in PR #7212:
URL: https://github.com/apache/arrow-datafusion/pull/7212#discussion_r1285707042
##########
datafusion/core/src/datasource/listing/table.rs:
##########
@@ -1559,23 +1549,87 @@ mod tests {
}
#[tokio::test]
- async fn test_append_plan_to_external_table_stored_as_csv() -> Result<()> {
- let file_type = FileType::CSV;
- let file_compression_type = FileCompressionType::UNCOMPRESSED;
+ async fn test_insert_into_append_to_json_file() -> Result<()> {
+ helper_test_insert_into_append_to_existing_files(
+ FileType::JSON,
+ FileCompressionType::UNCOMPRESSED,
+ )
+ .await?;
+ Ok(())
+ }
+
+ #[tokio::test]
+ async fn test_insert_into_append_new_json_files() -> Result<()> {
+ helper_test_append_new_files_to_table(
+ FileType::JSON,
+ FileCompressionType::UNCOMPRESSED,
+ )
+ .await?;
+ Ok(())
+ }
+ #[tokio::test]
+ async fn test_insert_into_append_to_csv_file() -> Result<()> {
+ helper_test_insert_into_append_to_existing_files(
+ FileType::CSV,
+ FileCompressionType::UNCOMPRESSED,
+ )
+ .await?;
+ Ok(())
+ }
+
+ #[tokio::test]
+ async fn test_insert_into_append_new_csv_files() -> Result<()> {
+ helper_test_append_new_files_to_table(
+ FileType::CSV,
+ FileCompressionType::UNCOMPRESSED,
+ )
+ .await?;
+ Ok(())
+ }
+
+ fn load_empty_schema_table(
+ schema: SchemaRef,
+ temp_path: &str,
+ insert_mode: ListingTableInsertMode,
+ file_format: Arc<dyn FileFormat>,
+ ) -> Result<Arc<dyn TableProvider>> {
+ File::create(temp_path)?;
+ let table_path = ListingTableUrl::parse(temp_path).unwrap();
+
+        let listing_options =
+            ListingOptions::new(file_format.clone()).with_insert_mode(insert_mode);
+
+ let config = ListingTableConfig::new(table_path)
+ .with_listing_options(listing_options)
+ .with_schema(schema);
+
+ let table = ListingTable::try_new(config)?;
+ Ok(Arc::new(table))
+ }
+
+    /// Logic of testing inserting into a listing table by appending to existing files
+    /// is the same for all formats/options which support this. This helper allows
+    /// passing different options to execute the same test with different settings.
+ async fn helper_test_insert_into_append_to_existing_files(
+ file_type: FileType,
+ file_compression_type: FileCompressionType,
+ ) -> Result<()> {
// Create the initial context, schema, and batch.
let session_ctx = SessionContext::new();
// Create a new schema with one field called "a" of type Int32
let schema = Arc::new(Schema::new(vec![Field::new(
"column1",
- DataType::Int32,
+ DataType::Float64,
Review Comment:
I made this change while debugging, trying to make the test work for JSON. I
just double-checked, and this change is not needed for the test to work properly
for JSON. I have reverted this change back to Int32.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]