alamb commented on code in PR #15578:
URL: https://github.com/apache/datafusion/pull/15578#discussion_r2030152111


##########
datafusion/sql/tests/cases/diagnostic.rs:
##########
@@ -136,7 +137,7 @@ fn test_table_not_found() -> Result<()> {
     let query = "SELECT * FROM /*a*/personx/*a*/";
     let spans = get_spans(query);
     let diag = do_query(query);
-    assert_eq!(diag.message, "table 'personx' not found");
+    assert_snapshot!(diag.message, @"table 'personx' not found");

Review Comment:
   This is fine, though single-string comparisons like this are probably fine to just leave as `assert_eq!` in the future.
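   
   For context, a minimal sketch of the two styles side by side (hypothetical test name; the string value is taken from the diff above, and `assert_snapshot!` is insta's inline-snapshot form):
   ```rust
   use insta::assert_snapshot;
   
   #[test]
   fn snapshot_vs_eq_sketch() {
       let message = "table 'personx' not found";
   
       // Plain equality: the expected string is an ordinary literal, updated by hand.
       assert_eq!(message, "table 'personx' not found");
   
       // Inline snapshot: the @"..." literal can be rewritten automatically
       // with `cargo insta review` when the test's output changes.
       assert_snapshot!(message, @"table 'personx' not found");
   }
   ```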



##########
datafusion/sql/tests/cases/plan_to_sql.rs:
##########
@@ -62,46 +63,44 @@ use sqlparser::dialect::{Dialect, GenericDialect, MySqlDialect};
 use sqlparser::parser::Parser;
 
 #[test]
-fn roundtrip_expr() {
-    let tests: Vec<(TableReference, &str, &str)> = vec![
-        (TableReference::bare("person"), "age > 35", r#"(age > 35)"#),
-        (
-            TableReference::bare("person"),
-            "id = '10'",
-            r#"(id = '10')"#,
-        ),
-        (
-            TableReference::bare("person"),
-            "CAST(id AS VARCHAR)",
-            r#"CAST(id AS VARCHAR)"#,
-        ),
-        (
-            TableReference::bare("person"),
-            "sum((age * 2))",
-            r#"sum((age * 2))"#,
-        ),
-    ];
+fn test_roundtrip_expr_1() {

Review Comment:
   this looks good



##########
datafusion/sql/tests/cases/plan_to_sql.rs:
##########
@@ -1284,10 +1446,10 @@ fn test_join_with_table_scan_filters() -> Result<()> {
         .build()?;
 
     let sql = plan_to_sql(&join_plan_with_filter)?;

Review Comment:
   this is really nice



##########
datafusion/sql/tests/cases/plan_to_sql.rs:
##########
@@ -726,121 +722,276 @@ fn test_aggregation_without_projection() -> Result<()> {
 
     let unparser = Unparser::default();
     let statement = unparser.plan_to_sql(&plan)?;
-
-    let actual = &statement.to_string();
-
-    assert_eq!(
-        actual,
-        r#"SELECT sum(users.age), users."name" FROM users GROUP BY 
users."name""#
+    assert_snapshot!(
+        statement,
+        @r#"SELECT sum(users.age), users."name" FROM users GROUP BY 
users."name""#
     );
 
     Ok(())
 }
 
 #[test]
-fn test_table_references_in_plan_to_sql() {
-    fn test(table_name: &str, expected_sql: &str, dialect: &impl UnparserDialect) {
-        let schema = Schema::new(vec![
-            Field::new("id", DataType::Utf8, false),
-            Field::new("value", DataType::Utf8, false),
-        ]);
-        let plan = table_scan(Some(table_name), &schema, None)
-            .unwrap()
-            .project(vec![col("id"), col("value")])
-            .unwrap()
-            .build()
-            .unwrap();
-
-        let unparser = Unparser::new(dialect);
-        let sql = unparser.plan_to_sql(&plan).unwrap();
-
-        assert_eq!(sql.to_string(), expected_sql)
-    }
-
-    test(
-        "catalog.schema.table",
-        r#"SELECT "table".id, "table"."value" FROM 
"catalog"."schema"."table""#,
+fn test_table_references_in_plan_to_sql_1() {
+    let table_name = "catalog.schema.table";

Review Comment:
   Would it be possible to reduce the duplication here from recreating the schema so many times?
   
   Something like:
   ```rust
   /// Return a schema with two string columns: "id" and "value"
   fn test_schema() -> Schema {
       Schema::new(vec![
           Field::new("id", DataType::Utf8, false),
           Field::new("value", DataType::Utf8, false),
       ])
   }
   ```
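   
   Each split-out test could then build its plan from the helper; a rough sketch assuming the `test_schema` helper above and the unwrapping style already used in the existing test:
   ```rust
   let plan = table_scan(Some("catalog.schema.table"), &test_schema(), None)
       .unwrap()
       .project(vec![col("id"), col("value")])
       .unwrap()
       .build()
       .unwrap();
   ```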



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: github-unsubscr...@datafusion.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

