Github user gatorsmile commented on a diff in the pull request:

    https://github.com/apache/spark/pull/18975#discussion_r137461450
  
    --- Diff: sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertSuite.scala ---
    @@ -534,4 +534,150 @@ class InsertIntoHiveTableSuite extends QueryTest with TestHiveSingleton with Bef
           }
         }
       }
    +
    +  test("insert overwrite to dir from hive metastore table") {
    +    withTempDir { dir =>
    +      val path = dir.toURI.getPath
    +
    +      sql(s"INSERT OVERWRITE LOCAL DIRECTORY '${path}' SELECT * FROM src 
where key < 10")
    +
    +      sql(
    +        s"""
    +           |INSERT OVERWRITE LOCAL DIRECTORY '${path}'
    +           |STORED AS orc
    +           |SELECT * FROM src where key < 10
    +         """.stripMargin)
    +
    +      // Use the ORC data source to verify the data written to the path.
    +      withTempView("orc_source") {
    +        sql(
    +          s"""
    +             |CREATE TEMPORARY VIEW orc_source
    +             |USING org.apache.spark.sql.hive.orc
    +             |OPTIONS (
    +             |  PATH '${dir.getCanonicalPath}'
    +             |)
    +           """.stripMargin)
    +
    +        checkAnswer(
    +          sql("select * from orc_source"),
    +          sql("select * from src where key < 10"))
    +      }
    +    }
    +  }
    +
    +  test("insert overwrite to local dir from temp table") {
    +    withTempView("test_insert_table") {
    +      spark.range(10).selectExpr("id", "id AS str").createOrReplaceTempView("test_insert_table")
    +
    +      withTempDir { dir =>
    +        val path = dir.toURI.getPath
    +
    +        sql(
    +          s"""
    +             |INSERT OVERWRITE LOCAL DIRECTORY '${path}'
    +             |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
    +             |SELECT * FROM test_insert_table
    +           """.stripMargin)
    +
    +        sql(
    +          s"""
    +             |INSERT OVERWRITE LOCAL DIRECTORY '${path}'
    +             |STORED AS orc
    +             |SELECT * FROM test_insert_table
    +           """.stripMargin)
    +
    +        // Use the ORC data source to verify the data written to the path.
    +        checkAnswer(
    +          spark.read.orc(dir.getCanonicalPath),
    +          sql("select * from test_insert_table"))
    +      }
    +    }
    +  }
    +
    +  test("insert overwrite to dir from temp table") {
    +    withTempView("test_insert_table") {
    +      spark.range(10).selectExpr("id", "id AS str").createOrReplaceTempView("test_insert_table")
    +
    +      withTempDir { dir =>
    +        val pathUri = dir.toURI
    +
    +        sql(
    +          s"""
    +             |INSERT OVERWRITE DIRECTORY '${pathUri}'
    +             |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
    +             |SELECT * FROM test_insert_table
    +           """.stripMargin)
    +
    +        sql(
    +          s"""
    +             |INSERT OVERWRITE DIRECTORY '${pathUri}'
    +             |STORED AS orc
    +             |SELECT * FROM test_insert_table
    +           """.stripMargin)
    +
    +        // Use the ORC data source to verify the data written to the path.
    +        checkAnswer(
    +          spark.read.orc(dir.getCanonicalPath),
    +          sql("select * from test_insert_table"))
    +      }
    +    }
    +  }
    +
    +  test("insert overwrite to dir to illegal path") {
    +    withTempView("test_insert_table") {
    +      spark.range(10).selectExpr("id", "id AS str").createOrReplaceTempView("test_insert_table")
    +
    +      val e = intercept[IllegalArgumentException] {
    +        sql(
    +          s"""
    +             |INSERT OVERWRITE LOCAL DIRECTORY 'abc://a'
    +             |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
    +             |SELECT * FROM test_insert_table
    +           """.stripMargin)
    +      }.getMessage
    +
    +      assert(e.contains("Wrong FS: abc://a, expected: file:///"))
    +    }
    +  }
    +
    +  test("insert overwrite to dir with mixed syntax") {
    +    withTempView("test_insert_table") {
    +      spark.range(10).selectExpr("id", "id AS str").createOrReplaceTempView("test_insert_table")
    +
    +      val e = intercept[ParseException] {
    +        sql(
    +          s"""
    +             |INSERT OVERWRITE DIRECTORY 'file://tmp'
    +             |USING json
    +             |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
    +             |SELECT * FROM test_insert_table
    +           """.stripMargin)
    +      }.getMessage
    +
    +      assert(e.contains("mismatched input 'ROW'"))
    +    }
    +  }
    +
    +  test("insert overwrite to dir with multi inserts") {
    --- End diff --
    
    ```
    Hive extension (multiple inserts):
    FROM from_statement
    INSERT OVERWRITE [LOCAL] DIRECTORY directory1 select_statement1
    [INSERT OVERWRITE [LOCAL] DIRECTORY directory2 select_statement2] ...
    ```
    
    Could you add a test case like this?
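    
    For reference, a minimal sketch of such a test, following the pattern of the cases above. The test name, directory layout, and the column split between the two inserts are only illustrative, and this assumes the parser accepts the FROM-first multi-insert form with directory targets:
    
    ```
      test("insert overwrite to multiple dirs from one FROM clause") {
        withTempView("test_insert_table") {
          spark.range(10).selectExpr("id", "id AS str").createOrReplaceTempView("test_insert_table")
    
          withTempDir { dir =>
            withTempDir { dir2 =>
              val path = dir.toURI.getPath
              val path2 = dir2.toURI.getPath
    
              // Sketch only: one FROM clause feeding two INSERT OVERWRITE DIRECTORY
              // targets, per the Hive multiple-inserts extension quoted above.
              sql(
                s"""
                   |FROM test_insert_table
                   |INSERT OVERWRITE LOCAL DIRECTORY '${path}'
                   |STORED AS orc
                   |SELECT id
                   |INSERT OVERWRITE LOCAL DIRECTORY '${path2}'
                   |STORED AS orc
                   |SELECT *
                 """.stripMargin)
    
              // Read both directories back with the ORC data source and compare.
              checkAnswer(
                spark.read.orc(dir.getCanonicalPath),
                sql("SELECT id FROM test_insert_table"))
              checkAnswer(
                spark.read.orc(dir2.getCanonicalPath),
                sql("SELECT * FROM test_insert_table"))
            }
          }
        }
      }
    ```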


---
