Github user cloud-fan commented on a diff in the pull request:

    https://github.com/apache/spark/pull/12412#discussion_r60345544
  
    --- Diff: sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala ---
    @@ -935,6 +935,104 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
         sql("DROP TABLE t1")
       }
     
    +  test("LOAD DATA") {
    +    sql(
    +      """
    +        |CREATE EXTERNAL TABLE non_part_table (time TIMESTAMP, id INT)
    +        |ROW FORMAT DELIMITED
    +        |FIELDS TERMINATED BY ','
    +        |LINES TERMINATED BY '\n'
    +      """.stripMargin)
    +
    +    val testData = TestHive.getHiveFile("data/files/issue-4077-data.txt").getCanonicalPath
    +
    +    // LOAD DATA INTO non-partitioned table can't specify partition
    +    intercept[AnalysisException] {
    +      sql(s"""LOAD DATA LOCAL INPATH "$testData" INTO TABLE non_part_table PARTITION(ds="1")""")
    +    }
    +
    +    sql(s"""LOAD DATA LOCAL INPATH "$testData" INTO TABLE non_part_table""")
    +    assert(sql("SELECT time FROM non_part_table LIMIT 2").collect()
    +      === Array(Row(java.sql.Timestamp.valueOf("2014-12-11 00:00:00")), Row(null)))
    +
    +    sql(
    +      """
    +        |CREATE EXTERNAL TABLE part_table (time TIMESTAMP, id INT)
    +        |PARTITIONED BY (c STRING, d STRING)
    +        |ROW FORMAT DELIMITED
    +        |FIELDS TERMINATED BY ','
    +        |LINES TERMINATED BY '\n'
    +      """.stripMargin)
    +
    +    // LOAD DATA INTO partitioned table must specify partition
    +    intercept[AnalysisException] {
    +      sql(s"""LOAD DATA LOCAL INPATH "$testData" INTO TABLE part_table""")
    +    }
    +
    +    intercept[AnalysisException] {
    +      sql(s"""LOAD DATA LOCAL INPATH "$testData" INTO TABLE part_table PARTITION(c="1")""")
    +    }
    +    intercept[AnalysisException] {
    +      sql(s"""LOAD DATA LOCAL INPATH "$testData" INTO TABLE part_table PARTITION(d="1")""")
    +    }
    +    intercept[AnalysisException] {
    +      sql(s"""LOAD DATA LOCAL INPATH "$testData" INTO TABLE part_table PARTITION(c="1", k="2")""")
    +    }
    +
    +    sql(s"""LOAD DATA LOCAL INPATH "$testData" INTO TABLE part_table PARTITION(c="1", d="2")""")
    +    assert(sql("SELECT time, id FROM part_table WHERE c = '1' AND d = '2'").collect()
    +      === sql("SELECT * FROM non_part_table").collect())
    +
    +    // Different order of partition columns.
    +    sql(s"""LOAD DATA LOCAL INPATH "$testData" INTO TABLE part_table PARTITION(d="1", c="2")""")
    +    assert(sql("SELECT time, id FROM part_table WHERE c = '2' AND d = '1'").collect()
    +      === sql("SELECT * FROM non_part_table").collect())
    +
    +    sql("DROP TABLE non_part_table")
    +    sql("DROP TABLE part_table")
    +  }
    +
    +  test("LOAD DATA: input path") {
    +    sql(
    +      """
    +        |CREATE EXTERNAL TABLE non_part_table (time TIMESTAMP, id INT)
    +        |ROW FORMAT DELIMITED
    +        |FIELDS TERMINATED BY ','
    +        |LINES TERMINATED BY '\n'
    +      """.stripMargin)
    +
    +    // Non-existing inpath
    +    intercept[AnalysisException] {
    +      sql("""LOAD DATA LOCAL INPATH "/non-existing/data.txt" INTO TABLE non_part_table""")
    +    }
    +
    +    val testData = TestHive.getHiveFile("data/files/issue-4077-data.txt").getCanonicalPath
    +
    +    // Non-local inpath: without URI Scheme and Authority
    +    sql(s"""LOAD DATA INPATH "$testData" INTO TABLE non_part_table""")
    +
    +    assert(sql("SELECT time FROM non_part_table LIMIT 2").collect()
    --- End diff --
    
    `checkAnswer`
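    
    Presumably this suggests replacing the bare `assert(... === ...)` comparisons with the `checkAnswer` helper from `QueryTest`. A minimal sketch of that rewrite for the first assertion (assuming `checkAnswer` is in scope for this suite):
    
    ```scala
    // Hypothetical rewrite using checkAnswer, which compares the full result
    // set and prints a readable diff of expected vs. actual rows on mismatch.
    checkAnswer(
      sql("SELECT time FROM non_part_table LIMIT 2"),
      Seq(Row(java.sql.Timestamp.valueOf("2014-12-11 00:00:00")), Row(null)))
    ```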

