Github user HyukjinKwon commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20894#discussion_r189065444
  
    --- Diff: sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala ---
    @@ -1368,4 +1370,123 @@ class CSVSuite extends QueryTest with SharedSQLContext with SQLTestUtils with Te
           checkAnswer(computed, expected)
         }
       }
    +
    +  def checkHeader(multiLine: Boolean): Unit = {
    +    withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
    +      withTempPath { path =>
    +        val oschema = new StructType().add("f1", DoubleType).add("f2", DoubleType)
    +        val odf = spark.createDataFrame(List(Row(1.0, 1234.5)).asJava, oschema)
    +        odf.write.option("header", true).csv(path.getCanonicalPath)
    +        val ischema = new StructType().add("f2", DoubleType).add("f1", DoubleType)
    +        val exception = intercept[SparkException] {
    +          spark.read
    +            .schema(ischema)
    +            .option("multiLine", multiLine)
    +            .option("header", true)
    +            .option("enforceSchema", false)
    +            .csv(path.getCanonicalPath)
    +            .collect()
    +        }
    +        assert(exception.getMessage.contains(
    +          "CSV header is not conform to the schema"
    +        ))
    +
    +        val shortSchema = new StructType().add("f1", DoubleType)
    +        val exceptionForShortSchema = intercept[SparkException] {
    +          spark.read
    +            .schema(shortSchema)
    +            .option("multiLine", multiLine)
    +            .option("header", true)
    +            .option("enforceSchema", false)
    +            .csv(path.getCanonicalPath)
    +            .collect()
    +        }
    +        assert(exceptionForShortSchema.getMessage.contains(
    +          "Number of column in CSV header is not equal to number of fields 
in the schema"
    +        ))
    --- End diff --
    
    ditto for inlining
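
    e.g. something along these lines (just a rough sketch of one possible inlined form, reusing ischema, multiLine and path from the test body quoted above):

        // One possible inlined form (sketch only): fold the intercept into the assert
        // instead of binding the exception to a val first.
        assert(intercept[SparkException] {
          spark.read
            .schema(ischema)
            .option("multiLine", multiLine)
            .option("header", true)
            .option("enforceSchema", false)
            .csv(path.getCanonicalPath)
            .collect()
        }.getMessage.contains("CSV header is not conform to the schema"))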

