/**
 * Verifies that the column names read from a CSV file's header line match the expected
 * schema, throwing `IllegalArgumentException` on the first discrepancy found.
 *
 * The comparison is case-insensitive and positional: header column i must match schema
 * field i. A `null` header (e.g. the file had no header line to parse) is silently
 * accepted, as is any input when `checkHeaderFlag` is false.
 *
 * @param schema          expected schema of the CSV data
 * @param columnNames     column names parsed from the file's header line; may be null
 * @param fileName        name of the CSV file, used only in error messages
 * @param checkHeaderFlag when false, this method is a no-op
 * @throws IllegalArgumentException if the header length differs from the schema size,
 *                                  or any column name mismatches its schema field
 */
def checkHeaderColumnNames(
    schema: StructType,
    columnNames: Array[String],
    fileName: String,
    checkHeaderFlag: Boolean): Unit = {
  if (checkHeaderFlag && columnNames != null) {
    val fieldNames = schema.map(_.name)
    val headerLen = columnNames.length
    val schemaSize = fieldNames.length

    // Lowercase with Locale.ROOT so the comparison is stable regardless of the JVM's
    // default locale (e.g. avoids the Turkish dotless-i problem with plain toLowerCase).
    val schemaLower = fieldNames.map(_.toLowerCase(java.util.Locale.ROOT))
    val headerLower = columnNames.toSeq.map(_.toLowerCase(java.util.Locale.ROOT))

    val errorMessage: Option[String] = if (headerLen == schemaSize) {
      // Report only the first mismatching column, mirroring the original scan order.
      schemaLower.zip(headerLower).collectFirst {
        case (nameInSchema, nameInHeader) if nameInHeader != nameInSchema =>
          s"""|CSV file header does not contain the expected fields.
              | Header: ${columnNames.mkString(", ")}
              | Schema: ${fieldNames.mkString(", ")}
              |Expected: $nameInSchema but found: $nameInHeader
              |CSV file: $fileName""".stripMargin
      }
    } else {
      Some(
        s"""|Number of columns in CSV header is not equal to number of fields in the schema:
            | Header length: $headerLen, schema size: $schemaSize
            |CSV file: $fileName""".stripMargin)
    }

    errorMessage.foreach { msg =>
      throw new IllegalArgumentException(msg)
    }
  }
}
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org