Github user gatorsmile commented on a diff in the pull request:
https://github.com/apache/spark/pull/18266#discussion_r138476182
--- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala ---
@@ -768,6 +769,35 @@ object JdbcUtils extends Logging {
   }
   /**
+   * Parses the user specified customSchema option value to a DataFrame schema,
+   * and returns it if all of its columns are equal to the default schema's.
+   */
+  def parseUserSpecifiedColumnTypes(
+      schema: StructType,
+      columnTypes: String,
+      nameEquality: Resolver): StructType = {
+    val userSchema = CatalystSqlParser.parseTableSchema(columnTypes)
+
+    SchemaUtils.checkColumnNameDuplication(
+      userSchema.map(_.name), "in the customSchema option value", nameEquality)
+
+    if (userSchema.size != schema.size) {
+      throw new AnalysisException("Please provide all the columns, " +
+        s"all columns are: ${schema.fields.map(_.name).mkString(",")}")
+    }
+
+    // This is resolved by names, only check the column names.
+    userSchema.fieldNames.foreach { col =>
+      schema.find(f => nameEquality(f.name, col)).getOrElse {
+        throw new AnalysisException(
+          s"${JDBCOptions.JDBC_CUSTOM_DATAFRAME_COLUMN_TYPES} option column $col not found in " +
+            s"schema ${schema.catalogString}")
--- End diff ---
```Scala
val colNames = tableSchema.fieldNames.mkString(",")
throw new AnalysisException(s"Please provide all the columns, all columns are: $colNames")
```
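For context, a minimal sketch of how the customSchema option parsed by this method would be supplied through the DataFrame reader; the JDBC URL, table name, and column types below are placeholders for illustration, not values from the PR:
```Scala
import org.apache.spark.sql.SparkSession

// Hypothetical reader call exercising the customSchema option; the URL,
// table name, and column list are placeholder values only.
val spark = SparkSession.builder().appName("customSchema-sketch").getOrCreate()
val df = spark.read
  .format("jdbc")
  .option("url", "jdbc:postgresql://localhost/testdb")
  .option("dbtable", "people")
  .option("customSchema", "id DECIMAL(38, 0), name STRING")
  .load()
```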
---