Github user cloud-fan commented on a diff in the pull request:
https://github.com/apache/spark/pull/15996#discussion_r92598769
--- Diff: sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala ---
@@ -363,48 +365,125 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
       throw new AnalysisException("Cannot create hive serde table with saveAsTable API")
     }
-    val tableExists = df.sparkSession.sessionState.catalog.tableExists(tableIdent)
-
-    (tableExists, mode) match {
-      case (true, SaveMode.Ignore) =>
-        // Do nothing
-
-      case (true, SaveMode.ErrorIfExists) =>
-        throw new AnalysisException(s"Table $tableIdent already exists.")
-
-      case _ =>
-        val existingTable = if (tableExists) {
-          Some(df.sparkSession.sessionState.catalog.getTableMetadata(tableIdent))
-        } else {
-          None
-        }
-        val storage = if (tableExists) {
-          existingTable.get.storage
-        } else {
-          DataSource.buildStorageFormatFromOptions(extraOptions.toMap)
-        }
-        val tableType = if (tableExists) {
-          existingTable.get.tableType
-        } else if (storage.locationUri.isDefined) {
-          CatalogTableType.EXTERNAL
-        } else {
-          CatalogTableType.MANAGED
+    val catalog = df.sparkSession.sessionState.catalog
+    val db = tableIdent.database.getOrElse(catalog.getCurrentDatabase)
+    val tableIdentWithDB = tableIdent.copy(database = Some(db))
+    val tableName = tableIdent.unquotedString
+
+    catalog.getTableMetadataOption(tableIdent) match {
+      // If the table already exists...
+      case Some(tableMeta) =>
+        mode match {
+          case SaveMode.Ignore => // Do nothing
+
+          case SaveMode.ErrorIfExists =>
+            throw new AnalysisException(s"Table $tableName already exists. You can set SaveMode " +
+              "to SaveMode.Append to insert data into the table or set SaveMode to " +
+              "SaveMode.Overwrite to overwrite the existing data.")
+
+          case SaveMode.Append =>
+            // Check if the specified data source match the data source of the existing table.
+            val specifiedProvider = DataSource.lookupDataSource(source)
+            // TODO: Check that options from the resolved relation match the relation that we are
+            // inserting into (i.e. using the same compression).
+
+            // Pass a table identifier with database part, so that `lookupRelation` won't get temp
+            // views unexpectedly.
+            EliminateSubqueryAliases(catalog.lookupRelation(tableIdentWithDB)) match {
+              case l @ LogicalRelation(_: InsertableRelation | _: HadoopFsRelation, _, _) =>
+                // check if the file formats match
+                l.relation match {
+                  case r: HadoopFsRelation if r.fileFormat.getClass != specifiedProvider =>
+                    throw new AnalysisException(
+                      s"The file format of the existing table $tableName is " +
+                        s"`${r.fileFormat.getClass.getName}`. It doesn't match the specified " +
+                        s"format `$source`")
+                  case _ =>
+                }
+              case s: SimpleCatalogRelation if DDLUtils.isDatasourceTable(s.metadata) => // OK.
+              case c: CatalogRelation if c.catalogTable.provider == Some(DDLUtils.HIVE_PROVIDER) =>
+                throw new AnalysisException(s"Saving data in the Hive serde table $tableName " +
+                  s"is not supported yet. Please use the insertInto() API as an alternative.")
+              case o =>
+                throw new AnalysisException(s"Saving data in ${o.toString} is not supported.")
+            }
+
+            val existingSchema = tableMeta.schema
+            if (df.logicalPlan.schema.size != existingSchema.size) {
+              throw new AnalysisException(
+                s"The column number of the existing table $tableName" +
+                  s"(${existingSchema.catalogString}) doesn't match the data schema" +
+                  s"(${df.logicalPlan.schema.catalogString})")
+            }
+
+            if (partitioningColumns.isDefined) {
+              logWarning("append to an existing table, the specified partition columns " +
+                s"[${partitioningColumns.get.mkString(", ")}] will be ignored.")
+            }
+
+            val specifiedBucketSpec = getBucketSpec
+            if (specifiedBucketSpec.isDefined) {
+              logWarning("append to an existing table, the specified bucketing " +
+                s"(${specifiedBucketSpec.get}) will be ignored.")
+            }
+
+            // Because we are inserting into an existing table, we should respect the existing
+            // schema and adjust columns order of the given dataframe according to it.
+            val adjustedColumns = existingSchema.map(f => Column(df.resolve(f.name)))
+            df.select(adjustedColumns: _*).write.insertInto(tableIdentWithDB)
+
+          case SaveMode.Overwrite =>
+            EliminateSubqueryAliases(catalog.lookupRelation(tableIdentWithDB)) match {
+              // Only do the check if the table is a data source table
+              // (the relation is a BaseRelation).
+              case LogicalRelation(dest: BaseRelation, _, _) =>
+                // Get all input data source relations of the query.
+                val srcRelations = df.logicalPlan.collect {
+                  case LogicalRelation(src: BaseRelation, _, _) => src
+                }
+                if (srcRelations.contains(dest)) {
+                  throw new AnalysisException(
+                    s"Cannot overwrite table $tableName that is also being read from")
+                }
+              case _ => // OK
+            }
+
+            // Drop the existing table and create it again.
+            catalog.dropTable(tableIdentWithDB, ignoreIfNotExists = true, purge = false)
+            createTable(tableIdentWithDB)
+
+            // Refresh the cache of the table in the catalog.
+            catalog.refreshTable(tableIdentWithDB)
+        }
-        val tableDesc = CatalogTable(
-          identifier = tableIdent,
-          tableType = tableType,
-          storage = storage,
-          schema = new StructType,
-          provider = Some(source),
-          partitionColumnNames = partitioningColumns.getOrElse(Nil),
-          bucketSpec = getBucketSpec
-        )
-        df.sparkSession.sessionState.executePlan(
-          CreateTable(tableDesc, mode, Some(df.logicalPlan))).toRdd
+      // If the table doesn't exist...
+      case None => createTable(tableIdentWithDB)
     }
   }
+
+  private def createTable(tableIdent: TableIdentifier): Unit = {
+    val storage = DataSource.buildStorageFormatFromOptions(extraOptions.toMap)
+    val tableType = if (storage.locationUri.isDefined) {
--- End diff --
I reverted https://github.com/apache/spark/pull/15983 here because it's no longer needed after this refactor.
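
For readers skimming the diff, here is a minimal sketch of the user-facing behavior the refactored saveAsTable branches cover; the local SparkSession, the table name "t", and the sample data are illustrative assumptions, not part of this PR:

    // Illustrative sketch only; exercises the SaveMode branches handled in the diff.
    import org.apache.spark.sql.{SaveMode, SparkSession}

    val spark = SparkSession.builder().master("local[*]").appName("saveAsTable-sketch").getOrCreate()
    import spark.implicits._

    // Table does not exist yet: the `case None` branch calls createTable().
    Seq((1, "a"), (2, "b")).toDF("id", "name")
      .write.format("parquet").saveAsTable("t")

    // SaveMode.Append: columns are resolved by name against the existing schema,
    // so a DataFrame with a different column order is still inserted correctly.
    Seq(("c", 3)).toDF("name", "id")
      .write.format("parquet").mode(SaveMode.Append).saveAsTable("t")

    // SaveMode.ErrorIfExists (the default) would throw an AnalysisException here, since "t" exists:
    // Seq((4, "d")).toDF("id", "name").write.saveAsTable("t")

    // SaveMode.Overwrite: drops "t", recreates it, and refreshes the catalog cache,
    // unless the query being written also reads from "t".
    Seq((5, "e")).toDF("id", "name")
      .write.format("parquet").mode(SaveMode.Overwrite).saveAsTable("t")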