rdblue commented on a change in pull request #23606: [SPARK-26666][SQL] Support DSv2 overwrite and dynamic partition overwrite.
URL: https://github.com/apache/spark/pull/23606#discussion_r251259498
##########
File path: sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
##########
@@ -253,29 +254,38 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
       val dsOptions = new DataSourceOptions(options.asJava)
       provider.getTable(dsOptions) match {
         case table: SupportsBatchWrite =>
-          if (mode == SaveMode.Append) {
-            val relation = DataSourceV2Relation.create(table, options)
-            runCommand(df.sparkSession, "save") {
-              AppendData.byName(relation, df.logicalPlan)
-            }
-          } else {
-            val writeBuilder = table.newWriteBuilder(dsOptions)
-              .withQueryId(UUID.randomUUID().toString)
-              .withInputDataSchema(df.logicalPlan.schema)
-            writeBuilder match {
-              case s: SupportsSaveMode =>
-                val write = s.mode(mode).buildForBatch()
-                // It can only return null with `SupportsSaveMode`. We can clean it up after
-                // removing `SupportsSaveMode`.
-                if (write != null) {
-                  runCommand(df.sparkSession, "save") {
-                    WriteToDataSourceV2(write, df.logicalPlan)
+          lazy val relation = DataSourceV2Relation.create(table, options)
+          mode match {
+            case SaveMode.Append =>
+              runCommand(df.sparkSession, "save") {
+                AppendData.byName(relation, df.logicalPlan)
+              }
+
+            case SaveMode.Overwrite =>
+              // truncate the table
+              runCommand(df.sparkSession, "save") {
+                OverwriteByExpression.byName(relation, df.logicalPlan, Literal(true))
Review comment:
@gatorsmile, I verified that file-system sources truncate in all cases when the mode is Overwrite. I've updated this to always truncate, as you requested.
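
For reference, here's a minimal, self-contained sketch of the behavior I verified. The local master, parquet format, and /tmp path are just for illustration and aren't part of this patch; the point is that an Overwrite write replaces all existing data at the target, which is exactly what the `Literal(true)` delete expression above encodes.

```scala
import org.apache.spark.sql.{SaveMode, SparkSession}

object OverwriteTruncateSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("overwrite-truncate-sketch")
      .master("local[*]")  // illustrative; any master works
      .getOrCreate()
    import spark.implicits._

    val path = "/tmp/overwrite-truncate-sketch"  // illustrative path

    // Seed the target with two rows.
    Seq((1, "a"), (2, "b")).toDF("id", "value")
      .write.mode(SaveMode.Append).parquet(path)

    // Overwrite truncates the target first, so only this row survives.
    Seq((3, "c")).toDF("id", "value")
      .write.mode(SaveMode.Overwrite).parquet(path)

    // Prints 1: everything was replaced, matching
    // OverwriteByExpression with deleteExpr = Literal(true).
    println(spark.read.parquet(path).count())

    spark.stop()
  }
}
```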