This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new d900c6f [SPARK-33293][SQL][FOLLOW-UP] Rename TableWriteExec to TableWriteExecHelper
d900c6f is described below
commit d900c6ff49ed898163f562d1211743decb75c601
Author: Chao Sun <[email protected]>
AuthorDate: Tue Nov 3 14:53:01 2020 -0800
[SPARK-33293][SQL][FOLLOW-UP] Rename TableWriteExec to TableWriteExecHelper
### What changes were proposed in this pull request?
Rename `TableWriteExec` in `WriteToDataSourceV2Exec.scala` to `TableWriteExecHelper`.
### Why are the changes needed?
See [discussion](https://github.com/apache/spark/pull/30193#discussion_r516412653): the former name is too general for a trait that only factors out shared write logic.
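For illustration, here is a minimal sketch of the resulting shape, with stand-in types for Spark's internals (the real trait and the full `writeToTable` signature live in `WriteToDataSourceV2Exec.scala`; everything below other than the trait and class names is simplified for the example):

```scala
object TableWriteExecHelperSketch {
  class InternalRow                                                 // stand-in for Spark's InternalRow
  trait V2TableWriteExec { protected def run(): Seq[InternalRow] }  // stand-in
  trait SupportsV1Write                                             // stand-in

  // The shared mix-in, now named for what it is: a helper trait rather
  // than a physical operator in its own right.
  trait TableWriteExecHelper extends V2TableWriteExec with SupportsV1Write {
    protected def writeToTable(): Seq[InternalRow] = Seq.empty  // real parameters elided
  }

  // Physical nodes such as CreateTableAsSelectExec mix the helper in
  // and implement run() on top of it:
  case class CreateTableAsSelectExec(ifNotExists: Boolean) extends TableWriteExecHelper {
    override protected def run(): Seq[InternalRow] = writeToTable()
  }
}
```

The `Helper` suffix makes clear at a glance that the trait only carries shared write logic for the CTAS/RTAS exec nodes, rather than being an exec node itself.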
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
N/A
Closes #30235 from sunchao/SPARK-33293-2.
Authored-by: Chao Sun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
.../sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala
index efa2c31..1421a93 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala
@@ -66,7 +66,7 @@ case class CreateTableAsSelectExec(
     query: SparkPlan,
     properties: Map[String, String],
     writeOptions: CaseInsensitiveStringMap,
-    ifNotExists: Boolean) extends TableWriteExec {
+    ifNotExists: Boolean) extends TableWriteExecHelper {
 
   override protected def run(): Seq[InternalRow] = {
     if (catalog.tableExists(ident)) {
@@ -100,7 +100,7 @@ case class AtomicCreateTableAsSelectExec(
     query: SparkPlan,
     properties: Map[String, String],
     writeOptions: CaseInsensitiveStringMap,
-    ifNotExists: Boolean) extends TableWriteExec {
+    ifNotExists: Boolean) extends TableWriteExecHelper {
 
   override protected def run(): Seq[InternalRow] = {
     if (catalog.tableExists(ident)) {
@@ -134,7 +134,7 @@ case class ReplaceTableAsSelectExec(
     query: SparkPlan,
     properties: Map[String, String],
     writeOptions: CaseInsensitiveStringMap,
-    orCreate: Boolean) extends TableWriteExec {
+    orCreate: Boolean) extends TableWriteExecHelper {
 
   override protected def run(): Seq[InternalRow] = {
     // Note that this operation is potentially unsafe, but these are the strict semantics of
@@ -176,7 +176,7 @@ case class AtomicReplaceTableAsSelectExec(
     query: SparkPlan,
     properties: Map[String, String],
     writeOptions: CaseInsensitiveStringMap,
-    orCreate: Boolean) extends TableWriteExec {
+    orCreate: Boolean) extends TableWriteExecHelper {
 
   override protected def run(): Seq[InternalRow] = {
     val schema = query.schema.asNullable
@@ -432,7 +432,7 @@ object DataWritingSparkTask extends Logging {
   }
 }
 
-private[v2] trait TableWriteExec extends V2TableWriteExec with SupportsV1Write {
+private[v2] trait TableWriteExecHelper extends V2TableWriteExec with SupportsV1Write {
   import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.IdentifierHelper
 
   protected def writeToTable(
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]