Github user gatorsmile commented on a diff in the pull request:
https://github.com/apache/spark/pull/22941#discussion_r230608937
--- Diff:
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/InsertIntoDataSourceCommand.scala
---
@@ -30,14 +30,13 @@ import org.apache.spark.sql.sources.InsertableRelation
case class InsertIntoDataSourceCommand(
logicalRelation: LogicalRelation,
query: LogicalPlan,
- overwrite: Boolean)
- extends RunnableCommand {
+ overwrite: Boolean,
+ outputColumnNames: Seq[String])
+ extends DataWritingCommand {
- override protected def innerChildren: Seq[QueryPlan[_]] = Seq(query)
-
- override def run(sparkSession: SparkSession): Seq[Row] = {
+ override def run(sparkSession: SparkSession, child: SparkPlan): Seq[Row]
= {
val relation =
logicalRelation.relation.asInstanceOf[InsertableRelation]
- val data = Dataset.ofRows(sparkSession, query)
--- End diff --
This will use the cached data, although the plan does not show that the
cached data is being used.
---
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]