This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new ee2d8ae [SPARK-35378][SQL][FOLLOWUP] Move CommandResult to catalyst.plans.logical
ee2d8ae is described below
commit ee2d8ae322b6f236cfd6514b8be2d7c068b58722
Author: gengjiaan <[email protected]>
AuthorDate: Thu Jun 17 07:47:38 2021 -0700
[SPARK-35378][SQL][FOLLOWUP] Move CommandResult to catalyst.plans.logical
### What changes were proposed in this pull request?
https://github.com/apache/spark/pull/32513 added the case class
`CommandResult` in the package `org.apache.spark.sql.expressions`. That
package is not a suitable home for a logical plan node, so this PR moves
`CommandResult` from `org.apache.spark.sql.expressions` to
`org.apache.spark.sql.catalyst.plans.logical`.
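For context, a minimal sketch of roughly what the relocated class looks like under the new package, assuming a shape consistent with the imports visible in the diff below; the class body is not part of this change, so the field names and the statistics computation are illustrative rather than copied from the source:

```scala
package org.apache.spark.sql.catalyst.plans.logical

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.plans.logical.statsEstimation.EstimationUtils
import org.apache.spark.sql.execution.SparkPlan

// Sketch only: a leaf logical plan node holding the rows of an already-executed
// command, so the result can be reused instead of re-running the command.
// Field names are inferred, not taken from the file touched by this diff.
case class CommandResult(
    output: Seq[Attribute],
    commandLogicalPlan: LogicalPlan,
    commandPhysicalPlan: SparkPlan,
    rows: Seq[InternalRow]) extends LeafNode {

  // Show the original command as an inner child in plan explain output.
  override def innerChildren: Seq[QueryPlan[_]] = Seq(commandLogicalPlan)

  // Size-based statistics estimated from the materialized rows.
  override def computeStats(): Statistics =
    Statistics(sizeInBytes = EstimationUtils.getSizePerRow(output) * rows.length)
}
```

Note that, being in `catalyst.plans.logical` itself, the class no longer needs to import `LeafNode`, `LogicalPlan`, or `Statistics`, which is exactly the import line removed in the diff.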
### Why are the changes needed?
To put `CommandResult` in a more suitable package.
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
No new tests are needed; this change only moves the class and updates imports.
Closes #32942 from beliefer/SPARK-35378-followup.
Lead-authored-by: gengjiaan <[email protected]>
Co-authored-by: Jiaan Geng <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
.../sql/{expressions => catalyst/plans/logical}/CommandResult.scala | 3 +--
.../src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala | 3 +--
.../main/scala/org/apache/spark/sql/execution/SparkStrategies.scala | 1 -
.../scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala | 3 +--
4 files changed, 3 insertions(+), 7 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/expressions/CommandResult.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/CommandResult.scala
similarity index 93%
rename from sql/core/src/main/scala/org/apache/spark/sql/expressions/CommandResult.scala
rename to sql/core/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/CommandResult.scala
index 23f5c5f..2ef3422 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/expressions/CommandResult.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/CommandResult.scala
@@ -15,12 +15,11 @@
* limitations under the License.
*/
-package org.apache.spark.sql.expressions
+package org.apache.spark.sql.catalyst.plans.logical
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.plans.QueryPlan
-import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, LogicalPlan, Statistics}
import org.apache.spark.sql.catalyst.plans.logical.statsEstimation.EstimationUtils
import org.apache.spark.sql.execution.SparkPlan
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala
index aaa87bd..f85b680 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala
@@ -30,7 +30,7 @@ import org.apache.spark.sql.catalyst.{InternalRow, QueryPlanningTracker}
import org.apache.spark.sql.catalyst.analysis.UnsupportedOperationChecker
import org.apache.spark.sql.catalyst.expressions.codegen.ByteCodeStats
import org.apache.spark.sql.catalyst.plans.QueryPlan
-import org.apache.spark.sql.catalyst.plans.logical.{AppendData, Command, CreateTableAsSelect, LogicalPlan, OverwriteByExpression, OverwritePartitionsDynamic, ReplaceTableAsSelect, ReturnAnswer}
+import org.apache.spark.sql.catalyst.plans.logical.{AppendData, Command, CommandResult, CreateTableAsSelect, LogicalPlan, OverwriteByExpression, OverwritePartitionsDynamic, ReplaceTableAsSelect, ReturnAnswer}
import org.apache.spark.sql.catalyst.rules.{PlanChangeLogger, Rule}
import org.apache.spark.sql.catalyst.util.StringUtils.PlanStringConcat
import org.apache.spark.sql.catalyst.util.truncatedString
@@ -39,7 +39,6 @@ import org.apache.spark.sql.execution.bucketing.{CoalesceBucketsInJoin, DisableU
import org.apache.spark.sql.execution.dynamicpruning.PlanDynamicPruningFilters
import org.apache.spark.sql.execution.exchange.{EnsureRequirements, ReuseExchange}
import org.apache.spark.sql.execution.streaming.{IncrementalExecution, OffsetSeqMetadata}
-import org.apache.spark.sql.expressions.CommandResult
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.OutputMode
import org.apache.spark.util.Utils
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
index 388e8b3..9026d7a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
@@ -36,7 +36,6 @@ import org.apache.spark.sql.execution.exchange.{REPARTITION_BY_COL, REPARTITION_
import org.apache.spark.sql.execution.python._
import org.apache.spark.sql.execution.streaming._
import org.apache.spark.sql.execution.streaming.sources.MemoryPlan
-import org.apache.spark.sql.expressions.CommandResult
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.OutputMode
import org.apache.spark.sql.types.StructType
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala
index e67d527..82bc22f3 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala
@@ -21,10 +21,9 @@ import scala.io.Source
import org.apache.spark.sql.{AnalysisException, FastOperator}
import org.apache.spark.sql.catalyst.analysis.UnresolvedNamespace
import org.apache.spark.sql.catalyst.plans.QueryPlan
-import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, OneRowRelation, Project, ShowTables, SubqueryAlias}
+import org.apache.spark.sql.catalyst.plans.logical.{CommandResult, LogicalPlan, OneRowRelation, Project, ShowTables, SubqueryAlias}
import org.apache.spark.sql.catalyst.trees.TreeNodeTag
import org.apache.spark.sql.execution.command.{ExecutedCommandExec, ShowTablesCommand}
-import org.apache.spark.sql.expressions.CommandResult
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.util.Utils