Peng-Lei commented on a change in pull request #34476:
URL: https://github.com/apache/spark/pull/34476#discussion_r742584235
##########
File path:
sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
##########
@@ -18,212 +18,25 @@
package org.apache.spark.sql.execution.command
import java.net.URI
-import java.util.{Collections, Locale}
-
-import org.mockito.ArgumentMatchers.any
-import org.mockito.Mockito.{mock, when}
-import org.mockito.invocation.InvocationOnMock
+import java.util.Locale
import org.apache.spark.sql.{AnalysisException, SaveMode}
import org.apache.spark.sql.catalyst.{AliasIdentifier, TableIdentifier}
-import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, Analyzer,
EmptyFunctionRegistry, NoSuchTableException, ResolvedFieldName, ResolvedTable,
ResolveSessionCatalog, UnresolvedAttribute, UnresolvedRelation,
UnresolvedSubqueryColumnAliases, UnresolvedTable}
-import org.apache.spark.sql.catalyst.catalog.{BucketSpec,
CatalogStorageFormat, CatalogTable, CatalogTableType, InMemoryCatalog,
SessionCatalog}
+import org.apache.spark.sql.catalyst.analysis.{ResolvedFieldName,
ResolvedTable, UnresolvedAttribute, UnresolvedRelation,
UnresolvedSubqueryColumnAliases, UnresolvedTable}
+import org.apache.spark.sql.catalyst.catalog.{BucketSpec,
CatalogStorageFormat, CatalogTable, CatalogTableType}
import org.apache.spark.sql.catalyst.expressions.{AnsiCast,
AttributeReference, EqualTo, Expression, InSubquery, IntegerLiteral, ListQuery,
Literal, StringLiteral}
import org.apache.spark.sql.catalyst.expressions.objects.StaticInvoke
import org.apache.spark.sql.catalyst.parser.{CatalystSqlParser, ParseException}
-import org.apache.spark.sql.catalyst.plans.logical.{AlterColumn,
AnalysisOnlyCommand, AppendData, Assignment, CreateTableAsSelect,
CreateTableStatement, CreateV2Table, DeleteAction, DeleteFromTable,
DescribeRelation, DropTable, InsertAction, LocalRelation, LogicalPlan,
MergeIntoTable, OneRowRelation, Project, SetTableLocation, SetTableProperties,
ShowTableProperties, SubqueryAlias, UnsetTableProperties, UpdateAction,
UpdateTable}
-import org.apache.spark.sql.catalyst.rules.Rule
-import org.apache.spark.sql.connector.FakeV2Provider
-import org.apache.spark.sql.connector.catalog.{CatalogManager,
CatalogNotFoundException, Identifier, Table, TableCapability, TableCatalog,
V1Table}
-import org.apache.spark.sql.connector.expressions.Transform
+import org.apache.spark.sql.catalyst.plans.logical.{AlterColumn,
AnalysisOnlyCommand, AppendData, Assignment, CreateTableAsSelect,
CreateTableStatement, CreateV2Table, DeleteAction, DeleteFromTable,
DescribeRelation, DropTable, InsertAction, LogicalPlan, MergeIntoTable,
OneRowRelation, Project, SetTableLocation, SetTableProperties,
ShowTableProperties, SubqueryAlias, UnsetTableProperties, UpdateAction,
UpdateTable}
+import org.apache.spark.sql.connector.catalog.{CatalogManager, Identifier}
import org.apache.spark.sql.execution.datasources.CreateTable
import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation
import org.apache.spark.sql.internal.{HiveSerDe, SQLConf}
-import org.apache.spark.sql.sources.SimpleScanSource
import org.apache.spark.sql.types.{BooleanType, CharType, DoubleType,
IntegerType, LongType, StringType, StructField, StructType}
-class PlanResolutionSuite extends AnalysisTest {
+class PlanResolutionSuite extends PlanResolutionSuiteBase {
Review comment:
> Today we have a unified framework to resolve database objects in
commands, and we don't need to test plan resolution for each command.
Thank you for the reminder — you're quite right. I will revert the last
change.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]