This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 827d2a016c54 [SPARK-50638][SQL] Refactor the view resolution into the separate file to reuse it in the single-pass Analyzer
827d2a016c54 is described below
commit 827d2a016c54e2c89b35a8128aefaabf677b9a7b
Author: Vladimir Golubev <[email protected]>
AuthorDate: Sun Dec 22 22:35:00 2024 +0300
[SPARK-50638][SQL] Refactor the view resolution into the separate file to reuse it in the single-pass Analyzer
### What changes were proposed in this pull request?
Refactor the view resolution into the separate file to reuse it in the single-pass Analyzer.
### Why are the changes needed?
To reuse this code from the single-pass Analyzer.
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Existing tests.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #49255 from vladimirg-db/vladimirg-db/refactor-view-resolution-to-a-separate-file.
Authored-by: Vladimir Golubev <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
.../spark/sql/catalyst/analysis/Analyzer.scala | 23 ++-------
.../sql/catalyst/analysis/ViewResolution.scala | 55 ++++++++++++++++++++++
2 files changed, 58 insertions(+), 20 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 7c97fccade01..35ae0125d141 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -1036,26 +1036,9 @@ class Analyzer(override val catalogManager: CatalogManager) extends RuleExecutor
// If `AnalysisContext.catalogAndNamespace` is non-empty, analyzer will expand single-part names
// with it, instead of current catalog and namespace.
private def resolveViews(plan: LogicalPlan): LogicalPlan = plan match {
- // The view's child should be a logical plan parsed from the `desc.viewText`, the variable
- // `viewText` should be defined, or else we throw an error on the generation of the View
- // operator.
- case view @ View(desc, isTempView, child) if !child.resolved =>
- // Resolve all the UnresolvedRelations and Views in the child.
- val newChild = AnalysisContext.withAnalysisContext(desc) {
- val nestedViewDepth = AnalysisContext.get.nestedViewDepth
- val maxNestedViewDepth = AnalysisContext.get.maxNestedViewDepth
- if (nestedViewDepth > maxNestedViewDepth) {
- throw QueryCompilationErrors.viewDepthExceedsMaxResolutionDepthError(
- desc.identifier, maxNestedViewDepth, view)
- }
- SQLConf.withExistingConf(View.effectiveSQLConf(desc.viewSQLConfigs, isTempView)) {
- executeSameContext(child)
- }
- }
- // Fail the analysis eagerly because outside AnalysisContext, the unresolved operators
- // inside a view maybe resolved incorrectly.
- checkAnalysis(newChild)
- view.copy(child = newChild)
+ case view: View if !view.child.resolved =>
+ ViewResolution
+ .resolve(view, resolveChild = executeSameContext, checkAnalysis = checkAnalysis)
case p @ SubqueryAlias(_, view: View) =>
p.copy(child = resolveViews(view))
case _ => plan
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ViewResolution.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ViewResolution.scala
new file mode 100644
index 000000000000..89ef29ddaaf1
--- /dev/null
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ViewResolution.scala
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.analysis
+
+import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, View}
+import org.apache.spark.sql.errors.QueryCompilationErrors
+import org.apache.spark.sql.internal.SQLConf
+
+object ViewResolution {
+ def resolve(
+ view: View,
+ resolveChild: LogicalPlan => LogicalPlan,
+ checkAnalysis: LogicalPlan => Unit): View = {
+ // The view's child should be a logical plan parsed from the `desc.viewText`, the variable
+ // `viewText` should be defined, or else we throw an error on the generation of the View
+ // operator.
+
+ // Resolve all the UnresolvedRelations and Views in the child.
+ val newChild = AnalysisContext.withAnalysisContext(view.desc) {
+ val nestedViewDepth = AnalysisContext.get.nestedViewDepth
+ val maxNestedViewDepth = AnalysisContext.get.maxNestedViewDepth
+ if (nestedViewDepth > maxNestedViewDepth) {
+ throw QueryCompilationErrors.viewDepthExceedsMaxResolutionDepthError(
+ view.desc.identifier,
+ maxNestedViewDepth,
+ view
+ )
+ }
+ SQLConf.withExistingConf(View.effectiveSQLConf(view.desc.viewSQLConfigs, view.isTempView)) {
+ resolveChild(view.child)
+ }
+ }
+
+ // Fail the analysis eagerly because outside AnalysisContext, the unresolved operators
+ // inside a view maybe resolved incorrectly.
+ checkAnalysis(newChild)
+
+ view.copy(child = newChild)
+ }
+}
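
For context, a minimal sketch of how a caller might reuse the extracted helper. The SinglePassViewResolver class and its wiring below are hypothetical illustrations, not part of this commit; only ViewResolution.resolve, View, and LogicalPlan come from the diff above.

import org.apache.spark.sql.catalyst.analysis.ViewResolution
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, View}

// Hypothetical caller: the class name and the injected functions are
// illustrative assumptions, not APIs introduced by this commit.
class SinglePassViewResolver(
    resolveChild: LogicalPlan => LogicalPlan,
    checkAnalysis: LogicalPlan => Unit) {

  // Delegate View resolution to the shared helper; other plans pass through.
  def apply(plan: LogicalPlan): LogicalPlan = plan match {
    case view: View if !view.child.resolved =>
      ViewResolution.resolve(view, resolveChild, checkAnalysis)
    case other => other
  }
}

The point of the refactor is that both the existing fixed-point rule (via resolveViews in Analyzer.scala) and a resolver like the sketch above can share the same depth-check, SQL-conf scoping, and eager checkAnalysis logic by passing in their own resolveChild and checkAnalysis functions.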
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]