This is an automated email from the ASF dual-hosted git repository.

biyan pushed a commit to branch release-0.6
in repository https://gitbox.apache.org/repos/asf/incubator-paimon.git

commit ec220f5a6410e286e546193de3699a8de764e61a
Author: Zouxxyy <zouxinyu....@alibaba-inc.com>
AuthorDate: Tue Jan 16 14:06:09 2024 +0800

    [spark] Fix call procedure compatibility with spark3.2 (#2707)
---
 .../paimon/spark/sql/CompactProcedureTest.scala    | 22 +++++++++++++++++++++
 .../extensions/PaimonSqlExtensionsAstBuilder.scala | 23 +++++++++++++++++++++-
 ...reTest.scala => CompactProcedureTestBase.scala} |  2 +-
 3 files changed, 45 insertions(+), 2 deletions(-)

diff --git a/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/CompactProcedureTest.scala b/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/CompactProcedureTest.scala
new file mode 100644
index 000000000..51931ef7e
--- /dev/null
+++ b/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/CompactProcedureTest.scala
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.paimon.spark.sql
+
+import org.apache.paimon.spark.procedure.CompactProcedureTestBase
+
+class CompactProcedureTest extends CompactProcedureTestBase {}
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/catalyst/parser/extensions/PaimonSqlExtensionsAstBuilder.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/catalyst/parser/extensions/PaimonSqlExtensionsAstBuilder.scala
index 8caab60c2..bb8fd9a33 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/catalyst/parser/extensions/PaimonSqlExtensionsAstBuilder.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/spark/sql/catalyst/parser/extensions/PaimonSqlExtensionsAstBuilder.scala
@@ -23,13 +23,14 @@ import org.apache.paimon.spark.catalyst.plans.logical.{PaimonCallArgument, Paimo
 import org.antlr.v4.runtime._
 import org.antlr.v4.runtime.misc.Interval
 import org.antlr.v4.runtime.tree.{ParseTree, TerminalNode}
+import org.apache.spark.QueryContext
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.catalyst.expressions.Expression
 import org.apache.spark.sql.catalyst.parser.ParserInterface
 import org.apache.spark.sql.catalyst.parser.extensions.PaimonParserUtils.withOrigin
 import org.apache.spark.sql.catalyst.parser.extensions.PaimonSqlExtensionsParser._
 import org.apache.spark.sql.catalyst.plans.logical._
-import org.apache.spark.sql.catalyst.trees.{CurrentOrigin, Origin}
+import org.apache.spark.sql.catalyst.trees.SQLQueryContext
 
 import scala.collection.JavaConverters._
 
@@ -133,3 +134,23 @@ object PaimonParserUtils {
     stream.getText(Interval.of(0, stream.size() - 1))
   }
 }
+
+case class Origin(
+    line: Option[Int] = None,
+    startPosition: Option[Int] = None,
+    startIndex: Option[Int] = None,
+    stopIndex: Option[Int] = None,
+    sqlText: Option[String] = None,
+    objectType: Option[String] = None,
+    objectName: Option[String] = None) {}
+
+object CurrentOrigin {
+  private val value = new ThreadLocal[Origin]() {
+    override def initialValue: Origin = Origin()
+  }
+
+  def get: Origin = value.get()
+  def set(o: Origin): Unit = value.set(o)
+  def reset(): Unit = value.set(Origin())
+}
+/* Apache Spark copy end */
diff --git a/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/procedure/CompactProcedureTest.scala b/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/procedure/CompactProcedureTestBase.scala
similarity index 99%
rename from paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/procedure/CompactProcedureTest.scala
rename to paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/procedure/CompactProcedureTestBase.scala
index 788c8dd3e..d37002d25 100644
--- a/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/procedure/CompactProcedureTest.scala
+++ b/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/procedure/CompactProcedureTestBase.scala
@@ -29,7 +29,7 @@ import org.assertj.core.api.Assertions
 import java.util
 
 /** Test sort compact procedure. See [[CompactProcedure]]. */
-class CompactProcedureTest extends PaimonSparkTestBase with StreamTest {
+abstract class CompactProcedureTestBase extends PaimonSparkTestBase with StreamTest {
 
   import testImplicits._
 

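For readers unfamiliar with the pattern being copied: CurrentOrigin is a thread-local holder that Catalyst-style parsers use to tag the tree nodes they build with the source position currently being parsed, and the commit vendors a local Origin/CurrentOrigin pair so the Paimon parser extension no longer depends on the exact shape of Spark's own Origin class, which evidently differs on Spark 3.2. The sketch below is not part of the commit; it is a minimal, self-contained illustration of how a withOrigin-style helper (the actual PaimonParserUtils.withOrigin imported above may be implemented differently) installs an Origin, runs a block, and restores the previous value. The trimmed Origin here mirrors only a subset of the vendored fields, and the names OriginSketch and this withOrigin signature are assumptions made for the example.

// Illustrative sketch only -- not part of the commit above.
object OriginSketch {

  // Trimmed copy of the vendored Origin: just enough fields for the demo.
  case class Origin(
      line: Option[Int] = None,
      startPosition: Option[Int] = None,
      sqlText: Option[String] = None)

  // Thread-local holder, same pattern as the CurrentOrigin object in the diff.
  object CurrentOrigin {
    private val value = new ThreadLocal[Origin]() {
      override def initialValue: Origin = Origin()
    }
    def get: Origin = value.get()
    def set(o: Origin): Unit = value.set(o)
    def reset(): Unit = value.set(Origin())
  }

  // Installs `origin` for the duration of `body`, restoring the previous
  // value afterwards so nested parses do not leak positions to each other.
  def withOrigin[T](origin: Origin)(body: => T): T = {
    val previous = CurrentOrigin.get
    CurrentOrigin.set(origin)
    try body
    finally CurrentOrigin.set(previous)
  }

  def main(args: Array[String]): Unit = {
    val seen = withOrigin(Origin(line = Some(3), startPosition = Some(7))) {
      // Tree nodes built inside this block would read CurrentOrigin.get
      // to attach line/position information to themselves.
      CurrentOrigin.get
    }
    println(seen)              // Origin(Some(3),Some(7),None)
    println(CurrentOrigin.get) // restored to the empty Origin()
  }
}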