This is an automated email from the ASF dual-hosted git repository.
biyan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/paimon.git
The following commit(s) were added to refs/heads/master by this push:
new 130a7aa850 [spark] Make show table extended compatible with spark3.4- (#4754)
130a7aa850 is described below
commit 130a7aa850b6d22bb0469690b837902a9df949d9
Author: Zouxxyy <[email protected]>
AuthorDate: Mon Dec 23 16:46:47 2024 +0800
[spark] Make show table extended compatible with spark3.4- (#4754)
---
.../paimon/spark/sql/DescribeTableTest.scala | 21 +++
.../paimon/spark/sql/DescribeTableTest.scala | 21 +++
.../paimon/spark/sql/DescribeTableTest.scala | 21 +++
.../paimon/spark/sql/DescribeTableTest.scala | 21 +++
.../paimon/spark/sql/DescribeTableTest.scala | 21 +++
.../analysis/PaimonResolvePartitionSpec.scala | 8 +-
.../paimon/spark/sql/DescribeTableTest.scala | 154 --------------------
.../paimon/spark/sql/DescribeTableTestBase.scala | 159 +++++++++++++++++++++
.../catalyst/analysis/Spark3ResolutionRules.scala | 2 -
.../commands/PaimonShowTablePartitionCommand.scala | 9 +-
.../commands/PaimonShowTablesExtendedCommand.scala | 8 +-
11 files changed, 283 insertions(+), 162 deletions(-)
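The substance of the change: the SHOW TABLE EXTENDED code paths in paimon-spark3-common previously built expressions that only exist in newer Spark releases (ToPrettyString, QuotingUtils.quoteIdentifier), so they now go through Paimon's Compatibility.cast helper and a locally inlined quoteIdentifier. As a rough, hypothetical sketch of the kind of shim this relies on (the real Compatibility object ships with Paimon and may look different):

    import org.apache.spark.sql.catalyst.expressions.{Cast, Expression}
    import org.apache.spark.sql.types.DataType

    // Hypothetical cross-version helper: Cast(child, dataType, timeZoneId) is
    // available on every supported Spark line, unlike ToPrettyString, which
    // appears to exist only from Spark 3.5 onwards.
    object CompatibilitySketch {
      def cast(child: Expression, dataType: DataType, timeZoneId: Option[String] = None): Cast =
        Cast(child, dataType, timeZoneId)
    }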
diff --git a/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/DescribeTableTest.scala b/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/DescribeTableTest.scala
new file mode 100644
index 0000000000..c6aa774192
--- /dev/null
+++ b/paimon-spark/paimon-spark-3.2/src/test/scala/org/apache/paimon/spark/sql/DescribeTableTest.scala
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.paimon.spark.sql
+
+class DescribeTableTest extends DescribeTableTestBase {}
diff --git a/paimon-spark/paimon-spark-3.3/src/test/scala/org/apache/paimon/spark/sql/DescribeTableTest.scala b/paimon-spark/paimon-spark-3.3/src/test/scala/org/apache/paimon/spark/sql/DescribeTableTest.scala
new file mode 100644
index 0000000000..c6aa774192
--- /dev/null
+++ b/paimon-spark/paimon-spark-3.3/src/test/scala/org/apache/paimon/spark/sql/DescribeTableTest.scala
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.paimon.spark.sql
+
+class DescribeTableTest extends DescribeTableTestBase {}
diff --git a/paimon-spark/paimon-spark-3.4/src/test/scala/org/apache/paimon/spark/sql/DescribeTableTest.scala b/paimon-spark/paimon-spark-3.4/src/test/scala/org/apache/paimon/spark/sql/DescribeTableTest.scala
new file mode 100644
index 0000000000..c6aa774192
--- /dev/null
+++ b/paimon-spark/paimon-spark-3.4/src/test/scala/org/apache/paimon/spark/sql/DescribeTableTest.scala
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.paimon.spark.sql
+
+class DescribeTableTest extends DescribeTableTestBase {}
diff --git a/paimon-spark/paimon-spark-3.5/src/test/scala/org/apache/paimon/spark/sql/DescribeTableTest.scala b/paimon-spark/paimon-spark-3.5/src/test/scala/org/apache/paimon/spark/sql/DescribeTableTest.scala
new file mode 100644
index 0000000000..c6aa774192
--- /dev/null
+++ b/paimon-spark/paimon-spark-3.5/src/test/scala/org/apache/paimon/spark/sql/DescribeTableTest.scala
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.paimon.spark.sql
+
+class DescribeTableTest extends DescribeTableTestBase {}
diff --git a/paimon-spark/paimon-spark-4.0/src/test/scala/org/apache/paimon/spark/sql/DescribeTableTest.scala b/paimon-spark/paimon-spark-4.0/src/test/scala/org/apache/paimon/spark/sql/DescribeTableTest.scala
new file mode 100644
index 0000000000..c6aa774192
--- /dev/null
+++ b/paimon-spark/paimon-spark-4.0/src/test/scala/org/apache/paimon/spark/sql/DescribeTableTest.scala
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.paimon.spark.sql
+
+class DescribeTableTest extends DescribeTableTestBase {}
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/catalyst/analysis/PaimonResolvePartitionSpec.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/catalyst/analysis/PaimonResolvePartitionSpec.scala
index 5d6a5a063c..b401fdefa1 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/catalyst/analysis/PaimonResolvePartitionSpec.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/catalyst/analysis/PaimonResolvePartitionSpec.scala
@@ -18,12 +18,14 @@
package org.apache.paimon.spark.catalyst.analysis
+import org.apache.paimon.spark.catalyst.Compatibility
+
import org.apache.spark.sql.PaimonUtils.{normalizePartitionSpec, requireExactMatchedPartitionSpec}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.{PartitionSpec, ResolvedPartitionSpec, UnresolvedPartitionSpec}
import org.apache.spark.sql.catalyst.analysis.ResolvePartitionSpec.conf
import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
-import org.apache.spark.sql.catalyst.expressions.{Cast, Literal}
+import org.apache.spark.sql.catalyst.expressions.Literal
import org.apache.spark.sql.catalyst.util.CharVarcharUtils
import org.apache.spark.sql.connector.catalog.{Identifier, TableCatalog}
import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Implicits._
@@ -68,7 +70,9 @@ object PaimonResolvePartitionSpec {
part =>
val raw = partitionSpec.get(part.name).orNull
val dt = CharVarcharUtils.replaceCharVarcharWithString(part.dataType)
- Cast(Literal.create(raw, StringType), dt, Some(conf.sessionLocalTimeZone)).eval()
+ Compatibility
+ .cast(Literal.create(raw, StringType), dt, Some(conf.sessionLocalTimeZone))
+ .eval()
}
InternalRow.fromSeq(partValues)
}
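For readers less familiar with this corner of Catalyst: the resolved partition values end up as a plain Cast of a string literal to the partition column's type, evaluated eagerly before being packed into an InternalRow. A minimal standalone illustration (the column type, raw value, and time zone are assumptions, not Paimon code):

    import org.apache.spark.sql.catalyst.expressions.{Cast, Literal}
    import org.apache.spark.sql.types.{IntegerType, StringType}

    // A spec like PARTITION(pt2 = 11) arrives as the raw string "11";
    // cast it to the column's data type before building the InternalRow.
    val raw = "11"
    val value = Cast(Literal.create(raw, StringType), IntegerType, Some("UTC")).eval()
    // value == 11 (an Int)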
diff --git a/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/DescribeTableTest.scala b/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/DescribeTableTest.scala
deleted file mode 100644
index ae538fa48c..0000000000
--- a/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/DescribeTableTest.scala
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.paimon.spark.sql
-
-import org.apache.paimon.spark.PaimonSparkTestBase
-
-import org.apache.spark.sql.Row
-import org.junit.jupiter.api.Assertions
-
-import java.util.Objects
-
-class DescribeTableTest extends PaimonSparkTestBase {
-
- test("Paimon show: show table extended") {
- val testDB = "test_show"
- withDatabase(testDB) {
- spark.sql("CREATE TABLE s1 (id INT)")
-
- spark.sql(s"CREATE DATABASE $testDB")
- spark.sql(s"USE $testDB")
- spark.sql("CREATE TABLE s2 (id INT, pt STRING) PARTITIONED BY (pt)")
- spark.sql("CREATE TABLE s3 (id INT, pt1 STRING, pt2 STRING) PARTITIONED BY (pt1, pt2)")
-
- spark.sql("INSERT INTO s2 VALUES (1, '2024'), (2, '2024'), (3, '2025'), (4, '2026')")
- spark.sql("""
- |INSERT INTO s3
- |VALUES
- |(1, '2024', '11'), (2, '2024', '12'), (3, '2025', '11'), (4, '2025', '12')
- |""".stripMargin)
-
- // SHOW TABL EXTENDED will give four columns: namespace, tableName, isTemporary, information.
- checkAnswer(
- sql(s"SHOW TABLE EXTENDED IN $dbName0 LIKE '*'")
- .select("namespace", "tableName", "isTemporary"),
- Row("test", "s1", false))
- checkAnswer(
- sql(s"SHOW TABLE EXTENDED IN $testDB LIKE '*'")
- .select("namespace", "tableName", "isTemporary"),
- Row(testDB, "s2", false) :: Row(testDB, "s3", false) :: Nil
- )
-
- // check table s1
- val res1 = spark.sql(s"SHOW TABLE EXTENDED IN $testDB LIKE 's2'").select("information")
- Assertions.assertEquals(1, res1.count())
- val information1 = res1
- .collect()
- .head
- .getString(0)
- .split("\n")
- .map {
- line =>
- val kv = line.split(": ", 2)
- kv(0) -> kv(1)
- }
- .toMap
- Assertions.assertEquals(information1("Catalog"), "paimon")
- Assertions.assertEquals(information1("Namespace"), testDB)
- Assertions.assertEquals(information1("Table"), "s2")
- Assertions.assertEquals(information1("Provider"), "paimon")
- Assertions.assertEquals(information1("Location"), loadTable(testDB, "s2").location().toString)
-
- // check table s2 partition info
- val error1 = intercept[Exception] {
- spark.sql(s"SHOW TABLE EXTENDED IN $testDB LIKE 's2' PARTITION(pt='2022')")
- }.getMessage
- assert(error1.contains("PARTITIONS_NOT_FOUND"))
-
- val error2 = intercept[Exception] {
- spark.sql(s"SHOW TABLE EXTENDED IN $testDB LIKE 's3' PARTITION(pt1='2024')")
- }.getMessage
- assert(error2.contains("Partition spec is invalid"))
-
- val res2 =
- spark.sql(s"SHOW TABLE EXTENDED IN $testDB LIKE 's3' PARTITION(pt1 = '2024', pt2 = 11)")
- checkAnswer(
- res2.select("namespace", "tableName", "isTemporary"),
- Row(testDB, "s3", false)
- )
- Assertions.assertTrue(
- res2.select("information").collect().head.getString(0).contains("Partition Values"))
- }
- }
-
- test(s"Paimon describe: describe table comment") {
- var comment = "test comment"
- spark.sql(s"""
- |CREATE TABLE T (
- | id INT COMMENT 'id comment',
- | name STRING,
- | dt STRING)
- |COMMENT '$comment'
- |""".stripMargin)
- checkTableCommentEqual("T", comment)
-
- comment = "new comment"
- spark.sql(s"ALTER TABLE T SET TBLPROPERTIES ('comment' = '$comment')")
- checkTableCommentEqual("T", comment)
-
- comment = " "
- spark.sql(s"ALTER TABLE T SET TBLPROPERTIES ('comment' = '$comment')")
- checkTableCommentEqual("T", comment)
-
- comment = ""
- spark.sql(s"ALTER TABLE T SET TBLPROPERTIES ('comment' = '$comment')")
- checkTableCommentEqual("T", comment)
-
- spark.sql(s"ALTER TABLE T UNSET TBLPROPERTIES ('comment')")
- checkTableCommentEqual("T", null)
-
- comment = "new comment"
- spark.sql(s"ALTER TABLE T SET TBLPROPERTIES ('comment' = '$comment')")
- checkTableCommentEqual("T", comment)
- }
-
- test(s"Paimon describe: describe table with no comment") {
- spark.sql(s"""
- |CREATE TABLE T (
- | id INT COMMENT 'id comment',
- | name STRING,
- | dt STRING)
- |""".stripMargin)
- checkTableCommentEqual("T", null)
- }
-
- def checkTableCommentEqual(tableName: String, comment: String): Unit = {
- // check describe table
- checkAnswer(
- spark
- .sql(s"DESCRIBE TABLE EXTENDED $tableName")
- .filter("col_name = 'Comment'")
- .select("col_name", "data_type"),
- if (comment == null) Nil else Row("Comment", comment) :: Nil
- )
-
- // check comment in schema
- Assertions.assertTrue(Objects.equals(comment, loadTable(tableName).schema().comment()))
- }
-}
diff --git a/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/DescribeTableTestBase.scala b/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/DescribeTableTestBase.scala
new file mode 100644
index 0000000000..e020d5fbfe
--- /dev/null
+++ b/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/DescribeTableTestBase.scala
@@ -0,0 +1,159 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.paimon.spark.sql
+
+import org.apache.paimon.spark.PaimonSparkTestBase
+
+import org.apache.spark.sql.Row
+import org.junit.jupiter.api.Assertions
+
+import java.util.Objects
+
+abstract class DescribeTableTestBase extends PaimonSparkTestBase {
+
+ test("Paimon show: show table extended") {
+ val testDB = "test_show"
+ withTable(s"$dbName0.s1") {
+ spark.sql("CREATE TABLE s1 (id INT)")
+ withDatabase(testDB) {
+ spark.sql(s"CREATE DATABASE $testDB")
+ spark.sql(s"USE $testDB")
+ withTable("s2", "s3") {
+ spark.sql("CREATE TABLE s2 (id INT, pt STRING) PARTITIONED BY (pt)")
+ spark.sql("CREATE TABLE s3 (id INT, pt1 STRING, pt2 STRING) PARTITIONED BY (pt1, pt2)")
+
+ spark.sql("INSERT INTO s2 VALUES (1, '2024'), (2, '2024'), (3, '2025'), (4, '2026')")
+ spark.sql("""
+ |INSERT INTO s3
+ |VALUES
+ |(1, '2024', '11'), (2, '2024', '12'), (3, '2025', '11'), (4, '2025', '12')
+ |""".stripMargin)
+
+ // SHOW TABLE EXTENDED will give four columns: namespace, tableName, isTemporary, information.
+ checkAnswer(
+ sql(s"SHOW TABLE EXTENDED IN $dbName0 LIKE '*'")
+ .select("namespace", "tableName", "isTemporary"),
+ Row("test", "s1", false))
+ checkAnswer(
+ sql(s"SHOW TABLE EXTENDED IN $testDB LIKE '*'")
+ .select("namespace", "tableName", "isTemporary"),
+ Row(testDB, "s2", false) :: Row(testDB, "s3", false) :: Nil
+ )
+
+ // check table s1
+ val res1 = spark.sql(s"SHOW TABLE EXTENDED IN $testDB LIKE 's2'").select("information")
+ Assertions.assertEquals(1, res1.count())
+ val information1 = res1
+ .collect()
+ .head
+ .getString(0)
+ .split("\n")
+ .map {
+ line =>
+ val kv = line.split(": ", 2)
+ kv(0) -> kv(1)
+ }
+ .toMap
+ Assertions.assertEquals(information1("Catalog"), "paimon")
+ Assertions.assertEquals(information1("Namespace"), testDB)
+ Assertions.assertEquals(information1("Table"), "s2")
+ Assertions.assertEquals(information1("Provider"), "paimon")
+ Assertions.assertEquals(
+ information1("Location"),
+ loadTable(testDB, "s2").location().toString)
+
+ // check table s2 partition info
+ val error1 = intercept[Exception] {
+ spark.sql(s"SHOW TABLE EXTENDED IN $testDB LIKE 's2' PARTITION(pt='2022')")
+ }.getMessage
+ assert(error1.contains("PARTITIONS_NOT_FOUND"))
+
+ val error2 = intercept[Exception] {
+ spark.sql(s"SHOW TABLE EXTENDED IN $testDB LIKE 's3' PARTITION(pt1='2024')")
+ }.getMessage
+ assert(error2.contains("Partition spec is invalid"))
+
+ val res2 =
+ spark.sql(s"SHOW TABLE EXTENDED IN $testDB LIKE 's3' PARTITION(pt1 = '2024', pt2 = 11)")
+ checkAnswer(
+ res2.select("namespace", "tableName", "isTemporary"),
+ Row(testDB, "s3", false)
+ )
+ Assertions.assertTrue(
+ res2.select("information").collect().head.getString(0).contains("Partition Values"))
+ }
+ }
+ }
+ }
+
+ test(s"Paimon describe: describe table comment") {
+ var comment = "test comment"
+ spark.sql(s"""
+ |CREATE TABLE T (
+ | id INT COMMENT 'id comment',
+ | name STRING,
+ | dt STRING)
+ |COMMENT '$comment'
+ |""".stripMargin)
+ checkTableCommentEqual("T", comment)
+
+ comment = "new comment"
+ spark.sql(s"ALTER TABLE T SET TBLPROPERTIES ('comment' = '$comment')")
+ checkTableCommentEqual("T", comment)
+
+ comment = " "
+ spark.sql(s"ALTER TABLE T SET TBLPROPERTIES ('comment' = '$comment')")
+ checkTableCommentEqual("T", comment)
+
+ comment = ""
+ spark.sql(s"ALTER TABLE T SET TBLPROPERTIES ('comment' = '$comment')")
+ checkTableCommentEqual("T", comment)
+
+ spark.sql(s"ALTER TABLE T UNSET TBLPROPERTIES ('comment')")
+ checkTableCommentEqual("T", null)
+
+ comment = "new comment"
+ spark.sql(s"ALTER TABLE T SET TBLPROPERTIES ('comment' = '$comment')")
+ checkTableCommentEqual("T", comment)
+ }
+
+ test(s"Paimon describe: describe table with no comment") {
+ spark.sql(s"""
+ |CREATE TABLE T (
+ | id INT COMMENT 'id comment',
+ | name STRING,
+ | dt STRING)
+ |""".stripMargin)
+ checkTableCommentEqual("T", null)
+ }
+
+ def checkTableCommentEqual(tableName: String, comment: String): Unit = {
+ // check describe table
+ checkAnswer(
+ spark
+ .sql(s"DESCRIBE TABLE EXTENDED $tableName")
+ .filter("col_name = 'Comment'")
+ .select("col_name", "data_type"),
+ if (comment == null) Nil else Row("Comment", comment) :: Nil
+ )
+
+ // check comment in schema
+ Assertions.assertTrue(Objects.equals(comment, loadTable(tableName).schema().comment()))
+ }
+}
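As a reminder of what these assertions parse, the information column returned by SHOW TABLE EXTENDED is a newline-separated list of "Key: Value" pairs, roughly of this shape (the Location value below is a placeholder, not real output):

    Catalog: paimon
    Namespace: test_show
    Table: s2
    Provider: paimon
    Location: file:/tmp/warehouse/test_show.db/s2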
diff --git a/paimon-spark/paimon-spark3-common/src/main/scala/org/apache/paimon/spark/catalyst/analysis/Spark3ResolutionRules.scala b/paimon-spark/paimon-spark3-common/src/main/scala/org/apache/paimon/spark/catalyst/analysis/Spark3ResolutionRules.scala
index 924df2d1e3..9e62e617a1 100644
--- a/paimon-spark/paimon-spark3-common/src/main/scala/org/apache/paimon/spark/catalyst/analysis/Spark3ResolutionRules.scala
+++ b/paimon-spark/paimon-spark3-common/src/main/scala/org/apache/paimon/spark/catalyst/analysis/Spark3ResolutionRules.scala
@@ -50,7 +50,5 @@ case class Spark3ResolutionRules(session: SparkSession)
.getOrElse {
PaimonShowTablesExtendedCommand(catalog.asTableCatalog, ns, pattern, output)
}
-
}
-
}
diff --git a/paimon-spark/paimon-spark3-common/src/main/scala/org/apache/paimon/spark/commands/PaimonShowTablePartitionCommand.scala b/paimon-spark/paimon-spark3-common/src/main/scala/org/apache/paimon/spark/commands/PaimonShowTablePartitionCommand.scala
index 32f9498585..ac98a807ca 100644
--- a/paimon-spark/paimon-spark3-common/src/main/scala/org/apache/paimon/spark/commands/PaimonShowTablePartitionCommand.scala
+++ b/paimon-spark/paimon-spark3-common/src/main/scala/org/apache/paimon/spark/commands/PaimonShowTablePartitionCommand.scala
@@ -18,16 +18,17 @@
package org.apache.paimon.spark.commands
+import org.apache.paimon.spark.catalyst.Compatibility
import org.apache.paimon.spark.leafnode.PaimonLeafRunnableCommand
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.catalyst.analysis.ResolvedPartitionSpec
import org.apache.spark.sql.catalyst.catalog.ExternalCatalogUtils.escapePathName
-import org.apache.spark.sql.catalyst.expressions.{Attribute, ToPrettyString}
-import org.apache.spark.sql.catalyst.expressions.Literal
+import org.apache.spark.sql.catalyst.expressions.{Attribute, Cast, Literal, ToPrettyString}
import org.apache.spark.sql.connector.catalog.{Identifier, SupportsPartitionManagement, TableCatalog}
import org.apache.spark.sql.connector.catalog.PaimonCatalogImplicits._
import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Implicits._
+import org.apache.spark.sql.types.StringType
import scala.collection.JavaConverters._
import scala.collection.mutable
@@ -77,7 +78,9 @@ case class PaimonShowTablePartitionCommand(
for (i <- 0 until len) {
val dataType = partitionSchema(i).dataType
val partValueUTF8String =
- ToPrettyString(Literal(row.get(i, dataType), dataType), Some(timeZoneId)).eval(null)
+ Compatibility
+ .cast(Literal(row.get(i, dataType), dataType), StringType, Some(timeZoneId))
+ .eval(null)
val partValueStr = if (partValueUTF8String == null) "null" else partValueUTF8String.toString
partitions(i) = escapePathName(partitionSchema(i).name) + "=" + escapePathName(partValueStr)
}
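Net effect in PaimonShowTablePartitionCommand: each partition value is now rendered by casting it to StringType instead of via ToPrettyString, with null handled explicitly. A hedged, self-contained sketch of that rendering step (the column name and value are made up):

    import org.apache.spark.sql.catalyst.catalog.ExternalCatalogUtils.escapePathName
    import org.apache.spark.sql.catalyst.expressions.{Cast, Literal}
    import org.apache.spark.sql.types.StringType

    // Render one partition value as an escaped "name=value" path fragment.
    val value = Cast(Literal(2024), StringType, Some("UTC")).eval(null)
    val rendered = if (value == null) "null" else value.toString
    val fragment = escapePathName("pt") + "=" + escapePathName(rendered)
    // fragment == "pt=2024"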
diff --git a/paimon-spark/paimon-spark3-common/src/main/scala/org/apache/paimon/spark/commands/PaimonShowTablesExtendedCommand.scala b/paimon-spark/paimon-spark3-common/src/main/scala/org/apache/paimon/spark/commands/PaimonShowTablesExtendedCommand.scala
index b393982e25..b998acbd5c 100644
--- a/paimon-spark/paimon-spark3-common/src/main/scala/org/apache/paimon/spark/commands/PaimonShowTablesExtendedCommand.scala
+++ b/paimon-spark/paimon-spark3-common/src/main/scala/org/apache/paimon/spark/commands/PaimonShowTablesExtendedCommand.scala
@@ -99,7 +99,7 @@ case class PaimonShowTablesExtendedCommand(
"Partition Columns",
table.asPartitionable
.partitionSchema()
- .map(field => QuotingUtils.quoteIdentifier(field.name))
+ .map(field => quoteIdentifier(field.name))
.mkString("[", ", ", "]"))
}
@@ -120,4 +120,10 @@ case class PaimonShowTablesExtendedCommand(
case _ => false
}
+ // copy from spark for compatibility
+ private def quoteIdentifier(name: String): String = {
+ // Escapes back-ticks within the identifier name with double-back-ticks, and then quote the
+ // identifier with back-ticks.
+ "`" + name.replace("`", "``") + "`"
+ }
}
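For completeness, the inlined quoteIdentifier mirrors the Spark original; a quick sanity check with hypothetical identifiers:

    def quoteIdentifier(name: String): String = "`" + name.replace("`", "``") + "`"

    quoteIdentifier("pt")      // returns `pt`
    quoteIdentifier("my`col")  // returns `my``col`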