This is an automated email from the ASF dual-hosted git repository.

lzljs3620320 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/paimon.git


The following commit(s) were added to refs/heads/master by this push:
     new caf6cf8f52 [test][spark] Add alter with incompatible col type test case (#6689)
caf6cf8f52 is described below

commit caf6cf8f5246dbd3e6b0f291424591cf4a022296
Author: Zouxxyy <[email protected]>
AuthorDate: Thu Nov 27 17:52:36 2025 +0800

    [test][spark] Add alter with incompatible col type test case (#6689)
---
 .../org/apache/paimon/hive/TestHiveMetastore.java  | 27 +++++-----
 .../apache/paimon/spark/PaimonHiveTestBase.scala   |  5 +-
 .../spark/sql/DDLWithIncompatibleColType.scala     | 60 ++++++++++++++++++++++
 .../paimon/spark/sql/FormatTableTestBase.scala     |  1 +
 4 files changed, 79 insertions(+), 14 deletions(-)

diff --git a/paimon-hive/paimon-hive-common/src/test/java/org/apache/paimon/hive/TestHiveMetastore.java b/paimon-hive/paimon-hive-common/src/test/java/org/apache/paimon/hive/TestHiveMetastore.java
index 2f92fb37b8..9d29b0d992 100644
--- a/paimon-hive/paimon-hive-common/src/test/java/org/apache/paimon/hive/TestHiveMetastore.java
+++ b/paimon-hive/paimon-hive-common/src/test/java/org/apache/paimon/hive/TestHiveMetastore.java
@@ -115,12 +115,16 @@ public class TestHiveMetastore {
     private TServer server;
     private HiveMetaStore.HMSHandler baseHandler;
 
+    public void start(Configuration configuration, int port) {
+        start(configuration, DEFAULT_POOL_SIZE, port);
+    }
+
     /**
      * Starts a TestHiveMetastore with the default connection pool size (5) and the default
      * HiveConf.
      */
     public void start(int port) {
-        start(new HiveConf(new Configuration(), TestHiveMetastore.class), DEFAULT_POOL_SIZE, port);
+        start(new Configuration(), DEFAULT_POOL_SIZE, port);
     }
 
     /**
@@ -128,7 +132,7 @@ public class TestHiveMetastore {
      * HiveConf.
      */
     public void start() {
-        start(new HiveConf(new Configuration(), TestHiveMetastore.class), DEFAULT_POOL_SIZE, 9083);
+        start(new Configuration(), DEFAULT_POOL_SIZE, 9083);
     }
 
     /**
@@ -137,13 +141,11 @@ public class TestHiveMetastore {
      * @param conf The hive configuration to use
      * @param poolSize The number of threads in the executor pool
      */
-    public void start(HiveConf conf, int poolSize, int portNum) {
+    public void start(Configuration conf, int poolSize, int portNum) {
         try {
             TServerSocket socket = new TServerSocket(portNum);
             int port = socket.getServerSocket().getLocalPort();
-            initConf(conf, port);
-
-            this.hiveConf = conf;
+            this.hiveConf = initConf(conf, port);
             this.server = newThriftServer(socket, poolSize, hiveConf);
             this.executorService = Executors.newSingleThreadExecutor();
             this.executorService.submit(() -> server.serve());
@@ -211,16 +213,17 @@ public class TestHiveMetastore {
         return new TThreadPoolServer(args);
     }
 
-    private void initConf(HiveConf conf, int port) {
-        conf.set(HiveConf.ConfVars.METASTOREURIS.varname, "thrift://localhost:" + port);
-        conf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, warehouseDir());
-        conf.set(HiveConf.ConfVars.METASTORE_TRY_DIRECT_SQL.varname, "false");
-        conf.set(
+    private HiveConf initConf(Configuration conf, int port) {
+        conf.setIfUnset(HiveConf.ConfVars.METASTOREURIS.varname, "thrift://localhost:" + port);
+        conf.setIfUnset(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, warehouseDir());
+        conf.setIfUnset(HiveConf.ConfVars.METASTORE_TRY_DIRECT_SQL.varname, "false");
+        conf.setIfUnset(
                 HiveConf.ConfVars.METASTORE_DISALLOW_INCOMPATIBLE_COL_TYPE_CHANGES.varname,
                 "false");
-        conf.set(
+        conf.setIfUnset(
                 HiveConf.ConfVars.HIVE_IN_TEST.varname,
                 HiveConf.ConfVars.HIVE_IN_TEST.getDefaultValue());
+        return new HiveConf(conf, TestHiveMetastore.class);
     }
 
     private static void setupMetastoreDB(String dbURL) throws SQLException, IOException {
diff --git a/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/PaimonHiveTestBase.scala b/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/PaimonHiveTestBase.scala
index 3845a6b8b9..c6ebd327f1 100644
--- a/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/PaimonHiveTestBase.scala
+++ b/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/PaimonHiveTestBase.scala
@@ -20,6 +20,7 @@ package org.apache.paimon.spark
 
 import org.apache.paimon.catalog.{Catalog, DelegateCatalog}
 import org.apache.paimon.hive.{HiveCatalog, TestHiveMetastore}
+import org.apache.paimon.spark.PaimonHiveTestBase._
 import org.apache.paimon.table.FileStoreTable
 
 import org.apache.hadoop.conf.Configuration
@@ -41,7 +42,7 @@ class PaimonHiveTestBase extends PaimonSparkTestBase {
 
   protected val hiveDbName: String = "test_hive"
 
-  val hiveUri: String = PaimonHiveTestBase.hiveUri
+  protected def configuration: Configuration = new Configuration
 
   /**
    * Add spark_catalog ([[SparkGenericCatalog]] in hive) and paimon_hive ([[SparkCatalog]] in hive)
@@ -61,7 +62,7 @@ class PaimonHiveTestBase extends PaimonSparkTestBase {
   }
 
   override protected def beforeAll(): Unit = {
-    testHiveMetastore.start(PaimonHiveTestBase.hivePort)
+    testHiveMetastore.start(configuration, hivePort)
     super.beforeAll()
     spark.sql(s"USE $sparkCatalogName")
     spark.sql(s"CREATE DATABASE IF NOT EXISTS $hiveDbName")
diff --git a/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/DDLWithIncompatibleColType.scala b/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/DDLWithIncompatibleColType.scala
new file mode 100644
index 0000000000..ada78dc019
--- /dev/null
+++ b/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/DDLWithIncompatibleColType.scala
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.paimon.spark.sql
+
+import org.apache.paimon.spark.PaimonHiveTestBase
+
+import org.apache.hadoop.conf.Configuration
+
+class DDLWithDisallowIncompatibleColType extends DDLWithIncompatibleColType {
+  val disallowIncompatible: Boolean = true
+}
+
+class DDLWithAllowIncompatibleColType extends DDLWithIncompatibleColType {
+  val disallowIncompatible: Boolean = false
+}
+
+abstract class DDLWithIncompatibleColType extends PaimonHiveTestBase {
+
+  def disallowIncompatible: Boolean
+
+  override def configuration: Configuration = {
+    val conf_ = super.configuration
+    conf_.set(
+      "hive.metastore.disallow.incompatible.col.type.changes",
+      disallowIncompatible.toString)
+    conf_
+  }
+
+  test("Paimon DDL with hive catalog: alter with incompatible col type") {
+    withTable("t") {
+      spark.sql("CREATE TABLE t (a INT, b INT, c STRUCT<f1: INT>) USING paimon")
+      if (disallowIncompatible) {
+        val e = intercept[Exception] {
+          spark.sql("ALTER TABLE t DROP COLUMN b")
+        }
+        assert(
+          e.getMessage.contains(
+            "The following columns have types incompatible with the existing columns"))
+      } else {
+        spark.sql("ALTER TABLE t DROP COLUMN b")
+      }
+    }
+  }
+}
diff --git a/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/FormatTableTestBase.scala b/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/FormatTableTestBase.scala
index ad2384d49d..935f9176e1 100644
--- a/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/FormatTableTestBase.scala
+++ b/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/FormatTableTestBase.scala
@@ -22,6 +22,7 @@ import org.apache.paimon.catalog.{DelegateCatalog, Identifier}
 import org.apache.paimon.fs.Path
 import org.apache.paimon.hive.HiveCatalog
 import org.apache.paimon.spark.PaimonHiveTestBase
+import org.apache.paimon.spark.PaimonHiveTestBase.hiveUri
 import org.apache.paimon.table.FormatTable
 import org.apache.paimon.utils.CompressUtils
 

Reply via email to