This is an automated email from the ASF dual-hosted git repository.

fanng pushed a commit to branch branch-0.9
in repository https://gitbox.apache.org/repos/asf/gravitino.git


The following commit(s) were added to refs/heads/branch-0.9 by this push:
     new e67976364e [#6697] fix(spark): fix throw exception during PG table 
creation in spark connector (#7042)
e67976364e is described below

commit e67976364e0e44fa8979abf4b0463bcef7d9a022
Author: github-actions[bot] 
<41898282+github-actions[bot]@users.noreply.github.com>
AuthorDate: Wed Apr 23 10:39:36 2025 +0800

    [#6697] fix(spark): fix throw exception during PG table creation in spark 
connector (#7042)
    
    ### What changes were proposed in this pull request?
    
    Filtering out the `owner` table property, as table properties are not
    supported in PG and will cause issues during PG table creation, as
    reported in https://github.com/apache/gravitino/issues/6697
    
    ### Why are the changes needed?
    
    fix https://github.com/apache/gravitino/issues/6697
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    Tested in local environment, following the steps in
    https://github.com/apache/gravitino/issues/6697#issuecomment-2746218039
    
    ```
    spark-sql ()> create table public.a(a int);
    Time taken: 0.483 seconds
    spark-sql ()>
    ```
    
    <img width="1403" alt="image"
    
src="https://github.com/user-attachments/assets/21e1e557-ac7c-4ebb-8dae-c1d6dddbbd19"
    />
    
    TODO: add an integration test.
    
    Co-authored-by: Tian Lu <[email protected]>
---
 .../integration/test/util/TestDatabaseName.java    | 12 ++++++
 spark-connector/spark-common/build.gradle.kts      |  1 +
 .../connector/jdbc/JdbcPropertiesConstants.java    |  1 +
 .../connector/jdbc/JdbcPropertiesConverter.java    | 26 +++++++++++-
 .../connector/version/CatalogNameAdaptor.java      | 12 ++++++
 .../connector/integration/test/SparkCommonIT.java  |  9 ++++
 .../integration/test/hive/SparkHiveCatalogIT.java  | 10 +++++
 .../test/iceberg/SparkIcebergCatalogIT.java        | 10 +++++
 .../test/jdbc/SparkJdbcMysqlCatalogIT.java         | 10 +++++
 ...ogIT.java => SparkJdbcPostgreSqlCatalogIT.java} | 48 ++++++++++++++--------
 .../test/paimon/SparkPaimonCatalogIT.java          | 10 +++++
 spark-connector/v3.3/spark/build.gradle.kts        |  2 +
 .../GravitinoPostgreSqlCatalogSpark33.java}        | 21 ++++------
 .../test/jdbc/SparkJdbcPostgreSqlCatalogIT33.java} | 27 +++++++-----
 spark-connector/v3.4/spark/build.gradle.kts        |  2 +
 .../GravitinoPostgreSqlCatalogSpark34.java}        | 21 ++++------
 .../test/jdbc/SparkJdbcPostgreSqlCatalogIT34.java} | 27 +++++++-----
 spark-connector/v3.5/spark/build.gradle.kts        |  2 +
 .../GravitinoPostgreSqlCatalogSpark35.java}        | 21 ++++------
 .../test/jdbc/SparkJdbcPostgreSqlCatalogIT35.java} | 27 +++++++-----
 20 files changed, 211 insertions(+), 88 deletions(-)

diff --git 
a/integration-test-common/src/test/java/org/apache/gravitino/integration/test/util/TestDatabaseName.java
 
b/integration-test-common/src/test/java/org/apache/gravitino/integration/test/util/TestDatabaseName.java
index 5f0ec3de4d..72c466629c 100644
--- 
a/integration-test-common/src/test/java/org/apache/gravitino/integration/test/util/TestDatabaseName.java
+++ 
b/integration-test-common/src/test/java/org/apache/gravitino/integration/test/util/TestDatabaseName.java
@@ -102,4 +102,16 @@ public enum TestDatabaseName {
       return this.name().toLowerCase();
     }
   },
+
+  /**
+   * Represents the PostgreSQL database for
+   * 
org.apache.gravitino.spark.connector.integration.test.jdbc.SparkJdbcPostgreSqlCatalogIT.
+   */
+  PG_CATALOG_PG_IT {
+    /** PostgreSQL only accepts lowercase database names. */
+    @Override
+    public String toString() {
+      return this.name().toLowerCase();
+    }
+  },
 }
diff --git a/spark-connector/spark-common/build.gradle.kts 
b/spark-connector/spark-common/build.gradle.kts
index dc6bbd258e..5a61934295 100644
--- a/spark-connector/spark-common/build.gradle.kts
+++ b/spark-connector/spark-common/build.gradle.kts
@@ -114,6 +114,7 @@ dependencies {
   testImplementation(libs.junit.jupiter.api)
   testImplementation(libs.junit.jupiter.params)
   testImplementation(libs.mysql.driver)
+  testImplementation(libs.postgresql.driver)
   testImplementation(libs.testcontainers)
 
   testImplementation("org.apache.iceberg:iceberg-core:$icebergVersion")
diff --git 
a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConstants.java
 
b/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConstants.java
index f1cf50f81d..fb095b9f39 100644
--- 
a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConstants.java
+++ 
b/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConstants.java
@@ -25,6 +25,7 @@ public class JdbcPropertiesConstants {
   public static final String GRAVITINO_JDBC_PASSWORD = "jdbc-password";
   public static final String GRAVITINO_JDBC_DRIVER = "jdbc-driver";
   public static final String GRAVITINO_JDBC_URL = "jdbc-url";
+  public static final String GRAVITINO_JDBC_DATABASE = "jdbc-database";
 
   public static final String SPARK_JDBC_URL = "url";
   public static final String SPARK_JDBC_USER = "user";
diff --git 
a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConverter.java
 
b/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConverter.java
index 7516646e34..7bd2a86b85 100644
--- 
a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConverter.java
+++ 
b/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConverter.java
@@ -27,16 +27,29 @@ import 
org.apache.gravitino.spark.connector.PropertiesConverter;
 
 public class JdbcPropertiesConverter implements PropertiesConverter {
 
+  private final boolean supportsTableProperties;
+
   public static class JdbcPropertiesConverterHolder {
     private static final JdbcPropertiesConverter INSTANCE = new 
JdbcPropertiesConverter();
+    private static final JdbcPropertiesConverter PG_INSTANCE = new 
JdbcPropertiesConverter(false);
+  }
+
+  private JdbcPropertiesConverter() {
+    this(true);
   }
 
-  private JdbcPropertiesConverter() {}
+  private JdbcPropertiesConverter(boolean supportsTableProperties) {
+    this.supportsTableProperties = supportsTableProperties;
+  }
 
   public static JdbcPropertiesConverter getInstance() {
     return JdbcPropertiesConverterHolder.INSTANCE;
   }
 
+  public static JdbcPropertiesConverter getPGInstance() {
+    return JdbcPropertiesConverterHolder.PG_INSTANCE;
+  }
+
   private static final Map<String, String> GRAVITINO_CONFIG_TO_JDBC =
       ImmutableMap.of(
           JdbcPropertiesConstants.GRAVITINO_JDBC_URL,
@@ -63,7 +76,16 @@ public class JdbcPropertiesConverter implements 
PropertiesConverter {
 
   @Override
   public Map<String, String> toGravitinoTableProperties(Map<String, String> 
properties) {
-    return new HashMap<>(properties);
+    if (!supportsTableProperties) {
+      for (String key : properties.keySet()) {
+        if (!"owner".equalsIgnoreCase(key)) {
+          throw new UnsupportedOperationException("Doesn't support table 
property " + key);
+        }
+      }
+      return new HashMap<>();
+    } else {
+      return new HashMap<>(properties);
+    }
   }
 
   @Override
diff --git 
a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/version/CatalogNameAdaptor.java
 
b/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/version/CatalogNameAdaptor.java
index 9d8594b912..8724941529 100644
--- 
a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/version/CatalogNameAdaptor.java
+++ 
b/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/version/CatalogNameAdaptor.java
@@ -55,12 +55,24 @@ public class CatalogNameAdaptor {
           "3.5",
           
"org.apache.gravitino.spark.connector.jdbc.GravitinoJdbcCatalogSpark35");
 
+  private static final Map<String, String> pgCatalogNames =
+      ImmutableMap.of(
+          "3.3",
+          
"org.apache.gravitino.spark.connector.jdbc.postgresql.GravitinoPostgreSqlCatalogSpark33",
+          "3.4",
+          
"org.apache.gravitino.spark.connector.jdbc.postgresql.GravitinoPostgreSqlCatalogSpark34",
+          "3.5",
+          
"org.apache.gravitino.spark.connector.jdbc.postgresql.GravitinoPostgreSqlCatalogSpark35");
+
   private static String sparkVersion() {
     return package$.MODULE$.SPARK_VERSION();
   }
 
   private static String getCatalogName(String provider, int majorVersion, int 
minorVersion) {
     if (provider.startsWith("jdbc")) {
+      if (provider.startsWith("jdbc-postgresql")) {
+        return pgCatalogNames.get(String.format("%d.%d", majorVersion, 
minorVersion));
+      }
       return jdbcCatalogNames.get(String.format("%d.%d", majorVersion, 
minorVersion));
     }
     String key =
diff --git 
a/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/SparkCommonIT.java
 
b/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/SparkCommonIT.java
index 2eb9e7b9b5..9d8d8fd5da 100644
--- 
a/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/SparkCommonIT.java
+++ 
b/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/SparkCommonIT.java
@@ -123,6 +123,12 @@ public abstract class SparkCommonIT extends SparkEnvIT {
 
   protected abstract boolean supportsComplexType();
 
+  protected abstract boolean supportsUpdateColumnPosition();
+
+  // @todo temporarily added for: 
https://github.com/apache/gravitino/issues/6907, should be removed
+  // after the issue is addressed
+  protected abstract boolean supportListTable();
+
   protected SparkTableInfoChecker getTableInfoChecker() {
     return SparkTableInfoChecker.create();
   }
@@ -178,6 +184,7 @@ public abstract class SparkCommonIT extends SparkEnvIT {
   }
 
   @Test
+  @EnabledIf("supportListTable")
   void testListTables() {
     String tableName = "t_list";
     dropTableIfExists(tableName);
@@ -376,6 +383,7 @@ public abstract class SparkCommonIT extends SparkEnvIT {
   }
 
   @Test
+  @EnabledIf("supportListTable")
   void testListTable() {
     String table1 = "list1";
     String table2 = "list2";
@@ -496,6 +504,7 @@ public abstract class SparkCommonIT extends SparkEnvIT {
   }
 
   @Test
+  @EnabledIf("supportsUpdateColumnPosition")
   void testUpdateColumnPosition() {
     String tableName = "test_column_position";
     dropTableIfExists(tableName);
diff --git 
a/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/hive/SparkHiveCatalogIT.java
 
b/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/hive/SparkHiveCatalogIT.java
index 6ed8e12d64..e8fd08e5be 100644
--- 
a/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/hive/SparkHiveCatalogIT.java
+++ 
b/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/hive/SparkHiveCatalogIT.java
@@ -94,6 +94,16 @@ public abstract class SparkHiveCatalogIT extends 
SparkCommonIT {
     return true;
   }
 
+  @Override
+  protected boolean supportsUpdateColumnPosition() {
+    return true;
+  }
+
+  @Override
+  protected boolean supportListTable() {
+    return true;
+  }
+
   @Test
   void testCreateHiveFormatPartitionTable() {
     String tableName = "hive_partition_table";
diff --git 
a/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogIT.java
 
b/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogIT.java
index 291f8f25db..78607332ab 100644
--- 
a/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogIT.java
+++ 
b/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogIT.java
@@ -119,6 +119,16 @@ public abstract class SparkIcebergCatalogIT extends 
SparkCommonIT {
     return true;
   }
 
+  @Override
+  protected boolean supportsUpdateColumnPosition() {
+    return true;
+  }
+
+  @Override
+  protected boolean supportListTable() {
+    return true;
+  }
+
   @Override
   protected String getTableLocation(SparkTableInfo table) {
     return String.join(File.separator, table.getTableLocation(), "data");
diff --git 
a/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/jdbc/SparkJdbcMysqlCatalogIT.java
 
b/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/jdbc/SparkJdbcMysqlCatalogIT.java
index 1b77047fa8..60048d0c79 100644
--- 
a/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/jdbc/SparkJdbcMysqlCatalogIT.java
+++ 
b/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/jdbc/SparkJdbcMysqlCatalogIT.java
@@ -71,6 +71,16 @@ public abstract class SparkJdbcMysqlCatalogIT extends 
SparkCommonIT {
     return false;
   }
 
+  @Override
+  protected boolean supportsUpdateColumnPosition() {
+    return true;
+  }
+
+  @Override
+  protected boolean supportListTable() {
+    return true;
+  }
+
   @Override
   protected String getCatalogName() {
     return "jdbc_mysql";
diff --git 
a/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/jdbc/SparkJdbcMysqlCatalogIT.java
 
b/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/jdbc/SparkJdbcPostgreSqlCatalogIT.java
similarity index 70%
copy from 
spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/jdbc/SparkJdbcMysqlCatalogIT.java
copy to 
spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/jdbc/SparkJdbcPostgreSqlCatalogIT.java
index 1b77047fa8..d944f315a5 100644
--- 
a/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/jdbc/SparkJdbcMysqlCatalogIT.java
+++ 
b/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/jdbc/SparkJdbcPostgreSqlCatalogIT.java
@@ -18,7 +18,7 @@
  */
 package org.apache.gravitino.spark.connector.integration.test.jdbc;
 
-import static 
org.apache.gravitino.integration.test.util.TestDatabaseName.MYSQL_CATALOG_MYSQL_IT;
+import static 
org.apache.gravitino.integration.test.util.TestDatabaseName.PG_CATALOG_PG_IT;
 
 import com.google.common.collect.Maps;
 import java.util.Map;
@@ -26,15 +26,17 @@ import 
org.apache.gravitino.integration.test.container.ContainerSuite;
 import org.apache.gravitino.spark.connector.integration.test.SparkCommonIT;
 import 
org.apache.gravitino.spark.connector.integration.test.util.SparkTableInfoChecker;
 import org.apache.gravitino.spark.connector.jdbc.JdbcPropertiesConstants;
+import org.junit.jupiter.api.Disabled;
 import org.junit.jupiter.api.Tag;
 
 @Tag("gravitino-docker-test")
-public abstract class SparkJdbcMysqlCatalogIT extends SparkCommonIT {
+@Disabled("Disabled until PG table creation is supported")
+public class SparkJdbcPostgreSqlCatalogIT extends SparkCommonIT {
 
-  protected String mysqlUrl;
-  protected String mysqlUsername;
-  protected String mysqlPassword;
-  protected String mysqlDriver;
+  protected String pgUrl;
+  protected String pgUsername;
+  protected String pgPassword;
+  protected String pgDriver;
 
   @Override
   protected boolean supportsSparkSQLClusteredBy() {
@@ -71,14 +73,24 @@ public abstract class SparkJdbcMysqlCatalogIT extends 
SparkCommonIT {
     return false;
   }
 
+  @Override
+  protected boolean supportsUpdateColumnPosition() {
+    return false;
+  }
+
+  @Override
+  protected boolean supportListTable() {
+    return false;
+  }
+
   @Override
   protected String getCatalogName() {
-    return "jdbc_mysql";
+    return "jdbc_postgresql";
   }
 
   @Override
   protected String getProvider() {
-    return "jdbc-mysql";
+    return "jdbc-postgresql";
   }
 
   @Override
@@ -89,20 +101,22 @@ public abstract class SparkJdbcMysqlCatalogIT extends 
SparkCommonIT {
   @Override
   protected void initCatalogEnv() throws Exception {
     ContainerSuite containerSuite = ContainerSuite.getInstance();
-    containerSuite.startMySQLContainer(MYSQL_CATALOG_MYSQL_IT);
-    mysqlUrl = containerSuite.getMySQLContainer().getJdbcUrl();
-    mysqlUsername = containerSuite.getMySQLContainer().getUsername();
-    mysqlPassword = containerSuite.getMySQLContainer().getPassword();
-    mysqlDriver = 
containerSuite.getMySQLContainer().getDriverClassName(MYSQL_CATALOG_MYSQL_IT);
+    containerSuite.startPostgreSQLContainer(PG_CATALOG_PG_IT);
+    this.pgUrl = containerSuite.getPostgreSQLContainer().getJdbcUrl();
+    this.pgUsername = containerSuite.getPostgreSQLContainer().getUsername();
+    this.pgPassword = containerSuite.getPostgreSQLContainer().getPassword();
+    this.pgDriver = 
containerSuite.getPostgreSQLContainer().getDriverClassName(PG_CATALOG_PG_IT);
   }
 
   @Override
   protected Map<String, String> getCatalogConfigs() {
     Map<String, String> catalogProperties = Maps.newHashMap();
-    catalogProperties.put(JdbcPropertiesConstants.GRAVITINO_JDBC_URL, 
mysqlUrl);
-    catalogProperties.put(JdbcPropertiesConstants.GRAVITINO_JDBC_USER, 
mysqlUsername);
-    catalogProperties.put(JdbcPropertiesConstants.GRAVITINO_JDBC_PASSWORD, 
mysqlPassword);
-    catalogProperties.put(JdbcPropertiesConstants.GRAVITINO_JDBC_DRIVER, 
mysqlDriver);
+    catalogProperties.put(JdbcPropertiesConstants.GRAVITINO_JDBC_URL, 
this.pgUrl);
+    catalogProperties.put(JdbcPropertiesConstants.GRAVITINO_JDBC_USER, 
this.pgUsername);
+    catalogProperties.put(JdbcPropertiesConstants.GRAVITINO_JDBC_PASSWORD, 
this.pgPassword);
+    catalogProperties.put(JdbcPropertiesConstants.GRAVITINO_JDBC_DRIVER, 
this.pgDriver);
+    catalogProperties.put(
+        JdbcPropertiesConstants.GRAVITINO_JDBC_DATABASE, 
PG_CATALOG_PG_IT.toString());
     return catalogProperties;
   }
 }
diff --git 
a/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/paimon/SparkPaimonCatalogIT.java
 
b/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/paimon/SparkPaimonCatalogIT.java
index 40afa06085..975cf7e589 100644
--- 
a/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/paimon/SparkPaimonCatalogIT.java
+++ 
b/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/paimon/SparkPaimonCatalogIT.java
@@ -73,6 +73,16 @@ public abstract class SparkPaimonCatalogIT extends 
SparkCommonIT {
     return true;
   }
 
+  @Override
+  protected boolean supportsUpdateColumnPosition() {
+    return true;
+  }
+
+  @Override
+  protected boolean supportListTable() {
+    return true;
+  }
+
   @Override
   protected boolean supportsReplaceColumns() {
     // Paimon doesn't support replace columns, because it doesn't support drop 
all fields in table.
diff --git a/spark-connector/v3.3/spark/build.gradle.kts 
b/spark-connector/v3.3/spark/build.gradle.kts
index 6b633434e4..257020a84c 100644
--- a/spark-connector/v3.3/spark/build.gradle.kts
+++ b/spark-connector/v3.3/spark/build.gradle.kts
@@ -123,6 +123,7 @@ dependencies {
   testImplementation(libs.junit.jupiter.api)
   testImplementation(libs.junit.jupiter.params)
   testImplementation(libs.mysql.driver)
+  testImplementation(libs.postgresql.driver)
   testImplementation(libs.testcontainers)
 
   // org.apache.iceberg.rest.RESTSerializers#registerAll(ObjectMapper) has 
different method signature for iceberg-core and iceberg-spark-runtime package, 
we must make sure iceberg-core is in front to start up MiniGravitino server.
@@ -167,6 +168,7 @@ tasks.test {
     dependsOn(":iceberg:iceberg-rest-server:jar")
     dependsOn(":catalogs:catalog-lakehouse-paimon:jar")
     dependsOn(":catalogs:catalog-jdbc-mysql:jar")
+    dependsOn(":catalogs:catalog-jdbc-postgresql:jar")
   }
 }
 
diff --git 
a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConstants.java
 
b/spark-connector/v3.3/spark/src/main/java/org/apache/gravitino/spark/connector/jdbc/postgresql/GravitinoPostgreSqlCatalogSpark33.java
similarity index 58%
copy from 
spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConstants.java
copy to 
spark-connector/v3.3/spark/src/main/java/org/apache/gravitino/spark/connector/jdbc/postgresql/GravitinoPostgreSqlCatalogSpark33.java
index f1cf50f81d..147e23a889 100644
--- 
a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConstants.java
+++ 
b/spark-connector/v3.3/spark/src/main/java/org/apache/gravitino/spark/connector/jdbc/postgresql/GravitinoPostgreSqlCatalogSpark33.java
@@ -16,18 +16,15 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.gravitino.spark.connector.jdbc.postgresql;
 
-package org.apache.gravitino.spark.connector.jdbc;
+import org.apache.gravitino.spark.connector.PropertiesConverter;
+import org.apache.gravitino.spark.connector.jdbc.GravitinoJdbcCatalogSpark33;
+import org.apache.gravitino.spark.connector.jdbc.JdbcPropertiesConverter;
 
-public class JdbcPropertiesConstants {
-
-  public static final String GRAVITINO_JDBC_USER = "jdbc-user";
-  public static final String GRAVITINO_JDBC_PASSWORD = "jdbc-password";
-  public static final String GRAVITINO_JDBC_DRIVER = "jdbc-driver";
-  public static final String GRAVITINO_JDBC_URL = "jdbc-url";
-
-  public static final String SPARK_JDBC_URL = "url";
-  public static final String SPARK_JDBC_USER = "user";
-  public static final String SPARK_JDBC_PASSWORD = "password";
-  public static final String SPARK_JDBC_DRIVER = "driver";
+public class GravitinoPostgreSqlCatalogSpark33 extends 
GravitinoJdbcCatalogSpark33 {
+  @Override
+  protected PropertiesConverter getPropertiesConverter() {
+    return JdbcPropertiesConverter.getPGInstance();
+  }
 }
diff --git 
a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConstants.java
 
b/spark-connector/v3.3/spark/src/test/java/org/apache/gravitino/spark/connector/integration/test/jdbc/SparkJdbcPostgreSqlCatalogIT33.java
similarity index 52%
copy from 
spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConstants.java
copy to 
spark-connector/v3.3/spark/src/test/java/org/apache/gravitino/spark/connector/integration/test/jdbc/SparkJdbcPostgreSqlCatalogIT33.java
index f1cf50f81d..c38b6fb9ad 100644
--- 
a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConstants.java
+++ 
b/spark-connector/v3.3/spark/src/test/java/org/apache/gravitino/spark/connector/integration/test/jdbc/SparkJdbcPostgreSqlCatalogIT33.java
@@ -16,18 +16,23 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.gravitino.spark.connector.integration.test.jdbc;
 
-package org.apache.gravitino.spark.connector.jdbc;
+import 
org.apache.gravitino.spark.connector.jdbc.postgresql.GravitinoPostgreSqlCatalogSpark33;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
 
-public class JdbcPropertiesConstants {
+@Disabled("Disabled until PG table creation is supported")
+public class SparkJdbcPostgreSqlCatalogIT33 extends 
SparkJdbcPostgreSqlCatalogIT {
 
-  public static final String GRAVITINO_JDBC_USER = "jdbc-user";
-  public static final String GRAVITINO_JDBC_PASSWORD = "jdbc-password";
-  public static final String GRAVITINO_JDBC_DRIVER = "jdbc-driver";
-  public static final String GRAVITINO_JDBC_URL = "jdbc-url";
-
-  public static final String SPARK_JDBC_URL = "url";
-  public static final String SPARK_JDBC_USER = "user";
-  public static final String SPARK_JDBC_PASSWORD = "password";
-  public static final String SPARK_JDBC_DRIVER = "driver";
+  @Test
+  void testCatalogClassName() {
+    String catalogClass =
+        getSparkSession()
+            .sessionState()
+            .conf()
+            .getConfString("spark.sql.catalog." + getCatalogName());
+    Assertions.assertEquals(GravitinoPostgreSqlCatalogSpark33.class.getName(), 
catalogClass);
+  }
 }
diff --git a/spark-connector/v3.4/spark/build.gradle.kts 
b/spark-connector/v3.4/spark/build.gradle.kts
index 08ab9ca9ca..7789017bd9 100644
--- a/spark-connector/v3.4/spark/build.gradle.kts
+++ b/spark-connector/v3.4/spark/build.gradle.kts
@@ -123,6 +123,7 @@ dependencies {
   testImplementation(libs.junit.jupiter.api)
   testImplementation(libs.junit.jupiter.params)
   testImplementation(libs.mysql.driver)
+  testImplementation(libs.postgresql.driver)
   testImplementation(libs.testcontainers)
 
   // org.apache.iceberg.rest.RESTSerializers#registerAll(ObjectMapper) has 
different method signature for iceberg-core and iceberg-spark-runtime package, 
we must make sure iceberg-core is in front to start up MiniGravitino server.
@@ -167,6 +168,7 @@ tasks.test {
     dependsOn(":iceberg:iceberg-rest-server:jar")
     dependsOn(":catalogs:catalog-lakehouse-paimon:jar")
     dependsOn(":catalogs:catalog-jdbc-mysql:jar")
+    dependsOn(":catalogs:catalog-jdbc-postgresql:jar")
   }
 }
 
diff --git 
a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConstants.java
 
b/spark-connector/v3.4/spark/src/main/java/org/apache/gravitino/spark/connector/jdbc/postgresql/GravitinoPostgreSqlCatalogSpark34.java
similarity index 58%
copy from 
spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConstants.java
copy to 
spark-connector/v3.4/spark/src/main/java/org/apache/gravitino/spark/connector/jdbc/postgresql/GravitinoPostgreSqlCatalogSpark34.java
index f1cf50f81d..545f85378c 100644
--- 
a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConstants.java
+++ 
b/spark-connector/v3.4/spark/src/main/java/org/apache/gravitino/spark/connector/jdbc/postgresql/GravitinoPostgreSqlCatalogSpark34.java
@@ -16,18 +16,15 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.gravitino.spark.connector.jdbc.postgresql;
 
-package org.apache.gravitino.spark.connector.jdbc;
+import org.apache.gravitino.spark.connector.PropertiesConverter;
+import org.apache.gravitino.spark.connector.jdbc.GravitinoJdbcCatalog;
+import org.apache.gravitino.spark.connector.jdbc.JdbcPropertiesConverter;
 
-public class JdbcPropertiesConstants {
-
-  public static final String GRAVITINO_JDBC_USER = "jdbc-user";
-  public static final String GRAVITINO_JDBC_PASSWORD = "jdbc-password";
-  public static final String GRAVITINO_JDBC_DRIVER = "jdbc-driver";
-  public static final String GRAVITINO_JDBC_URL = "jdbc-url";
-
-  public static final String SPARK_JDBC_URL = "url";
-  public static final String SPARK_JDBC_USER = "user";
-  public static final String SPARK_JDBC_PASSWORD = "password";
-  public static final String SPARK_JDBC_DRIVER = "driver";
+public class GravitinoPostgreSqlCatalogSpark34 extends GravitinoJdbcCatalog {
+  @Override
+  protected PropertiesConverter getPropertiesConverter() {
+    return JdbcPropertiesConverter.getPGInstance();
+  }
 }
diff --git 
a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConstants.java
 
b/spark-connector/v3.4/spark/src/test/java/org/apache/gravitino/spark/connector/integration/test/jdbc/SparkJdbcPostgreSqlCatalogIT34.java
similarity index 52%
copy from 
spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConstants.java
copy to 
spark-connector/v3.4/spark/src/test/java/org/apache/gravitino/spark/connector/integration/test/jdbc/SparkJdbcPostgreSqlCatalogIT34.java
index f1cf50f81d..836bde5da1 100644
--- 
a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConstants.java
+++ 
b/spark-connector/v3.4/spark/src/test/java/org/apache/gravitino/spark/connector/integration/test/jdbc/SparkJdbcPostgreSqlCatalogIT34.java
@@ -16,18 +16,23 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.gravitino.spark.connector.integration.test.jdbc;
 
-package org.apache.gravitino.spark.connector.jdbc;
+import 
org.apache.gravitino.spark.connector.jdbc.postgresql.GravitinoPostgreSqlCatalogSpark34;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
 
-public class JdbcPropertiesConstants {
+@Disabled("Disabled until PG table creation is supported")
+public class SparkJdbcPostgreSqlCatalogIT34 extends 
SparkJdbcPostgreSqlCatalogIT {
 
-  public static final String GRAVITINO_JDBC_USER = "jdbc-user";
-  public static final String GRAVITINO_JDBC_PASSWORD = "jdbc-password";
-  public static final String GRAVITINO_JDBC_DRIVER = "jdbc-driver";
-  public static final String GRAVITINO_JDBC_URL = "jdbc-url";
-
-  public static final String SPARK_JDBC_URL = "url";
-  public static final String SPARK_JDBC_USER = "user";
-  public static final String SPARK_JDBC_PASSWORD = "password";
-  public static final String SPARK_JDBC_DRIVER = "driver";
+  @Test
+  void testCatalogClassName() {
+    String catalogClass =
+        getSparkSession()
+            .sessionState()
+            .conf()
+            .getConfString("spark.sql.catalog." + getCatalogName());
+    Assertions.assertEquals(GravitinoPostgreSqlCatalogSpark34.class.getName(), 
catalogClass);
+  }
 }
diff --git a/spark-connector/v3.5/spark/build.gradle.kts 
b/spark-connector/v3.5/spark/build.gradle.kts
index 782d514aed..f6a5417ac2 100644
--- a/spark-connector/v3.5/spark/build.gradle.kts
+++ b/spark-connector/v3.5/spark/build.gradle.kts
@@ -125,6 +125,7 @@ dependencies {
   testImplementation(libs.junit.jupiter.api)
   testImplementation(libs.junit.jupiter.params)
   testImplementation(libs.mysql.driver)
+  testImplementation(libs.postgresql.driver)
   testImplementation(libs.testcontainers)
 
   // org.apache.iceberg.rest.RESTSerializers#registerAll(ObjectMapper) has 
different method signature for iceberg-core and iceberg-spark-runtime package, 
we must make sure iceberg-core is in front to start up MiniGravitino server.
@@ -169,6 +170,7 @@ tasks.test {
     dependsOn(":iceberg:iceberg-rest-server:jar")
     dependsOn(":catalogs:catalog-lakehouse-paimon:jar")
     dependsOn(":catalogs:catalog-jdbc-mysql:jar")
+    dependsOn(":catalogs:catalog-jdbc-postgresql:jar")
   }
 }
 
diff --git 
a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConstants.java
 
b/spark-connector/v3.5/spark/src/main/java/org/apache/gravitino/spark/connector/jdbc/postgresql/GravitinoPostgreSqlCatalogSpark35.java
similarity index 58%
copy from 
spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConstants.java
copy to 
spark-connector/v3.5/spark/src/main/java/org/apache/gravitino/spark/connector/jdbc/postgresql/GravitinoPostgreSqlCatalogSpark35.java
index f1cf50f81d..709013786c 100644
--- 
a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConstants.java
+++ 
b/spark-connector/v3.5/spark/src/main/java/org/apache/gravitino/spark/connector/jdbc/postgresql/GravitinoPostgreSqlCatalogSpark35.java
@@ -16,18 +16,15 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.gravitino.spark.connector.jdbc.postgresql;
 
-package org.apache.gravitino.spark.connector.jdbc;
+import org.apache.gravitino.spark.connector.PropertiesConverter;
+import org.apache.gravitino.spark.connector.jdbc.GravitinoJdbcCatalog;
+import org.apache.gravitino.spark.connector.jdbc.JdbcPropertiesConverter;
 
-public class JdbcPropertiesConstants {
-
-  public static final String GRAVITINO_JDBC_USER = "jdbc-user";
-  public static final String GRAVITINO_JDBC_PASSWORD = "jdbc-password";
-  public static final String GRAVITINO_JDBC_DRIVER = "jdbc-driver";
-  public static final String GRAVITINO_JDBC_URL = "jdbc-url";
-
-  public static final String SPARK_JDBC_URL = "url";
-  public static final String SPARK_JDBC_USER = "user";
-  public static final String SPARK_JDBC_PASSWORD = "password";
-  public static final String SPARK_JDBC_DRIVER = "driver";
+public class GravitinoPostgreSqlCatalogSpark35 extends GravitinoJdbcCatalog {
+  @Override
+  protected PropertiesConverter getPropertiesConverter() {
+    return JdbcPropertiesConverter.getPGInstance();
+  }
 }
diff --git 
a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConstants.java
 
b/spark-connector/v3.5/spark/src/test/java/org/apache/gravitino/spark/connector/integration/test/jdbc/SparkJdbcPostgreSqlCatalogIT35.java
similarity index 52%
copy from 
spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConstants.java
copy to 
spark-connector/v3.5/spark/src/test/java/org/apache/gravitino/spark/connector/integration/test/jdbc/SparkJdbcPostgreSqlCatalogIT35.java
index f1cf50f81d..893b98cc71 100644
--- 
a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/jdbc/JdbcPropertiesConstants.java
+++ 
b/spark-connector/v3.5/spark/src/test/java/org/apache/gravitino/spark/connector/integration/test/jdbc/SparkJdbcPostgreSqlCatalogIT35.java
@@ -16,18 +16,23 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.gravitino.spark.connector.integration.test.jdbc;
 
-package org.apache.gravitino.spark.connector.jdbc;
+import 
org.apache.gravitino.spark.connector.jdbc.postgresql.GravitinoPostgreSqlCatalogSpark35;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
 
-public class JdbcPropertiesConstants {
+@Disabled("Disabled until PG table creation is supported")
+public class SparkJdbcPostgreSqlCatalogIT35 extends 
SparkJdbcPostgreSqlCatalogIT {
 
-  public static final String GRAVITINO_JDBC_USER = "jdbc-user";
-  public static final String GRAVITINO_JDBC_PASSWORD = "jdbc-password";
-  public static final String GRAVITINO_JDBC_DRIVER = "jdbc-driver";
-  public static final String GRAVITINO_JDBC_URL = "jdbc-url";
-
-  public static final String SPARK_JDBC_URL = "url";
-  public static final String SPARK_JDBC_USER = "user";
-  public static final String SPARK_JDBC_PASSWORD = "password";
-  public static final String SPARK_JDBC_DRIVER = "driver";
+  @Test
+  void testCatalogClassName() {
+    String catalogClass =
+        getSparkSession()
+            .sessionState()
+            .conf()
+            .getConfString("spark.sql.catalog." + getCatalogName());
+    Assertions.assertEquals(GravitinoPostgreSqlCatalogSpark35.class.getName(), 
catalogClass);
+  }
 }


Reply via email to