This is an automated email from the ASF dual-hosted git repository.

mchades pushed a commit to branch branch-1.0
in repository https://gitbox.apache.org/repos/asf/gravitino.git


The following commit(s) were added to refs/heads/branch-1.0 by this push:
     new 30b2b17f11 [#8979] fix(sql-scripts): add sql scripts test and fix bug 
(#9030)
30b2b17f11 is described below

commit 30b2b17f117d180e113d97c7986f22cd906a8de3
Author: github-actions[bot] 
<41898282+github-actions[bot]@users.noreply.github.com>
AuthorDate: Thu Nov 6 21:01:49 2025 +0800

    [#8979] fix(sql-scripts): add sql scripts test and fix bug (#9030)
    
    ### What changes were proposed in this pull request?
    
    add sql scripts test and fix bug
    
    ### Why are the changes needed?
    
    Fix: #8979
    
    ### Does this PR introduce _any_ user-facing change?
    
    no
    
    ### How was this patch tested?
    
    CI pass
    
    Co-authored-by: mchades <[email protected]>
---
 .../apache/gravitino/storage/TestSQLScripts.java   | 176 +++++++++++++++++++++
 docs/security/credential-vending.md                |   6 +-
 scripts/h2/upgrade-0.9.0-to-1.0.0-h2.sql           |   4 +-
 .../upgrade-0.8.0-to-0.9.0-postgresql.sql          |   2 +-
 .../upgrade-0.9.0-to-1.0.0-postgresql.sql          |   4 +
 5 files changed, 186 insertions(+), 6 deletions(-)

diff --git 
a/core/src/test/java/org/apache/gravitino/storage/TestSQLScripts.java 
b/core/src/test/java/org/apache/gravitino/storage/TestSQLScripts.java
new file mode 100644
index 0000000000..92f02fcf90
--- /dev/null
+++ b/core/src/test/java/org/apache/gravitino/storage/TestSQLScripts.java
@@ -0,0 +1,176 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.gravitino.storage;
+
+import static 
org.apache.gravitino.integration.test.util.TestDatabaseName.MYSQL_JDBC_BACKEND;
+import static 
org.apache.gravitino.integration.test.util.TestDatabaseName.PG_JDBC_BACKEND;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.gravitino.integration.test.container.MySQLContainer;
+import org.apache.gravitino.integration.test.container.PostgreSQLContainer;
+import org.apache.gravitino.integration.test.util.BaseIT;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+
+public class TestSQLScripts extends BaseIT {
+
+  private String jdbcBackend;
+  private Path scriptDir;
+  @TempDir private File tempDir;
+  private Map<String, Connection> versionConnections;
+
+  @BeforeAll
+  public void startIntegrationTest() {
+    jdbcBackend = System.getenv("jdbcBackend");
+    Assertions.assertNotNull(jdbcBackend, "jdbcBackend environment variable is 
not set");
+
+    String gravitinoHome = System.getenv("GRAVITINO_HOME");
+    Assertions.assertNotNull(gravitinoHome, "GRAVITINO_HOME environment 
variable is not set");
+    scriptDir = Path.of(gravitinoHome, "scripts", jdbcBackend.toLowerCase());
+    Assertions.assertTrue(Files.exists(scriptDir), "Script directory does not 
exist: " + scriptDir);
+    versionConnections = new HashMap<>();
+  }
+
+  @AfterAll
+  public void stopIntegrationTest() {
+    // Close all connections
+    for (Connection conn : versionConnections.values()) {
+      if (conn != null) {
+        try {
+          conn.close();
+        } catch (SQLException e) {
+          // Ignore
+        }
+      }
+    }
+    versionConnections.clear();
+  }
+
+  @Test
+  public void testSQLScripts() throws SQLException {
+    File[] scriptFiles = scriptDir.toFile().listFiles();
+    Assertions.assertNotNull(scriptFiles, "No script files found in " + 
scriptDir);
+    // Sort files to ensure the correct execution order (schema -> upgrade)
+    Arrays.sort(scriptFiles, Comparator.comparing(File::getName));
+
+    // A map to store connections for different schema versions
+    Pattern schemaPattern =
+        Pattern.compile("schema-([\\d.]+)-" + jdbcBackend.toLowerCase() + 
"\\.sql");
+    Pattern upgradePattern =
+        Pattern.compile("upgrade-([\\d.]+)-to-([\\d.]+)-" + 
jdbcBackend.toLowerCase() + "\\.sql");
+
+    for (File scriptFile : scriptFiles) {
+      Matcher schemaMatcher = schemaPattern.matcher(scriptFile.getName());
+      Matcher upgradeMatcher = upgradePattern.matcher(scriptFile.getName());
+
+      if (schemaMatcher.matches()) {
+        String version = schemaMatcher.group(1);
+        String dbName = "schema_" + version.replace('.', '_');
+
+        Connection conn = getSQLConnection(jdbcBackend, dbName);
+        Assertions.assertDoesNotThrow(
+            () -> executeSQLScript(conn, scriptFile),
+            "Failed to execute schema script for version " + version);
+        versionConnections.put(version, conn);
+
+      } else if (upgradeMatcher.matches()) {
+        String fromVersion = upgradeMatcher.group(1);
+
+        Connection conn = versionConnections.get(fromVersion);
+        Assertions.assertNotNull(
+            conn, "No existing database connection found for version " + 
fromVersion);
+        Assertions.assertDoesNotThrow(
+            () -> executeSQLScript(conn, scriptFile),
+            "Failed to execute upgrade script" + " in file " + 
scriptFile.getName());
+      } else {
+        Assertions.fail("Unrecognized script file name: " + 
scriptFile.getName());
+      }
+    }
+  }
+
+  private void executeSQLScript(Connection connection, File scriptFile) throws 
IOException {
+    String sqlContent = FileUtils.readFileToString(scriptFile, "UTF-8");
+    Arrays.stream(sqlContent.split(";"))
+        .map(String::trim)
+        .filter(s -> !s.isEmpty())
+        .forEach(
+            sql -> {
+              try (Statement stmt = connection.createStatement()) {
+                stmt.execute(sql);
+              } catch (SQLException e) {
+                throw new RuntimeException("Failed to execute SQL: " + sql, e);
+              }
+            });
+  }
+
+  private Connection getSQLConnection(String jdbcBackend, String dBName) 
throws SQLException {
+    if ("h2".equalsIgnoreCase(jdbcBackend)) {
+      // Use a temporary directory for the database files
+      String dbPath = new File(tempDir, dBName).getAbsolutePath();
+      // AUTO_SERVER=TRUE allows multiple connections to the same database in 
the same process
+      String url = 
String.format("jdbc:h2:file:%s;MODE=MYSQL;AUTO_SERVER=TRUE", dbPath);
+      return DriverManager.getConnection(url, "sa", "");
+
+    } else if ("mysql".equalsIgnoreCase(jdbcBackend)) {
+      containerSuite.startMySQLContainer(MYSQL_JDBC_BACKEND);
+      MySQLContainer mySQLContainer = containerSuite.getMySQLContainer();
+      String mysqlUrl = mySQLContainer.getJdbcUrl(MYSQL_JDBC_BACKEND);
+      Connection connection =
+          DriverManager.getConnection(
+              StringUtils.substring(mysqlUrl, 0, mysqlUrl.lastIndexOf("/")), 
"root", "root");
+      Statement statement = connection.createStatement();
+      statement.execute("create database if not exists " + dBName);
+      connection.setCatalog(dBName);
+      return connection;
+
+    } else if ("PostgreSQL".equalsIgnoreCase(jdbcBackend)) {
+      containerSuite.startPostgreSQLContainer(PG_JDBC_BACKEND);
+      PostgreSQLContainer pgContainer = 
containerSuite.getPostgreSQLContainer();
+      String pgUrlWithoutSchema = pgContainer.getJdbcUrl(PG_JDBC_BACKEND);
+      Connection connection =
+          DriverManager.getConnection(
+              pgUrlWithoutSchema, pgContainer.getUsername(), 
pgContainer.getPassword());
+      connection.setCatalog(PG_JDBC_BACKEND.toString().toLowerCase());
+      Statement statement = connection.createStatement();
+      statement.execute("create schema if not exists " + dBName);
+      connection.setSchema(dBName);
+      return connection;
+    }
+    Assertions.fail("Unsupported JDBC backend: " + jdbcBackend);
+    return null;
+  }
+}
diff --git a/docs/security/credential-vending.md 
b/docs/security/credential-vending.md
index 703cae95b7..28cb71da43 100644
--- a/docs/security/credential-vending.md
+++ b/docs/security/credential-vending.md
@@ -155,13 +155,13 @@ For Gravitino Iceberg REST server, please ensure that the 
credential file can be
 
 ## Custom credentials
 
-Gravitino supports custom credentials, you can implement the 
`org.apache.gravitino.credential.CredentialProvider` interface to support 
custom credentials, and place the corresponding jar to the classpath of Iceberg 
catalog server or Hadoop catalog.
+Gravitino supports custom credentials. You can implement the 
`org.apache.gravitino.credential.CredentialProvider` interface to support 
custom credentials, and place the corresponding jar on the classpath of the Iceberg 
catalog server or Fileset catalog.
 
 ## Deployment
 
 Besides setting credentials related configuration, please download Gravitino 
cloud bundle jar and place it in the classpath of Iceberg REST server or Hadoop 
catalog.
 
-For Hadoop catalog, please use Gravitino cloud bundle jar with Hadoop and 
cloud packages:
+For Fileset catalog, please use Gravitino cloud bundle jar with Hadoop and 
cloud packages:
 
 - [Gravitino AWS bundle jar with Hadoop and cloud 
packages](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-aws-bundle)
 - [Gravitino Aliyun bundle jar with Hadoop and cloud 
packages](https://mvnrepository.com/artifact/org.apache.gravitino/gravitino-aliyun-bundle)
@@ -182,7 +182,7 @@ For OSS, Iceberg doesn't provide Iceberg Aliyun bundle jar 
which contains OSS pa
 The classpath of the server:
 
 - Iceberg REST server: the classpath differs in different deploy mode, please 
refer to [Server management](../iceberg-rest-service.md#server-management) part.
-- Hadoop catalog: `catalogs/hadoop/libs/`
+- Fileset catalog: `catalogs/fileset/libs/`
 
 ## Usage example
 
diff --git a/scripts/h2/upgrade-0.9.0-to-1.0.0-h2.sql 
b/scripts/h2/upgrade-0.9.0-to-1.0.0-h2.sql
index 6bd4161e49..40f1f0fe0d 100644
--- a/scripts/h2/upgrade-0.9.0-to-1.0.0-h2.sql
+++ b/scripts/h2/upgrade-0.9.0-to-1.0.0-h2.sql
@@ -41,7 +41,7 @@ CREATE TABLE IF NOT EXISTS `policy_version_info` (
     `deleted_at` BIGINT(20) UNSIGNED NOT NULL DEFAULT 0 COMMENT 'policy 
deleted at',
     PRIMARY KEY (`id`),
     UNIQUE KEY `uk_pod_ver_del` (`policy_id`, `version`, `deleted_at`),
-    KEY `idx_mid` (`metalake_id`)
+    KEY `idx_pmid` (`metalake_id`)
 ) ENGINE=InnoDB;
 
 CREATE TABLE IF NOT EXISTS `policy_relation_meta` (
@@ -56,7 +56,7 @@ CREATE TABLE IF NOT EXISTS `policy_relation_meta` (
     PRIMARY KEY (`id`),
     UNIQUE KEY `uk_pi_mi_mo_del` (`policy_id`, `metadata_object_id`, 
`metadata_object_type`, `deleted_at`),
     KEY `idx_pid` (`policy_id`),
-    KEY `idx_mid` (`metadata_object_id`)
+    KEY `idx_prmid` (`metadata_object_id`)
 ) ENGINE=InnoDB;
 
 -- using default 'unknown' to fill in the new column for compatibility
diff --git a/scripts/postgresql/upgrade-0.8.0-to-0.9.0-postgresql.sql 
b/scripts/postgresql/upgrade-0.8.0-to-0.9.0-postgresql.sql
index ba45526498..f4cc22dea6 100644
--- a/scripts/postgresql/upgrade-0.8.0-to-0.9.0-postgresql.sql
+++ b/scripts/postgresql/upgrade-0.8.0-to-0.9.0-postgresql.sql
@@ -20,7 +20,7 @@
 -- using default 'unknown' to fill in the new column for compatibility
 ALTER TABLE fileset_version_info ADD COLUMN storage_location_name VARCHAR(256) 
NOT NULL DEFAULT 'unknown';
 COMMENT ON COLUMN fileset_version_info.storage_location_name IS 'fileset 
storage location name';
-ALTER TABLE fileset_version_info DROP CONSTRAINT uk_fid_ver_del;
+ALTER TABLE fileset_version_info DROP CONSTRAINT 
fileset_version_info_fileset_id_version_deleted_at_key;
 ALTER TABLE fileset_version_info ADD CONSTRAINT uk_fid_ver_sto_del UNIQUE 
(fileset_id, version, storage_location_name, deleted_at);
 -- remove the default value for storage_location_name
 ALTER TABLE fileset_version_info ALTER COLUMN storage_location_name DROP 
DEFAULT;
\ No newline at end of file
diff --git a/scripts/postgresql/upgrade-0.9.0-to-1.0.0-postgresql.sql 
b/scripts/postgresql/upgrade-0.9.0-to-1.0.0-postgresql.sql
index 2c25a0240e..5d8f541fca 100644
--- a/scripts/postgresql/upgrade-0.9.0-to-1.0.0-postgresql.sql
+++ b/scripts/postgresql/upgrade-0.9.0-to-1.0.0-postgresql.sql
@@ -107,6 +107,10 @@ CREATE TABLE IF NOT EXISTS statistic_meta (
     statistic_value TEXT NOT NULL,
     metadata_object_id BIGINT NOT NULL,
     metadata_object_type VARCHAR(64) NOT NULL,
+    audit_info TEXT NOT NULL,
+    current_version INT NOT NULL DEFAULT 1,
+    last_version INT NOT NULL DEFAULT 1,
+    deleted_at BIGINT NOT NULL DEFAULT 0,
     PRIMARY KEY (statistic_id),
     UNIQUE (statistic_name, metadata_object_id, deleted_at)
 );

Reply via email to