yuqi1129 commented on code in PR #9207:
URL: https://github.com/apache/gravitino/pull/9207#discussion_r2564638565
##########
core/src/main/java/org/apache/gravitino/storage/relational/mapper/provider/postgresql/PolicyMetaPostgreSQLProvider.java:
##########
@@ -61,21 +59,21 @@ public String insertPolicyMetaOnDuplicateKeyUpdate(PolicyPO
policyPO) {
+ " (policy_id, policy_name, policy_type, metalake_id,"
+ " audit_info, current_version, last_version, deleted_at)"
+ " VALUES ("
- + " #{policyPO.policyId},"
- + " #{policyPO.policyName},"
- + " #{policyPO.policyType},"
- + " #{policyPO.metalakeId},"
- + " #{policyPO.auditInfo},"
- + " #{policyPO.currentVersion},"
- + " #{policyPO.lastVersion},"
- + " #{policyPO.deletedAt})"
+ + " #{policyMeta.policyId},"
Review Comment:
`policyMeta` seems to be the wrong name — I do not find it in the method
signature.
##########
core/src/test/java/org/apache/gravitino/storage/relational/BackendTestExtension.java:
##########
@@ -0,0 +1,231 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.gravitino.storage.relational;
+
+import static org.apache.gravitino.Configs.DEFAULT_ENTITY_RELATIONAL_STORE;
+import static
org.apache.gravitino.Configs.DEFAULT_RELATIONAL_JDBC_BACKEND_MAX_CONNECTIONS;
+import static
org.apache.gravitino.Configs.DEFAULT_RELATIONAL_JDBC_BACKEND_MAX_WAIT_MILLISECONDS;
+import static
org.apache.gravitino.Configs.ENTITY_RELATIONAL_JDBC_BACKEND_DRIVER;
+import static
org.apache.gravitino.Configs.ENTITY_RELATIONAL_JDBC_BACKEND_MAX_CONNECTIONS;
+import static
org.apache.gravitino.Configs.ENTITY_RELATIONAL_JDBC_BACKEND_PASSWORD;
+import static org.apache.gravitino.Configs.ENTITY_RELATIONAL_JDBC_BACKEND_URL;
+import static org.apache.gravitino.Configs.ENTITY_RELATIONAL_JDBC_BACKEND_USER;
+import static
org.apache.gravitino.Configs.ENTITY_RELATIONAL_JDBC_BACKEND_WAIT_MILLISECONDS;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.stream.Stream;
+import org.apache.commons.io.FileUtils;
+import org.apache.gravitino.Config;
+import org.apache.gravitino.Configs;
+import org.apache.gravitino.integration.test.util.BaseIT;
+import org.apache.gravitino.storage.relational.service.EntityIdService;
+import org.junit.jupiter.api.extension.AfterAllCallback;
+import org.junit.jupiter.api.extension.BeforeAllCallback;
+import org.junit.jupiter.api.extension.BeforeEachCallback;
+import org.junit.jupiter.api.extension.Extension;
+import org.junit.jupiter.api.extension.ExtensionContext;
+import org.junit.jupiter.api.extension.TestTemplateInvocationContext;
+import org.junit.jupiter.api.extension.TestTemplateInvocationContextProvider;
+import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class BackendTestExtension
+ implements TestTemplateInvocationContextProvider, BeforeAllCallback,
AfterAllCallback {
+
+ private static final Logger LOG =
LoggerFactory.getLogger(BackendTestExtension.class);
+ private static final String DOCKER_TEST_FLAG = "dockerTest";
+
+ private static final ExtensionContext.Namespace NAMESPACE =
+ ExtensionContext.Namespace.create(BackendTestExtension.class);
+ private static final String STORE_KEY = "BACKEND_MAP";
+
+ @Override
+ public void beforeAll(ExtensionContext context) {
+ // Initialize a Map and store it at the Class level.
+ // Key: backendType ("h2", "mysql"...), Value: RelationalBackend instance
+ context.getStore(NAMESPACE).put(STORE_KEY, new ConcurrentHashMap<String,
RelationalBackend>());
+ }
+
+ @Override
+ @SuppressWarnings("unchecked")
+ public void afterAll(ExtensionContext context) {
+ // Test class ended, close all started Backend
+ ConcurrentHashMap<String, RelationalBackend> map =
+ (ConcurrentHashMap<String, RelationalBackend>)
context.getStore(NAMESPACE).get(STORE_KEY);
+
+ if (map != null) {
+ map.forEach(
+ (type, backend) -> {
+ try {
+ LOG.info("Tearing down backend: {}", type);
+ backend.close();
+ // H2 special cleaning logic
+ if ("h2".equals(type) && backend instanceof H2BackendWrapper) {
+ ((H2BackendWrapper) backend).cleanFile();
+ }
+ } catch (Exception e) {
+ LOG.error("Failed to close backend {}", type, e);
+ }
+ });
+ }
+ }
+
+ @Override
+ public boolean supportsTestTemplate(ExtensionContext context) {
+ return true;
+ }
+
+ @Override
+ public Stream<TestTemplateInvocationContext>
provideTestTemplateInvocationContexts(
+ ExtensionContext context) {
+ List<String> backendsToTest = new ArrayList<>();
+ backendsToTest.add("h2"); // Always test with H2
+
+ String dockerTest = System.getenv(DOCKER_TEST_FLAG);
+ if ("true".equalsIgnoreCase(dockerTest)) {
+ backendsToTest.add("mysql");
+ backendsToTest.add("postgresql");
+ LOG.info("Running tests with H2, MySQL, and PostgreSQL backends.");
+ } else {
+ LOG.info(
+ "Running tests with H2 backend only. Set env var 'dockerTest=true'
to include all backends.");
+ }
+
+ return backendsToTest.stream().map(BackendInvocationContext::new);
+ }
+
+ private static class BackendInvocationContext implements
TestTemplateInvocationContext {
+ private final String backendType;
+
+ public BackendInvocationContext(String backendType) {
+ this.backendType = backendType;
+ }
+
+ @Override
+ public String getDisplayName(int invocationIndex) {
+ return String.format("[%s Backend]", backendType.toUpperCase());
+ }
+
+ @Override
+ public List<Extension> getAdditionalExtensions() {
+ return Collections.singletonList(new BackendSetupCallback(backendType));
+ }
+ }
+
+ private static class BackendSetupCallback implements BeforeEachCallback {
Review Comment:
Does the `BeforeEach` here run before each test method in the test class,
or before each backend test mode?
##########
core/src/test/java/org/apache/gravitino/storage/TestSQLScripts.java:
##########
@@ -18,164 +18,139 @@
*/
package org.apache.gravitino.storage;
-import static
org.apache.gravitino.integration.test.util.TestDatabaseName.MYSQL_JDBC_BACKEND;
-import static
org.apache.gravitino.integration.test.util.TestDatabaseName.PG_JDBC_BACKEND;
-
import java.io.File;
import java.io.IOException;
-import java.nio.file.Files;
import java.nio.file.Path;
import java.sql.Connection;
-import java.sql.DriverManager;
+import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
+import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.gravitino.integration.test.container.MySQLContainer;
-import org.apache.gravitino.integration.test.container.PostgreSQLContainer;
-import org.apache.gravitino.integration.test.util.BaseIT;
-import org.junit.jupiter.api.AfterAll;
+import org.apache.gravitino.storage.relational.TestJDBCBackend;
+import org.apache.gravitino.storage.relational.session.SqlSessionFactoryHelper;
+import org.apache.ibatis.session.SqlSession;
import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.io.TempDir;
-
-public class TestSQLScripts extends BaseIT {
-
- private String jdbcBackend;
- private Path scriptDir;
- @TempDir private File tempDir;
- private Map<String, Connection> versionConnections;
+import org.junit.jupiter.api.TestTemplate;
- @BeforeAll
- public void startIntegrationTest() {
- jdbcBackend = System.getenv("jdbcBackend");
- Assertions.assertNotNull(jdbcBackend, "jdbcBackend environment variable is
not set");
+public class TestSQLScripts extends TestJDBCBackend {
+ @TestTemplate
+ public void testSQLScripts() throws SQLException, IOException {
String gravitinoHome = System.getenv("GRAVITINO_HOME");
Assertions.assertNotNull(gravitinoHome, "GRAVITINO_HOME environment
variable is not set");
- scriptDir = Path.of(gravitinoHome, "scripts", jdbcBackend.toLowerCase());
- Assertions.assertTrue(Files.exists(scriptDir), "Script directory does not
exist: " + scriptDir);
- versionConnections = new HashMap<>();
- }
-
- @AfterAll
- public void stopIntegrationTest() {
- // Close all connections
- for (Connection conn : versionConnections.values()) {
- if (conn != null) {
- try {
- conn.close();
- } catch (SQLException e) {
- // Ignore
- }
- }
- }
- versionConnections.clear();
- }
+ Path scriptDir = Path.of(gravitinoHome, "scripts",
backendType.toLowerCase());
- @Test
- public void testSQLScripts() throws SQLException {
File[] scriptFiles = scriptDir.toFile().listFiles();
Assertions.assertNotNull(scriptFiles, "No script files found in " +
scriptDir);
// Sort files to ensure the correct execution order (schema -> upgrade)
Arrays.sort(scriptFiles, Comparator.comparing(File::getName));
// A map to store connections for different schema versions
Pattern schemaPattern =
- Pattern.compile("schema-([\\d.]+)-" + jdbcBackend.toLowerCase() +
"\\.sql");
+ Pattern.compile("schema-([\\d.]+)-" + backendType.toLowerCase() +
"\\.sql");
Pattern upgradePattern =
- Pattern.compile("upgrade-([\\d.]+)-to-([\\d.]+)-" +
jdbcBackend.toLowerCase() + "\\.sql");
+ Pattern.compile("upgrade-([\\d.]+)-to-([\\d.]+)-" +
backendType.toLowerCase() + "\\.sql");
Pattern metricsPattern =
- Pattern.compile("iceberg-metrics-schema-([\\d.]+)-" +
jdbcBackend.toLowerCase() + "\\.sql");
+ Pattern.compile("iceberg-metrics-schema-([\\d.]+)-" +
backendType.toLowerCase() + "\\.sql");
+ Map<String, List<File>> versionScrips = new HashMap<>();
for (File scriptFile : scriptFiles) {
Matcher schemaMatcher = schemaPattern.matcher(scriptFile.getName());
Matcher upgradeMatcher = upgradePattern.matcher(scriptFile.getName());
Matcher metricsMatcher = metricsPattern.matcher(scriptFile.getName());
if (schemaMatcher.matches()) {
String version = schemaMatcher.group(1);
- String dbName = "schema_" + version.replace('.', '_');
-
- Connection conn = getSQLConnection(jdbcBackend, dbName);
- Assertions.assertDoesNotThrow(
- () -> executeSQLScript(conn, scriptFile),
- "Failed to execute schema script for version " + version);
- versionConnections.put(version, conn);
+ versionScrips.computeIfAbsent(version, k -> new
ArrayList<>()).add(scriptFile);
} else if (upgradeMatcher.matches()) {
String fromVersion = upgradeMatcher.group(1);
+ Assertions.assertTrue(
+ versionScrips.containsKey(fromVersion), "No schema script found
for " + fromVersion);
- Connection conn = versionConnections.get(fromVersion);
- Assertions.assertNotNull(
- conn, "No existing database connection found for version " +
fromVersion);
- Assertions.assertDoesNotThrow(
- () -> executeSQLScript(conn, scriptFile),
- "Failed to execute upgrade script" + " in file " +
scriptFile.getName());
} else if (metricsMatcher.matches()) {
- // ignore iceberg metrics scripts for now
+ String version = metricsMatcher.group(1);
+ versionScrips.computeIfAbsent(version, k -> new
ArrayList<>()).add(scriptFile);
+
} else {
Assertions.fail("Unrecognized script file name: " +
scriptFile.getName());
}
}
+
+ for (List<File> scripts : versionScrips.values()) {
+ dropAllTables();
+ for (File scriptFile : scripts) {
+ String sqlContent = FileUtils.readFileToString(scriptFile, "UTF-8");
+ List<String> ddls =
+ Arrays.stream(sqlContent.split(";"))
+ .map(String::trim)
+ .filter(s -> !s.isEmpty())
+ .toList();
+
+ try (SqlSession sqlSession =
+
SqlSessionFactoryHelper.getInstance().getSqlSessionFactory().openSession(true))
{
+ try (Connection connection = sqlSession.getConnection()) {
+ try (Statement statement = connection.createStatement()) {
+ for (String ddl : ddls) {
+ Assertions.assertDoesNotThrow(
+ () -> statement.execute(ddl),
+ "Failed to execute DDL in file " + scriptFile.getName() +
"ddl: " + ddl);
+ }
+ }
+ }
+ }
+ }
+ }
}
- private void executeSQLScript(Connection connection, File scriptFile) throws
IOException {
- String sqlContent = FileUtils.readFileToString(scriptFile, "UTF-8");
- Arrays.stream(sqlContent.split(";"))
- .map(String::trim)
- .filter(s -> !s.isEmpty())
- .forEach(
- sql -> {
- try (Statement stmt = connection.createStatement()) {
- stmt.execute(sql);
- } catch (SQLException e) {
- throw new RuntimeException("Failed to execute SQL: " + sql, e);
+ private void dropAllTables() throws SQLException {
+ try (SqlSession sqlSession =
+
SqlSessionFactoryHelper.getInstance().getSqlSessionFactory().openSession(true))
{
+ try (Connection connection = sqlSession.getConnection()) {
+ try (Statement statement = connection.createStatement()) {
+ if ("postgresql".equals(backendType)) {
+ dropAllTablesForPostgreSQL(connection);
+ } else {
+ String query = "SHOW TABLES";
Review Comment:
Better to extract a method named `dropAllTablesInH2OrMySQL` or something
similar.
##########
build.gradle.kts:
##########
@@ -209,8 +209,9 @@ allprojects {
param.include("**/integration/test/**")
}
+ val dockerTest = project.rootProject.extra["dockerTest"] as? Boolean ?:
false
Review Comment:
Why do we move it out of the unit tests block?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]