This is an automated email from the ASF dual-hosted git repository.
zhangliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git
The following commit(s) were added to refs/heads/master by this push:
new 009ce3d7b4e Add test cases on PostgreSQLTablePropertiesLoader (#33425)
009ce3d7b4e is described below
commit 009ce3d7b4e4006b6b822ffaed67bcf7771ee26e
Author: Liang Zhang <[email protected]>
AuthorDate: Sun Oct 27 18:59:51 2024 +0800
Add test cases on PostgreSQLTablePropertiesLoader (#33425)
* Add test cases on PostgreSQLTablePropertiesLoader
* Add test cases on PostgreSQLTablePropertiesLoader
---
.../ddl/table/PostgreSQLTablePropertiesLoader.java | 81 ++++++++++------------
.../table/PostgreSQLTablePropertiesLoaderTest.java | 75 ++++++++++++++++++++
2 files changed, 111 insertions(+), 45 deletions(-)
diff --git a/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/sqlbuilder/ddl/table/PostgreSQLTablePropertiesLoader.java b/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/sqlbuilder/ddl/table/PostgreSQLTablePropertiesLoader.java
index e1aa32e5ca6..ea6aeb180a4 100644
--- a/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/sqlbuilder/ddl/table/PostgreSQLTablePropertiesLoader.java
+++ b/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/sqlbuilder/ddl/table/PostgreSQLTablePropertiesLoader.java
@@ -23,6 +23,7 @@ import java.sql.Connection;
 import java.sql.SQLException;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.Map;
@@ -50,55 +51,40 @@ public final class PostgreSQLTablePropertiesLoader {
      * @throws SQLException SQL exception
      */
     public Map<String, Object> load() throws SQLException {
-        Map<String, Object> result = new LinkedHashMap<>();
-        fetchDataBaseId(result);
-        fetchSchemaId(result);
-        fetchTableId(result);
+        Map<String, Object> result = new LinkedHashMap<>(fetchDatabaseId());
+        result.putAll(fetchSchemaId());
+        result.putAll(fetchTableId());
         fetchTableProperties(result);
         return result;
     }
 
-    private void fetchDataBaseId(final Map<String, Object> context) throws SQLException {
-        Map<String, Object> params = new LinkedHashMap<>();
-        params.put("databaseName", templateExecutor.getConnection().getCatalog());
-        context.putAll(templateExecutor.executeByTemplateForSingleRow(params, "component/table/%s/get_database_id.ftl"));
+    private Map<String, Object> fetchDatabaseId() throws SQLException {
+        Map<String, Object> params = Collections.singletonMap("databaseName", templateExecutor.getConnection().getCatalog());
+        return templateExecutor.executeByTemplateForSingleRow(params, "component/table/%s/get_database_id.ftl");
     }
 
-    private void fetchTableId(final Map<String, Object> context) {
-        Map<String, Object> params = new LinkedHashMap<>();
-        params.put("schemaName", schemaName);
-        params.put("tableName", tableName);
-        context.putAll(templateExecutor.executeByTemplateForSingleRow(params, "component/table/%s/get_table_id.ftl"));
+    private Map<String, Object> fetchSchemaId() {
+        Map<String, Object> params = Collections.singletonMap("schemaName", schemaName);
+        return templateExecutor.executeByTemplateForSingleRow(params, "component/table/%s/get_schema_id.ftl");
     }
 
-    private void fetchSchemaId(final Map<String, Object> context) {
-        Map<String, Object> params = new LinkedHashMap<>();
+    private Map<String, Object> fetchTableId() {
+        Map<String, Object> params = new LinkedHashMap<>(2, 1F);
         params.put("schemaName", schemaName);
-        context.putAll(templateExecutor.executeByTemplateForSingleRow(params, "component/table/%s/get_schema_id.ftl"));
+        params.put("tableName", tableName);
+        return templateExecutor.executeByTemplateForSingleRow(params, "component/table/%s/get_table_id.ftl");
     }
 
     private void fetchTableProperties(final Map<String, Object> context) throws SQLException {
         context.putAll(templateExecutor.executeByTemplateForSingleRow(context, "component/table/%s/properties.ftl"));
-        updateAutovacuumProperties(context);
-        checkRlspolicySupport(context);
+        updateAutoVacuumProperties(context);
+        updateRlspolicySupport(context);
         templateExecutor.formatSecurityLabels(context);
     }
 
-    private void updateAutovacuumProperties(final Map<String, Object> context) {
-        if (null == context.get("autovacuum_enabled")) {
-            context.put("autovacuum_enabled", "x");
-        } else if (Boolean.TRUE.toString().equalsIgnoreCase(context.get("autovacuum_enabled").toString())) {
-            context.put("autovacuum_enabled", "t");
-        } else {
-            context.put("autovacuum_enabled", "f");
-        }
-        if (null == context.get("toast_autovacuum_enabled")) {
-            context.put("toast_autovacuum_enabled", "x");
-        } else if (Boolean.TRUE.toString().equalsIgnoreCase(context.get("toast_autovacuum_enabled").toString())) {
-            context.put("toast_autovacuum_enabled", "t");
-        } else {
-            context.put("toast_autovacuum_enabled", "f");
-        }
+    private void updateAutoVacuumProperties(final Map<String, Object> context) {
+        context.put("autovacuum_enabled", getAutoVacuumEnabled(context.get("autovacuum_enabled")));
+        context.put("toast_autovacuum_enabled", getAutoVacuumEnabled(context.get("toast_autovacuum_enabled")));
         context.put("autovacuum_custom", anyIsTrue(Arrays.asList(
                 context.get("autovacuum_vacuum_threshold"),
                 context.get("autovacuum_vacuum_scale_factor"),
@@ -121,23 +107,28 @@ public final class PostgreSQLTablePropertiesLoader {
                 context.get("toast_autovacuum_freeze_table_age"))) || "t".equals(context.get("toast_autovacuum_enabled")) || "f".equals(context.get("toast_autovacuum_enabled")));
     }
 
-    private void checkRlspolicySupport(final Map<String, Object> context) {
-        if (context.containsKey("rlspolicy")) {
-            if (context.get("rlspolicy") instanceof String && Boolean.TRUE.toString().equals(context.get("rlspolicy"))) {
-                context.put("rlspolicy", true);
-            }
-            if (context.get("forcerlspolicy") instanceof String && Boolean.TRUE.toString().equals(context.get("forcerlspolicy"))) {
-                context.put("forcerlspolicy", true);
-            }
+    private String getAutoVacuumEnabled(final Object autoVacuumEnabled) {
+        if (null == autoVacuumEnabled) {
+            return "x";
+        }
+        if (Boolean.parseBoolean(autoVacuumEnabled.toString())) {
+            return "t";
         }
+        return "f";
     }
 
     private boolean anyIsTrue(final Collection<Object> collection) {
-        for (Object each : collection) {
-            if (each instanceof Boolean && (Boolean) each) {
-                return true;
+        return collection.stream().anyMatch(each -> each instanceof Boolean && (Boolean) each);
+    }
+
+    private void updateRlspolicySupport(final Map<String, Object> context) {
+        if (context.containsKey("rlspolicy")) {
+            if (context.get("rlspolicy") instanceof String && Boolean.parseBoolean(context.get("rlspolicy").toString())) {
+                context.put("rlspolicy", true);
+            }
+            if (context.get("forcerlspolicy") instanceof String && Boolean.parseBoolean(context.get("forcerlspolicy").toString())) {
+                context.put("forcerlspolicy", true);
             }
         }
-        return false;
     }
 }
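
For readers skimming the refactoring above: the extracted getAutoVacuumEnabled helper keeps the original three-state encoding ("x" when the property is absent, "t" for true, "f" for anything else). A minimal standalone sketch of that mapping, for illustration only (the class, main method, and toFlag name below are not part of the commit):

    // Illustrative sketch, not part of the commit: mirrors the three-state mapping of getAutoVacuumEnabled.
    public final class AutoVacuumFlagSketch {
        
        public static void main(final String[] args) {
            System.out.println(toFlag(null));   // prints "x": property not present in the loaded table properties
            System.out.println(toFlag("TRUE")); // prints "t": Boolean.parseBoolean is case-insensitive
            System.out.println(toFlag("off"));  // prints "f": any value that does not parse as true
        }
        
        private static String toFlag(final Object value) {
            if (null == value) {
                return "x";
            }
            return Boolean.parseBoolean(value.toString()) ? "t" : "f";
        }
    }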
diff --git a/kernel/data-pipeline/dialect/postgresql/src/test/java/org/apache/shardingsphere/data/pipeline/postgresql/sqlbuilder/ddl/table/PostgreSQLTablePropertiesLoaderTest.java b/kernel/data-pipeline/dialect/postgresql/src/test/java/org/apache/shardingsphere/data/pipeline/postgresql/sqlbuilder/ddl/table/PostgreSQLTablePropertiesLoaderTest.java
new file mode 100644
index 00000000000..8556ea175b1
--- /dev/null
+++ b/kernel/data-pipeline/dialect/postgresql/src/test/java/org/apache/shardingsphere/data/pipeline/postgresql/sqlbuilder/ddl/table/PostgreSQLTablePropertiesLoaderTest.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.data.pipeline.postgresql.sqlbuilder.ddl.table;
+
+import org.junit.jupiter.api.Test;
+
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+class PostgreSQLTablePropertiesLoaderTest {
+
+    @Test
+    void assertLoad() throws SQLException {
+        Connection connection = mock(Connection.class, RETURNS_DEEP_STUBS);
+        when(connection.getCatalog()).thenReturn("foo_db");
+        ResultSet fetchDatabaseIdResultSet = mockFetchDatabaseIdResultSet();
+        when(connection.createStatement().executeQuery(System.lineSeparator() + "SELECT oid AS did, datlastsysoid FROM pg_catalog.pg_database WHERE datname = 'foo_db';" + System.lineSeparator()))
+                .thenReturn(fetchDatabaseIdResultSet);
+        ResultSet fetchSchemaIdResultSet = mockFetchSchemaIdResultSet();
+        when(connection.createStatement().executeQuery(System.lineSeparator() + "SELECT oid AS scid FROM pg_catalog.pg_namespace WHERE nspname = 'foo_schema';" + System.lineSeparator()))
+                .thenReturn(fetchSchemaIdResultSet);
+        Map<String, Object> actual = new PostgreSQLTablePropertiesLoader(connection, "foo_tbl", "foo_schema", 12, 0).load();
+        assertThat(actual.size(), is(7));
+        assertThat(actual.get("did"), is(1));
+        assertThat(actual.get("datlastsysoid"), is(10));
+        assertThat(actual.get("scid"), is(20));
+        assertThat(actual.get("autovacuum_enabled"), is("x"));
+        assertThat(actual.get("toast_autovacuum_enabled"), is("x"));
+        assertThat(actual.get("autovacuum_custom"), is(false));
+        assertThat(actual.get("toast_autovacuum"), is(false));
+    }
+
+    private ResultSet mockFetchDatabaseIdResultSet() throws SQLException {
+        ResultSet result = mock(ResultSet.class, RETURNS_DEEP_STUBS);
+        when(result.getMetaData().getColumnCount()).thenReturn(2);
+        when(result.next()).thenReturn(true);
+        when(result.getMetaData().getColumnName(1)).thenReturn("did");
+        when(result.getObject(1)).thenReturn(1);
+        when(result.getMetaData().getColumnName(2)).thenReturn("datlastsysoid");
+        when(result.getObject(2)).thenReturn(10);
+        return result;
+    }
+
+    private ResultSet mockFetchSchemaIdResultSet() throws SQLException {
+        ResultSet result = mock(ResultSet.class, RETURNS_DEEP_STUBS);
+        when(result.getMetaData().getColumnCount()).thenReturn(1);
+        when(result.next()).thenReturn(true);
+        when(result.getMetaData().getColumnName(1)).thenReturn("scid");
+        when(result.getObject(1)).thenReturn(20);
+        return result;
+    }
+}
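
A note on the mocking style in the new test: it relies on Mockito deep stubs so that the chained connection.createStatement().executeQuery(...) call can be stubbed without creating a separate Statement mock. A minimal sketch of that pattern in isolation, assuming only Mockito and JDBC on the classpath (class and method names below are illustrative, not from the commit):

    // Illustrative sketch, not part of the commit: the deep-stub pattern used by the test above.
    import java.sql.Connection;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    
    import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;
    
    final class DeepStubSketch {
        
        static ResultSet stubQuery(final String sql) throws SQLException {
            // RETURNS_DEEP_STUBS auto-mocks the intermediate Statement, so the whole chain can be stubbed in one line.
            Connection connection = mock(Connection.class, RETURNS_DEEP_STUBS);
            ResultSet resultSet = mock(ResultSet.class);
            when(connection.createStatement().executeQuery(sql)).thenReturn(resultSet);
            // Repeated createStatement() calls on a deep-stub mock return the same cached Statement mock,
            // so the stubbed query resolves to the prepared ResultSet here as well.
            return connection.createStatement().executeQuery(sql);
        }
    }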