This is an automated email from the ASF dual-hosted git repository.
panjuan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git
The following commit(s) were added to refs/heads/master by this push:
new f31465d Support query of other tables of information_schema in
Navicat (#12127)
f31465d is described below
commit f31465dc5ee4c8903ea47b0bf3dc94de293d579e
Author: lanchengx <[email protected]>
AuthorDate: Tue Aug 31 04:26:16 2021 -0500
Support query of other tables of information_schema in Navicat (#12127)
* Refactor select schemata;
support the "ENGINES", "FILES", "VIEWS", "COLUMNS", "TABLES", "PROFILING",
and "TRIGGERS" tables
* Rename the class so that it is more intuitive when comparing with related
classes
* Rename method.
* Edit comment & replace array
* Modify table name.
* Delete blank lines.
---
.../DatabaseNotExistedException.java} | 13 +-
.../MySQLInformationSchemaExecutorFactory.java | 16 +-
.../AbstractSelectInformationExecutor.java | 191 ++++++++++++++++++++
.../SelectInformationSchemataExecutor.java | 110 ++++++++++++
.../information/SelectSchemataExecutor.java | 157 -----------------
.../information/SelectInformationExecutorTest.java | 192 +++++++++++++++++++++
.../information/SelectSchemataExecutorTest.java | 93 ----------
7 files changed, 509 insertions(+), 263 deletions(-)
diff --git
a/shardingsphere-proxy/shardingsphere-proxy-backend/src/main/java/org/apache/shardingsphere/proxy/backend/text/admin/mysql/enums/InformationSchemataEnum.java
b/shardingsphere-proxy/shardingsphere-proxy-backend/src/main/java/org/apache/shardingsphere/proxy/backend/exception/DatabaseNotExistedException.java
similarity index 71%
rename from
shardingsphere-proxy/shardingsphere-proxy-backend/src/main/java/org/apache/shardingsphere/proxy/backend/text/admin/mysql/enums/InformationSchemataEnum.java
rename to
shardingsphere-proxy/shardingsphere-proxy-backend/src/main/java/org/apache/shardingsphere/proxy/backend/exception/DatabaseNotExistedException.java
index b1e08c6..cd92958 100644
---
a/shardingsphere-proxy/shardingsphere-proxy-backend/src/main/java/org/apache/shardingsphere/proxy/backend/text/admin/mysql/enums/InformationSchemataEnum.java
+++
b/shardingsphere-proxy/shardingsphere-proxy-backend/src/main/java/org/apache/shardingsphere/proxy/backend/exception/DatabaseNotExistedException.java
@@ -15,17 +15,12 @@
* limitations under the License.
*/
-package org.apache.shardingsphere.proxy.backend.text.admin.mysql.enums;
+package org.apache.shardingsphere.proxy.backend.exception;
/**
- * Enumeration of the fields in the schemata table of the information schema.
+ * Database does not exist exception.
*/
-public enum InformationSchemataEnum {
+public final class DatabaseNotExistedException extends BackendException {
- CATALOG_NAME,
- SCHEMA_NAME,
- DEFAULT_CHARACTER_SET_NAME,
- DEFAULT_COLLATION_NAME,
- SQL_PATH,
- DEFAULT_ENCRYPTION;
+ private static final long serialVersionUID = 4146100333670404924L;
}
diff --git
a/shardingsphere-proxy/shardingsphere-proxy-backend/src/main/java/org/apache/shardingsphere/proxy/backend/text/admin/mysql/MySQLInformationSchemaExecutorFactory.java
b/shardingsphere-proxy/shardingsphere-proxy-backend/src/main/java/org/apache/shardingsphere/proxy/backend/text/admin/mysql/MySQLInformationSchemaExecutorFactory.java
index d785cd0..a0af9cc 100644
---
a/shardingsphere-proxy/shardingsphere-proxy-backend/src/main/java/org/apache/shardingsphere/proxy/backend/text/admin/mysql/MySQLInformationSchemaExecutorFactory.java
+++
b/shardingsphere-proxy/shardingsphere-proxy-backend/src/main/java/org/apache/shardingsphere/proxy/backend/text/admin/mysql/MySQLInformationSchemaExecutorFactory.java
@@ -18,16 +18,22 @@
package org.apache.shardingsphere.proxy.backend.text.admin.mysql;
import
org.apache.shardingsphere.proxy.backend.text.admin.executor.DatabaseAdminQueryExecutor;
-import
org.apache.shardingsphere.proxy.backend.text.admin.mysql.executor.information.SelectSchemataExecutor;
+import
org.apache.shardingsphere.proxy.backend.text.admin.mysql.executor.information.AbstractSelectInformationExecutor.DefaultSelectInformationExecutor;
+import
org.apache.shardingsphere.proxy.backend.text.admin.mysql.executor.information.SelectInformationSchemataExecutor;
import
org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment;
import
org.apache.shardingsphere.sql.parser.sql.common.statement.dml.SelectStatement;
+import java.util.Arrays;
+import java.util.List;
+
/**
* Construct the information schema executor's factory.
*/
public final class MySQLInformationSchemaExecutorFactory {
- public static final String SCHEMATA = "schemata";
+ public static final String SCHEMATA_TABLE = "SCHEMATA";
+
+ public static final List<String> DEFAULT_EXECUTOR_TABLES =
Arrays.asList("ENGINES", "FILES", "VIEWS", "TRIGGERS");
/**
* Create executor.
@@ -38,8 +44,10 @@ public final class MySQLInformationSchemaExecutorFactory {
*/
public static DatabaseAdminQueryExecutor newInstance(final SelectStatement
sqlStatement, final String sql) {
String tableName = ((SimpleTableSegment)
sqlStatement.getFrom()).getTableName().getIdentifier().getValue();
- if (SCHEMATA.equalsIgnoreCase(tableName)) {
- return new SelectSchemataExecutor(sqlStatement, sql);
+ if (SCHEMATA_TABLE.equalsIgnoreCase(tableName)) {
+ return new SelectInformationSchemataExecutor(sqlStatement, sql);
+ } else if (DEFAULT_EXECUTOR_TABLES.contains(tableName.toUpperCase())) {
+ return new DefaultSelectInformationExecutor(sql);
}
throw new UnsupportedOperationException(String.format("unsupported
table : `%s`", tableName));
}
diff --git
a/shardingsphere-proxy/shardingsphere-proxy-backend/src/main/java/org/apache/shardingsphere/proxy/backend/text/admin/mysql/executor/information/AbstractSelectInformationExecutor.java
b/shardingsphere-proxy/shardingsphere-proxy-backend/src/main/java/org/apache/shardingsphere/proxy/backend/text/admin/mysql/executor/information/AbstractSelectInformationExecutor.java
new file mode 100644
index 0000000..26a4c5a
--- /dev/null
+++
b/shardingsphere-proxy/shardingsphere-proxy-backend/src/main/java/org/apache/shardingsphere/proxy/backend/text/admin/mysql/executor/information/AbstractSelectInformationExecutor.java
@@ -0,0 +1,191 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package
org.apache.shardingsphere.proxy.backend.text.admin.mysql.executor.information;
+
+import lombok.Getter;
+import lombok.extern.slf4j.Slf4j;
+import
org.apache.shardingsphere.infra.executor.sql.execute.result.query.QueryResultMetaData;
+import
org.apache.shardingsphere.infra.executor.sql.execute.result.query.impl.raw.metadata.RawQueryResultColumnMetaData;
+import
org.apache.shardingsphere.infra.executor.sql.execute.result.query.impl.raw.metadata.RawQueryResultMetaData;
+import
org.apache.shardingsphere.infra.executor.sql.execute.result.query.impl.raw.type.RawMemoryQueryResult;
+import
org.apache.shardingsphere.infra.executor.sql.execute.result.query.type.memory.row.MemoryQueryResultDataRow;
+import org.apache.shardingsphere.infra.merge.result.MergedResult;
+import
org.apache.shardingsphere.infra.merge.result.impl.transparent.TransparentMergedResult;
+import
org.apache.shardingsphere.infra.metadata.resource.ShardingSphereResource;
+import
org.apache.shardingsphere.proxy.backend.communication.jdbc.connection.BackendConnection;
+import org.apache.shardingsphere.proxy.backend.context.ProxyContext;
+import
org.apache.shardingsphere.proxy.backend.exception.DatabaseNotExistedException;
+import
org.apache.shardingsphere.proxy.backend.text.admin.executor.DatabaseAdminQueryExecutor;
+
+import javax.sql.DataSource;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Types;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Optional;
+import java.util.stream.Collectors;
+
+/**
+ * The abstract class of select information schema, used to define the
template.
+ */
+public abstract class AbstractSelectInformationExecutor implements
DatabaseAdminQueryExecutor {
+
+ @Getter
+ private QueryResultMetaData queryResultMetaData;
+
+ @Getter
+ private MergedResult mergedResult;
+
+ @Getter
+ private final LinkedList<Map<String, Object>> rows = new LinkedList<>();
+
+ @Override
+ public final void execute(final BackendConnection backendConnection)
throws SQLException {
+ List<String> schemaNames = getSchemaNames();
+ for (String schemaName : schemaNames) {
+ constructRowData(schemaName, getSourceData(schemaName));
+ }
+ queryResultMetaData = createQueryResultMetaData();
+ mergedResult = createMergedResult();
+ }
+
+ /**
+ * Get the schema names as a condition for SQL execution.
+ *
+ * @return schema names
+ */
+ protected abstract List<String> getSchemaNames();
+
+ /**
+ * Get the source object of the row data.
+ *
+ * @param schemaName schema name
+ * @return source object of row data
+ * @throws SQLException SQLException
+ */
+ protected abstract Object getSourceData(String schemaName) throws
SQLException;
+
+ /**
+ * Construct row data from source data.
+ *
+ * @param schemaName schema name
+ * @param sourceData source data of row data
+ * @throws SQLException SQLException
+ */
+ protected abstract void constructRowData(String schemaName, Object
sourceData) throws SQLException;
+
+ private MergedResult createMergedResult() {
+ List<MemoryQueryResultDataRow> resultDataRows = rows.stream()
+ .map(each -> new MemoryQueryResultDataRow(new
LinkedList<>(each.values()))).collect(Collectors.toList());
+ return new TransparentMergedResult(new
RawMemoryQueryResult(queryResultMetaData, resultDataRows));
+ }
+
+ private RawQueryResultMetaData createQueryResultMetaData() {
+ List<RawQueryResultColumnMetaData> columns =
rows.stream().flatMap(each ->
each.keySet().stream()).collect(Collectors.toSet())
+ .stream().map(each -> new RawQueryResultColumnMetaData("",
each, each, Types.VARCHAR, "VARCHAR", 20, 0)).collect(Collectors.toList());
+ return new RawQueryResultMetaData(columns);
+ }
+
+ /**
+ * Determine whether the current schema has a data source.
+ *
+ * @param schemaName schema name
+ * @return has datasource or not
+ */
+ protected static Boolean hasDatasource(final String schemaName) {
+ return
ProxyContext.getInstance().getMetaData(schemaName).hasDataSource();
+ }
+
+ /**
+ * Default select information executor, execute sql directly in the
database to obtain the result source data.
+ */
+ @Slf4j
+ public static class DefaultSelectInformationExecutor extends
AbstractSelectInformationExecutor {
+
+ @Getter
+ private final String sql;
+
+ public DefaultSelectInformationExecutor(final String sql) {
+ this.sql = sql;
+ }
+
+ /**
+ * Get the schema names as a condition for SQL execution.
+ *
+ * @return schema names
+ */
+ @Override
+ protected List<String> getSchemaNames() {
+ String schema =
ProxyContext.getInstance().getAllSchemaNames().stream().filter(AbstractSelectInformationExecutor::hasDatasource).findFirst().orElseThrow(DatabaseNotExistedException::new);
+ return Collections.singletonList(schema);
+ }
+
+ /**
+ * Get the source data of the row data.
+ *
+ * @param schemaName schema name
+ * @return source data of row data
+ * @throws SQLException SQLException
+ */
+ @Override
+ protected Object getSourceData(final String schemaName) throws
SQLException {
+ ShardingSphereResource resource =
ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData(schemaName).getResource();
+ Optional<Entry<String, DataSource>> dataSourceEntry =
resource.getDataSources().entrySet().stream().findFirst();
+ log.info("Actual SQL: {} ::: {}",
dataSourceEntry.orElseThrow(DatabaseNotExistedException::new).getKey(), sql);
+ return
dataSourceEntry.get().getValue().getConnection().prepareStatement(sql).executeQuery();
+ }
+
+ /**
+ * Construct row data from source data.
+ *
+ * @param schemaName schema name
+ * @param sourceData source data of row data
+ * @throws SQLException SQLException
+ */
+ @Override
+ protected void constructRowData(final String schemaName, final Object
sourceData) throws SQLException {
+ ResultSet resultSet = (ResultSet) sourceData;
+ while (resultSet.next()) {
+ Map<String, Object> row = new HashMap<>();
+ ResultSetMetaData metaData = resultSet.getMetaData();
+ for (int i = 1; i < metaData.getColumnCount() + 1; i++) {
+ row.put(resultSet.getMetaData().getColumnName(i),
resultSet.getString(i));
+ }
+ rowPostProcessing(schemaName, row);
+ if (!row.isEmpty()) {
+ getRows().addFirst(row);
+ }
+ }
+ }
+
+ /**
+ * Custom processing.
+ *
+ * @param schemaName schema name
+ * @param rows row data
+ */
+ protected void rowPostProcessing(final String schemaName, final
Map<String, Object> rows) {
+ }
+ }
+}
diff --git
a/shardingsphere-proxy/shardingsphere-proxy-backend/src/main/java/org/apache/shardingsphere/proxy/backend/text/admin/mysql/executor/information/SelectInformationSchemataExecutor.java
b/shardingsphere-proxy/shardingsphere-proxy-backend/src/main/java/org/apache/shardingsphere/proxy/backend/text/admin/mysql/executor/information/SelectInformationSchemataExecutor.java
new file mode 100644
index 0000000..1200e03
--- /dev/null
+++
b/shardingsphere-proxy/shardingsphere-proxy-backend/src/main/java/org/apache/shardingsphere/proxy/backend/text/admin/mysql/executor/information/SelectInformationSchemataExecutor.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package
org.apache.shardingsphere.proxy.backend.text.admin.mysql.executor.information;
+
+import
org.apache.shardingsphere.infra.metadata.resource.ShardingSphereResource;
+import org.apache.shardingsphere.proxy.backend.context.ProxyContext;
+import
org.apache.shardingsphere.proxy.backend.text.admin.mysql.executor.information.AbstractSelectInformationExecutor.DefaultSelectInformationExecutor;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ColumnProjectionSegment;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionSegment;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ShorthandProjectionSegment;
+import
org.apache.shardingsphere.sql.parser.sql.common.statement.dml.SelectStatement;
+import
org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue;
+
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+/**
+ * Schemata query executor, used to query the schemata table.
+ */
+public final class SelectInformationSchemataExecutor extends
DefaultSelectInformationExecutor {
+
+ public static final String SCHEMA_NAME = "SCHEMA_NAME";
+
+ public static final String DEFAULT_CHARACTER_SET_NAME =
"DEFAULT_CHARACTER_SET_NAME";
+
+ public static final String DEFAULT_COLLATION_NAME =
"DEFAULT_COLLATION_NAME";
+
+ public static final String CATALOG_NAME = "CATALOG_NAME";
+
+ public static final String SQL_PATH = "SQL_PATH";
+
+ public static final String DEFAULT_ENCRYPTION = "DEFAULT_ENCRYPTION";
+
+ private static final Set<String> SCHEMA_WITHOUT_DATA_SOURCE = new
LinkedHashSet<>();
+
+ private final SelectStatement sqlStatement;
+
+ public SelectInformationSchemataExecutor(final SelectStatement
sqlStatement, final String sql) {
+ super(sql);
+ this.sqlStatement = sqlStatement;
+ }
+
+ @Override
+ protected List<String> getSchemaNames() {
+ List<String> schemaNames =
ProxyContext.getInstance().getAllSchemaNames();
+ SCHEMA_WITHOUT_DATA_SOURCE.addAll(schemaNames.stream().filter(each ->
!AbstractSelectInformationExecutor.hasDatasource(each)).collect(Collectors.toSet()));
+ List<String> result =
schemaNames.stream().filter(AbstractSelectInformationExecutor::hasDatasource).collect(Collectors.toList());
+ if (!SCHEMA_WITHOUT_DATA_SOURCE.isEmpty()) {
+ fillSchemasWithoutDatasource();
+ }
+ return result;
+ }
+
+ @Override
+ protected void rowPostProcessing(final String schemaName, final
Map<String, Object> rows) {
+ ShardingSphereResource resource =
ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData(schemaName).getResource();
+ Set<String> catalogs =
resource.getDataSources().keySet().stream().map(each ->
resource.getDataSourcesMetaData().getDataSourceMetaData(each).getCatalog()).collect(Collectors.toSet());
+ String rowValue = null == rows.get(SCHEMA_NAME) ?
rows.getOrDefault(SCHEMA_NAME.toLowerCase(), "").toString() :
rows.getOrDefault(SCHEMA_NAME, "").toString();
+ if (catalogs.contains(rowValue)) {
+ rows.replace(SCHEMA_NAME, schemaName);
+ } else {
+ rows.clear();
+ }
+ }
+
+ private void fillSchemasWithoutDatasource() {
+ if (SCHEMA_WITHOUT_DATA_SOURCE.isEmpty()) {
+ return;
+ }
+ Map<String, String> defaultRowData = getTheDefaultRowData();
+ SCHEMA_WITHOUT_DATA_SOURCE.forEach(each -> {
+ Map<String, Object> row = new HashMap<>(defaultRowData);
+ row.replace(SCHEMA_NAME, each);
+ getRows().addLast(row);
+ });
+ SCHEMA_WITHOUT_DATA_SOURCE.clear();
+ }
+
+ private Map<String, String> getTheDefaultRowData() {
+ Map<String, String> result;
+ Collection<ProjectionSegment> projections =
sqlStatement.getProjections().getProjections();
+ if (projections.stream().anyMatch(each -> each instanceof
ShorthandProjectionSegment)) {
+ result = Stream.of(CATALOG_NAME, SCHEMA_NAME,
DEFAULT_CHARACTER_SET_NAME, DEFAULT_COLLATION_NAME, SQL_PATH,
DEFAULT_ENCRYPTION).collect(Collectors.toMap(each -> each, each -> ""));
+ } else {
+ result = projections.stream().map(each ->
((ColumnProjectionSegment)
each).getColumn().getIdentifier()).map(IdentifierValue::getValue).collect(Collectors.toMap(each
-> each, each -> ""));
+ }
+ return result;
+ }
+}
diff --git
a/shardingsphere-proxy/shardingsphere-proxy-backend/src/main/java/org/apache/shardingsphere/proxy/backend/text/admin/mysql/executor/information/SelectSchemataExecutor.java
b/shardingsphere-proxy/shardingsphere-proxy-backend/src/main/java/org/apache/shardingsphere/proxy/backend/text/admin/mysql/executor/information/SelectSchemataExecutor.java
deleted file mode 100644
index 940039d..0000000
---
a/shardingsphere-proxy/shardingsphere-proxy-backend/src/main/java/org/apache/shardingsphere/proxy/backend/text/admin/mysql/executor/information/SelectSchemataExecutor.java
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package
org.apache.shardingsphere.proxy.backend.text.admin.mysql.executor.information;
-
-import lombok.Getter;
-import lombok.extern.slf4j.Slf4j;
-import
org.apache.shardingsphere.infra.executor.sql.execute.result.query.QueryResult;
-import
org.apache.shardingsphere.infra.executor.sql.execute.result.query.QueryResultMetaData;
-import
org.apache.shardingsphere.infra.executor.sql.execute.result.query.impl.raw.metadata.RawQueryResultColumnMetaData;
-import
org.apache.shardingsphere.infra.executor.sql.execute.result.query.impl.raw.metadata.RawQueryResultMetaData;
-import
org.apache.shardingsphere.infra.executor.sql.execute.result.query.impl.raw.type.RawMemoryQueryResult;
-import
org.apache.shardingsphere.infra.executor.sql.execute.result.query.type.memory.row.MemoryQueryResultDataRow;
-import org.apache.shardingsphere.infra.merge.result.MergedResult;
-import
org.apache.shardingsphere.infra.merge.result.impl.transparent.TransparentMergedResult;
-import
org.apache.shardingsphere.infra.metadata.resource.ShardingSphereResource;
-import
org.apache.shardingsphere.proxy.backend.communication.jdbc.connection.BackendConnection;
-import org.apache.shardingsphere.proxy.backend.context.ProxyContext;
-import
org.apache.shardingsphere.proxy.backend.text.admin.executor.DatabaseAdminQueryExecutor;
-import
org.apache.shardingsphere.proxy.backend.text.admin.mysql.enums.InformationSchemataEnum;
-import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ColumnProjectionSegment;
-import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionSegment;
-import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ShorthandProjectionSegment;
-import
org.apache.shardingsphere.sql.parser.sql.common.statement.dml.SelectStatement;
-import
org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue;
-
-import javax.sql.DataSource;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Types;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Optional;
-import java.util.stream.Collectors;
-
-/**
- * Schemata query executor.
- */
-@Slf4j
-public final class SelectSchemataExecutor implements
DatabaseAdminQueryExecutor {
-
- @Getter
- private QueryResultMetaData queryResultMetaData;
-
- @Getter
- private MergedResult mergedResult;
-
- private final Map<String, String> initResultSetMap;
-
- private final Map<String, Map<String, String>> schemaMap = new HashMap<>();
-
- private final String sql;
-
- public SelectSchemataExecutor(final SelectStatement sqlStatement, final
String sql) {
- this.sql = sql;
- Collection<ProjectionSegment> projections =
sqlStatement.getProjections().getProjections();
- checkSegment(projections);
- initResultSetMap = isShorthandSegment(projections) ?
initResultSetMap() : initResultSetMap(projections);
- }
-
- private void checkSegment(final Collection<ProjectionSegment> projections)
{
- if (!isShorthandSegment(projections) &&
projections.stream().anyMatch(each -> !(each instanceof
ColumnProjectionSegment))) {
- throw new UnsupportedOperationException(String.format("unsupported
SQL : %s ", sql));
- }
- }
-
- private Boolean isShorthandSegment(final Collection<ProjectionSegment>
projections) {
- return projections.stream().anyMatch(each -> each instanceof
ShorthandProjectionSegment);
- }
-
- private Map<String, String> initResultSetMap(final
Collection<ProjectionSegment> projections) {
- return projections.stream().map(each -> {
- IdentifierValue identifier = ((ColumnProjectionSegment)
each).getColumn().getIdentifier();
- return identifier.getValue();
- }).collect(Collectors.toMap(each -> each, each -> ""));
- }
-
- private Map<String, String> initResultSetMap() {
- return
Arrays.stream(InformationSchemataEnum.values()).map(Enum::name).collect(Collectors.toMap(each
-> each, each -> ""));
- }
-
- @Override
- public void execute(final BackendConnection backendConnection) throws
SQLException {
- for (String each : ProxyContext.getInstance().getAllSchemaNames()) {
- Map<String, String> resultSetMap = new HashMap<>(initResultSetMap);
- schemaMap.put(each, resultSetMap);
- ShardingSphereResource resource =
ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData(each).getResource();
- Optional<Entry<String, DataSource>> dataSourceEntry =
resource.getDataSources().entrySet().stream().findFirst();
- if (!dataSourceEntry.isPresent()) {
- continue;
- }
- String catalog =
resource.getDataSourcesMetaData().getDataSourceMetaData(dataSourceEntry.get().getKey()).getCatalog();
- log.info("Actual SQL: {} ::: {}", dataSourceEntry.get().getKey(),
sql);
- // TODO Splicing where catalog?
- ResultSet resultSet =
dataSourceEntry.get().getValue().getConnection().prepareStatement(sql).executeQuery();
- while (resultSet.next()) {
- String actualDatabaseName =
resultSet.getString(InformationSchemataEnum.SCHEMA_NAME.name());
- if (!catalog.equals(actualDatabaseName)) {
- continue;
- }
- putInResultSetMap(resultSetMap, resultSet);
- break;
- }
- }
- mergedResult = new TransparentMergedResult(getQueryResult());
- queryResultMetaData = createQueryResultMetaData();
- }
-
- private void putInResultSetMap(final Map<String, String> resultSetMap,
final ResultSet resultSet) throws SQLException {
- for (String each : resultSetMap.keySet()) {
- resultSetMap.put(each, resultSet.getString(each));
- }
- }
-
- private RawQueryResultMetaData createQueryResultMetaData() {
- List<RawQueryResultColumnMetaData> columns =
initResultSetMap.keySet().stream()
- .map(each -> new RawQueryResultColumnMetaData("", each, each,
Types.VARCHAR, "VARCHAR", 20, 0))
- .collect(Collectors.toList());
- return new RawQueryResultMetaData(columns);
- }
-
- private QueryResult getQueryResult() {
- List<MemoryQueryResultDataRow> rows = schemaMap.entrySet().stream()
- .map(this::replaceQueryResults)
- .map(each -> new MemoryQueryResultDataRow(new
ArrayList<>(each.getValue().values())))
- .collect(Collectors.toList());
- return new RawMemoryQueryResult(queryResultMetaData, rows);
- }
-
- private Entry<String, Map<String, String>> replaceQueryResults(final
Entry<String, Map<String, String>> entry) {
- entry.getValue().forEach((key, value) -> {
- if
(InformationSchemataEnum.SCHEMA_NAME.name().equalsIgnoreCase(key)) {
-
entry.getValue().put(InformationSchemataEnum.SCHEMA_NAME.name(),
entry.getKey());
- }
- });
- return entry;
- }
-}
diff --git
a/shardingsphere-proxy/shardingsphere-proxy-backend/src/test/java/org/apache/shardingsphere/proxy/backend/text/admin/mysql/executor/information/SelectInformationExecutorTest.java
b/shardingsphere-proxy/shardingsphere-proxy-backend/src/test/java/org/apache/shardingsphere/proxy/backend/text/admin/mysql/executor/information/SelectInformationExecutorTest.java
new file mode 100644
index 0000000..d78d344
--- /dev/null
+++
b/shardingsphere-proxy/shardingsphere-proxy-backend/src/test/java/org/apache/shardingsphere/proxy/backend/text/admin/mysql/executor/information/SelectInformationExecutorTest.java
@@ -0,0 +1,192 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package
org.apache.shardingsphere.proxy.backend.text.admin.mysql.executor.information;
+
+import com.zaxxer.hikari.pool.HikariProxyResultSet;
+import
org.apache.shardingsphere.infra.config.properties.ConfigurationProperties;
+import org.apache.shardingsphere.infra.database.type.dialect.MySQLDatabaseType;
+import org.apache.shardingsphere.infra.executor.kernel.ExecutorEngine;
+import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData;
+import
org.apache.shardingsphere.infra.metadata.resource.CachedDatabaseMetaData;
+import org.apache.shardingsphere.infra.metadata.resource.DataSourcesMetaData;
+import
org.apache.shardingsphere.infra.metadata.resource.ShardingSphereResource;
+import
org.apache.shardingsphere.infra.metadata.rule.ShardingSphereRuleMetaData;
+import org.apache.shardingsphere.infra.metadata.schema.ShardingSphereSchema;
+import org.apache.shardingsphere.infra.optimize.context.OptimizeContextFactory;
+import org.apache.shardingsphere.infra.parser.ShardingSphereSQLParserEngine;
+import org.apache.shardingsphere.mode.manager.ContextManager;
+import org.apache.shardingsphere.mode.metadata.MetaDataContexts;
+import org.apache.shardingsphere.mode.persist.PersistService;
+import
org.apache.shardingsphere.proxy.backend.communication.jdbc.connection.BackendConnection;
+import org.apache.shardingsphere.proxy.backend.context.ProxyContext;
+import
org.apache.shardingsphere.proxy.backend.text.admin.mysql.executor.information.AbstractSelectInformationExecutor.DefaultSelectInformationExecutor;
+import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement;
+import
org.apache.shardingsphere.sql.parser.sql.common.statement.dml.SelectStatement;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.junit.MockitoJUnitRunner;
+
+import javax.sql.DataSource;
+import java.lang.reflect.Field;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.fail;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+@RunWith(MockitoJUnitRunner.class)
+public final class SelectInformationExecutorTest {
+
+ private static final ResultSet RESULT_SET =
mock(HikariProxyResultSet.class);
+
+ @Before
+ public void setUp() throws IllegalAccessException, NoSuchFieldException,
SQLException {
+ Field contextManagerField =
ProxyContext.getInstance().getClass().getDeclaredField("contextManager");
+ contextManagerField.setAccessible(true);
+ ContextManager contextManager = mock(ContextManager.class,
RETURNS_DEEP_STUBS);
+ MetaDataContexts metaDataContexts = new
MetaDataContexts(mock(PersistService.class),
+ new HashMap<>(), mock(ShardingSphereRuleMetaData.class),
mock(ExecutorEngine.class), new ConfigurationProperties(new Properties()),
mock(OptimizeContextFactory.class));
+
when(contextManager.getMetaDataContexts()).thenReturn(metaDataContexts);
+ ProxyContext.getInstance().init(contextManager);
+ }
+
+ private void mockResultSet(final Map<String, String> mockMap, final
Boolean... values) throws SQLException {
+ ResultSetMetaData metaData = mock(ResultSetMetaData.class);
+ List<String> keys = new ArrayList<>(mockMap.keySet());
+ for (int i = 0; i < keys.size(); i++) {
+ when(metaData.getColumnName(i + 1)).thenReturn(keys.get(i));
+ when(RESULT_SET.getString(i +
1)).thenReturn(mockMap.get(keys.get(i)));
+ }
+ when(RESULT_SET.next()).thenReturn(true, false);
+ when(metaData.getColumnCount()).thenReturn(mockMap.size());
+ when(RESULT_SET.getMetaData()).thenReturn(metaData);
+
+ }
+
+ private ShardingSphereMetaData getMetaData() throws SQLException {
+ return new ShardingSphereMetaData("sharding_db",
+ new ShardingSphereResource(mockDatasourceMap(),
mockDataSourcesMetaData(), mock(CachedDatabaseMetaData.class),
mock(MySQLDatabaseType.class)),
+ mock(ShardingSphereRuleMetaData.class),
mock(ShardingSphereSchema.class)
+ );
+ }
+
+ private Map<String, DataSource> mockDatasourceMap() throws SQLException {
+ DataSource dataSource = mock(DataSource.class, RETURNS_DEEP_STUBS);
+
when(dataSource.getConnection().prepareStatement(any(String.class)).executeQuery()).thenReturn(RESULT_SET);
+ Map<String, DataSource> dataSourceMap = new HashMap<>();
+ dataSourceMap.put("ds_0", dataSource);
+ return dataSourceMap;
+ }
+
+ private DataSourcesMetaData mockDataSourcesMetaData() {
+ DataSourcesMetaData meta = mock(DataSourcesMetaData.class,
RETURNS_DEEP_STUBS);
+
when(meta.getDataSourceMetaData("ds_0").getCatalog()).thenReturn("demo_ds_0");
+ return meta;
+ }
+
+ @Test
+ public void assertSelectSchemataExecute() throws SQLException {
+ final String sql = "SELECT SCHEMA_NAME, DEFAULT_CHARACTER_SET_NAME,
DEFAULT_COLLATION_NAME FROM information_schema.SCHEMATA";
+ final SQLStatement sqlStatement = new
ShardingSphereSQLParserEngine("MySQL").parse(sql, false);
+ Map<String, String> mockResultSetMap = new HashMap<>();
+ mockResultSetMap.put("SCHEMA_NAME", "demo_ds_0");
+ mockResultSetMap.put("DEFAULT_CHARACTER_SET_NAME",
"utf8mb4_0900_ai_ci");
+ mockResultSetMap.put("DEFAULT_COLLATION_NAME", "utf8mb4");
+ mockResultSet(mockResultSetMap, true, false);
+ Map<String, ShardingSphereMetaData> metaDataMap =
ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaDataMap();
+ metaDataMap.put("sharding_db", getMetaData());
+ metaDataMap.put("test", mock(ShardingSphereMetaData.class));
+ SelectInformationSchemataExecutor selectSchemataExecutor = new
SelectInformationSchemataExecutor((SelectStatement) sqlStatement, sql);
+ selectSchemataExecutor.execute(mock(BackendConnection.class));
+
assertThat(selectSchemataExecutor.getQueryResultMetaData().getColumnCount(),
is(mockResultSetMap.size()));
+ int count = 0;
+ while (selectSchemataExecutor.getMergedResult().next()) {
+ count++;
+ if
("sharding_db".equals(selectSchemataExecutor.getMergedResult().getValue(1,
String.class))) {
+
assertThat(selectSchemataExecutor.getMergedResult().getValue(2, String.class),
is("utf8mb4"));
+
assertThat(selectSchemataExecutor.getMergedResult().getValue(3, String.class),
is("utf8mb4_0900_ai_ci"));
+ } else if
("test".equals(selectSchemataExecutor.getMergedResult().getValue(1,
String.class))) {
+
assertThat(selectSchemataExecutor.getMergedResult().getValue(2, String.class),
is(""));
+
assertThat(selectSchemataExecutor.getMergedResult().getValue(3, String.class),
is(""));
+ } else {
+ fail("expected : `sharding_db` or `test`");
+ }
+ }
+ assertThat(count, is(2));
+ }
+
+ @Test
+ public void assertSelectSchemataInSchemaWithoutDataSourceExecute() throws
SQLException {
+ final String sql = "SELECT SCHEMA_NAME, DEFAULT_CHARACTER_SET_NAME,
DEFAULT_COLLATION_NAME, DEFAULT_ENCRYPTION FROM information_schema.SCHEMATA";
+ final SQLStatement sqlStatement = new
ShardingSphereSQLParserEngine("MySQL").parse(sql, false);
+ Map<String, String> mockResultSetMap = new HashMap<>();
+ mockResultSetMap.put("SCHEMA_NAME", "demo_ds_0");
+ mockResultSetMap.put("DEFAULT_CHARACTER_SET_NAME",
"utf8mb4_0900_ai_ci");
+ mockResultSetMap.put("DEFAULT_COLLATION_NAME", "utf8mb4");
+ mockResultSetMap.put("DEFAULT_ENCRYPTION", "NO");
+ mockResultSet(mockResultSetMap, false);
+ Map<String, ShardingSphereMetaData> metaDataMap =
ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaDataMap();
+ metaDataMap.put("sharding_db", mock(ShardingSphereMetaData.class));
+ SelectInformationSchemataExecutor selectSchemataExecutor = new
SelectInformationSchemataExecutor((SelectStatement) sqlStatement, sql);
+ selectSchemataExecutor.execute(mock(BackendConnection.class));
+
assertThat(selectSchemataExecutor.getQueryResultMetaData().getColumnCount(),
is(mockResultSetMap.size()));
+ while (selectSchemataExecutor.getMergedResult().next()) {
+ assertThat(selectSchemataExecutor.getMergedResult().getValue(1,
String.class), is("sharding_db"));
+ assertThat(selectSchemataExecutor.getMergedResult().getValue(2,
String.class), is(""));
+ assertThat(selectSchemataExecutor.getMergedResult().getValue(3,
String.class), is(""));
+ assertThat(selectSchemataExecutor.getMergedResult().getValue(4,
String.class), is(""));
+ }
+ }
+
+ @Test
+ public void assertSelectSchemataInNoSchemaExecute() throws SQLException {
+ final String sql = "SELECT SCHEMA_NAME, DEFAULT_CHARACTER_SET_NAME,
DEFAULT_COLLATION_NAME, DEFAULT_ENCRYPTION FROM information_schema.SCHEMATA";
+ final SQLStatement sqlStatement = new
ShardingSphereSQLParserEngine("MySQL").parse(sql, false);
+ SelectInformationSchemataExecutor selectSchemataExecutor = new
SelectInformationSchemataExecutor((SelectStatement) sqlStatement, sql);
+ selectSchemataExecutor.execute(mock(BackendConnection.class));
+
assertThat(selectSchemataExecutor.getQueryResultMetaData().getColumnCount(),
is(0));
+ }
+
+ @Test
+ public void assertDefaultExecute() throws SQLException {
+ final String sql = "SELECT COUNT(*) AS support_ndb FROM
information_schema.ENGINES WHERE Engine = 'ndbcluster'";
+ Map<String, String> mockMap = new HashMap<>();
+ mockMap.put("support_ndb", "0");
+ mockResultSet(mockMap, false);
+ Map<String, ShardingSphereMetaData> metaDataMap =
ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaDataMap();
+ metaDataMap.put("sharding_db", getMetaData());
+ DefaultSelectInformationExecutor defaultSelectMetadataExecutor = new
DefaultSelectInformationExecutor(sql);
+ defaultSelectMetadataExecutor.execute(mock(BackendConnection.class));
+
assertThat(defaultSelectMetadataExecutor.getQueryResultMetaData().getColumnCount(),
is(mockMap.size()));
+ while (defaultSelectMetadataExecutor.getMergedResult().next()) {
+
assertThat(defaultSelectMetadataExecutor.getMergedResult().getValue(1,
String.class), is("0"));
+ }
+ }
+}
diff --git
a/shardingsphere-proxy/shardingsphere-proxy-backend/src/test/java/org/apache/shardingsphere/proxy/backend/text/admin/mysql/executor/information/SelectSchemataExecutorTest.java
b/shardingsphere-proxy/shardingsphere-proxy-backend/src/test/java/org/apache/shardingsphere/proxy/backend/text/admin/mysql/executor/information/SelectSchemataExecutorTest.java
deleted file mode 100644
index efcfe15..0000000
---
a/shardingsphere-proxy/shardingsphere-proxy-backend/src/test/java/org/apache/shardingsphere/proxy/backend/text/admin/mysql/executor/information/SelectSchemataExecutorTest.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package
org.apache.shardingsphere.proxy.backend.text.admin.mysql.executor.information;
-
-import com.zaxxer.hikari.pool.HikariProxyResultSet;
-import org.apache.shardingsphere.infra.parser.ShardingSphereSQLParserEngine;
-import org.apache.shardingsphere.mode.manager.ContextManager;
-import
org.apache.shardingsphere.proxy.backend.communication.jdbc.connection.BackendConnection;
-import org.apache.shardingsphere.proxy.backend.context.ProxyContext;
-import
org.apache.shardingsphere.proxy.backend.text.admin.mysql.enums.InformationSchemataEnum;
-import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement;
-import
org.apache.shardingsphere.sql.parser.sql.common.statement.dml.SelectStatement;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.mockito.junit.MockitoJUnitRunner;
-
-import javax.sql.DataSource;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertThat;
-import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
-@RunWith(MockitoJUnitRunner.class)
-public final class SelectSchemataExecutorTest {
-
- private static final String SQL = "SELECT SCHEMA_NAME,
DEFAULT_CHARACTER_SET_NAME, DEFAULT_COLLATION_NAME FROM
information_schema.SCHEMATA";
-
- private SelectSchemataExecutor selectSchemataExecutor;
-
- @Before
- public void setUp() throws IllegalAccessException, NoSuchFieldException,
SQLException {
- SQLStatement sqlStatement = new
ShardingSphereSQLParserEngine("MySQL").parse(SQL, false);
- selectSchemataExecutor = new SelectSchemataExecutor((SelectStatement)
sqlStatement, SQL);
- ResultSet resultSet = mock(HikariProxyResultSet.class);
-
when(resultSet.getString(InformationSchemataEnum.SCHEMA_NAME.name())).thenReturn("demo_ds_0");
-
when(resultSet.getString(InformationSchemataEnum.DEFAULT_CHARACTER_SET_NAME.name())).thenReturn("utf8mb4");
-
when(resultSet.getString(InformationSchemataEnum.DEFAULT_COLLATION_NAME.name())).thenReturn("utf8mb4_0900_ai_ci");
- when(resultSet.next()).thenReturn(true);
- ContextManager contextManager = mock(ContextManager.class,
RETURNS_DEEP_STUBS);
-
when(contextManager.getMetaDataContexts().getAllSchemaNames()).thenReturn(Collections.singletonList("sharding_db"));
-
when(contextManager.getMetaDataContexts().getMetaData("sharding_db").getResource().getDataSourcesMetaData().getDataSourceMetaData("ds_0").getCatalog()).thenReturn("demo_ds_0");
- Map<String, DataSource> datasourceMap = mockDatasourceMap(resultSet);
-
when(contextManager.getMetaDataContexts().getMetaData("sharding_db").getResource().getDataSources()).thenReturn(datasourceMap);
- ProxyContext.getInstance().init(contextManager);
- }
-
- private Map<String, DataSource> mockDatasourceMap(final ResultSet
resultSet) throws SQLException {
- DataSource dataSource = mock(DataSource.class, RETURNS_DEEP_STUBS);
-
when(dataSource.getConnection().prepareStatement(SQL).executeQuery()).thenReturn(resultSet);
- Map<String, DataSource> dataSourceMap = new HashMap<>();
- dataSourceMap.put("ds_0", dataSource);
- return dataSourceMap;
- }
-
- @Test
- public void assertExecute() throws SQLException {
- selectSchemataExecutor.execute(mockBackendConnection());
-
assertThat(selectSchemataExecutor.getQueryResultMetaData().getColumnCount(),
is(3));
- while (selectSchemataExecutor.getMergedResult().next()) {
- assertThat(selectSchemataExecutor.getMergedResult().getValue(1,
String.class), is("sharding_db"));
- assertThat(selectSchemataExecutor.getMergedResult().getValue(2,
String.class), is("utf8mb4_0900_ai_ci"));
- assertThat(selectSchemataExecutor.getMergedResult().getValue(3,
String.class), is("utf8mb4"));
- }
- }
-
- private BackendConnection mockBackendConnection() {
- BackendConnection result = mock(BackendConnection.class);
- return result;
- }
-}