This is an automated email from the ASF dual-hosted git repository.
zhangliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git
The following commit(s) were added to refs/heads/master by this push:
new f7e45fc1d3a Proxy For HBase add query and return result logic (#24536)
f7e45fc1d3a is described below
commit f7e45fc1d3af2964e0cce53ceec468ff509b62be
Author: galaxy <[email protected]>
AuthorDate: Tue Mar 14 14:59:05 2023 +0800
Proxy For HBase add query and return result logic (#24536)
* Support netty parameter ChannelOption.SO_BACKLOG configurable (#17812)
* Proxy For HBase add Query and Result
* Proxy For HBase add unit test
* Fix CheckStyle For HBase Feature
* Fix CheckStyle For HBase
* Fix CheckStyle For HBase
* Fix CheckStyle For HBase
* Restore todo list for HBase exception
---
.../infra/binder/SQLStatementContextFactory.java | 4 +-
proxy/backend/type/hbase/pom.xml | 12 +
.../HBaseOperation.java} | 24 +-
.../hbase/config/YamlHBaseConfiguration.java | 2 +-
.../proxy/backend/hbase/context/HBaseContext.java | 4 +-
.../hbase/context/HBaseMetaDataRefresher.java | 2 +-
.../hbase/context/HBaseRegionWarmUpContext.java | 25 ++-
.../HBaseDatabaseConverter.java} | 18 +-
.../converter/HBaseDatabaseConverterFactory.java | 57 +++++
.../converter/HBaseDatabaseDeleteConverter.java | 79 +++++++
.../converter/HBaseDatabaseInsertConverter.java | 65 ++++++
.../HBaseDatabaseRowKeysConverterAdapter.java | 39 ++++
.../converter/HBaseDatabaseSelectConverter.java | 162 ++++++++++++++
.../converter/HBaseDatabaseUpdateConverter.java | 97 +++++++++
.../HBaseDeleteOperationAdapter.java} | 32 ++-
.../HBaseInsertOperationAdapter.java} | 30 ++-
.../converter/HBaseRegionReloadConverter.java | 60 +++++
.../HBaseSelectOperationAdapter.java} | 32 ++-
.../HBaseUpdateOperationAdapter.java} | 32 ++-
.../HBaseBackgroundExecutorManager.java | 2 +-
.../{connector => executor}/HBaseExecutor.java | 5 +-
.../HBaseTaskExecutorManager.java | 2 +-
.../impl/HBaseDatabaseBackendUpdateHandler.java | 58 +++++
.../{connector => result}/HBaseAdminCallback.java | 2 +-
.../{connector => result}/HBaseQueryCallback.java | 2 +-
.../{connector => result}/HBaseUpdateCallback.java | 2 +-
.../query/HBaseDatabaseDescribeResultSet.java | 100 +++++++++
.../result/query/HBaseDatabaseGetResultSet.java | 242 +++++++++++++++++++++
.../result/query/HBaseDatabaseListResultSet.java | 117 ++++++++++
.../result/query/HBaseDatabaseQueryResultSet.java | 56 +++++
.../result/update/HBaseDatabaseDeleteUpdater.java | 51 +++++
.../result/update/HBaseDatabaseInsertUpdater.java | 52 +++++
.../result/update/HBaseDatabaseUpdateUpdater.java | 55 +++++
.../update/HBaseDatabaseUpdater.java} | 22 +-
.../result/update/HBaseRegionReloadUpdater.java | 69 ++++++
.../backend/hbase/util/HeterogeneousUtil.java | 82 +++++++
.../HBaseDatabaseConverterFactoryTest.java | 67 ++++++
.../HBaseDatabaseDeleteConverterTest.java | 55 +++++
.../HBaseDatabaseRowKeysConverterAdapterTest.java | 46 ++++
.../HBaseDatabaseUpdateConverterTest.java | 55 +++++
.../hbase/result/HBaseSupportedSQLStatement.java | 123 +++++++++++
.../AbstractHBaseDatabaseQueryResultSetTest.java | 106 +++++++++
.../query/HBaseDatabaseDescribeResultSetTest.java | 74 +++++++
.../query/HBaseDatabaseListResultSetTest.java | 91 ++++++++
.../HBaseDatabaseBackendUpdateHandlerTest.java | 71 ++++++
.../backend/hbase/util/HeterogeneousUtilTest.java | 38 ++++
.../test/e2e/data/pipeline/cases/cdc/CDCE2EIT.java | 5 +-
47 files changed, 2327 insertions(+), 99 deletions(-)
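For readers skimming the diff: a parsed statement is bound into a SQLStatementContext, a converter chosen by statement type turns it into an HBaseOperation (a table name plus an HBase client Operation), and the new result-set and updater classes then execute that operation. The following sketch is illustrative only and is not part of the patch; class and method names are taken from the diff below, while the wrapper class name and the surrounding setup are assumptions.

import org.apache.shardingsphere.infra.binder.SQLStatementContextFactory;
import org.apache.shardingsphere.infra.binder.statement.SQLStatementContext;
import org.apache.shardingsphere.proxy.backend.hbase.bean.HBaseOperation;
import org.apache.shardingsphere.proxy.backend.hbase.converter.HBaseDatabaseConverter;
import org.apache.shardingsphere.proxy.backend.hbase.converter.HBaseDatabaseConverterFactory;
import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement;

// Illustrative helper (hypothetical, not part of this commit).
public final class HBaseConversionFlowSketch {

    public static HBaseOperation toOperation(final SQLStatement sqlStatement) {
        // Mirrors HBaseDatabaseBackendUpdateHandler#execute below: bind the statement,
        // select a converter by statement type, then convert to an HBaseOperation.
        SQLStatementContext<?> sqlStatementContext = SQLStatementContextFactory.newInstance(null, sqlStatement, "");
        HBaseDatabaseConverter converter = HBaseDatabaseConverterFactory.newInstance(sqlStatementContext);
        return converter.convert();
    }
}

For DML, HBaseDatabaseBackendUpdateHandler hands the resulting operation to an HBaseDatabaseUpdater implementation (insert, update, delete or region reload) and wraps the returned UpdateResult collection in an UpdateResponseHeader.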
diff --git
a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/SQLStatementContextFactory.java
b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/SQLStatementContextFactory.java
index 0d0954d8c92..db4b121ccbe 100644
---
a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/SQLStatementContextFactory.java
+++
b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/SQLStatementContextFactory.java
@@ -121,7 +121,7 @@ public final class SQLStatementContextFactory {
/**
* Create SQL statement context.
*
- * @param metaData metaData
+ * @param metaData metadata
* @param sqlStatement SQL statement
* @param defaultDatabaseName default database name
* @return SQL statement context
@@ -133,7 +133,7 @@ public final class SQLStatementContextFactory {
/**
* Create SQL statement context.
*
- * @param metaData metaData
+ * @param metaData metadata
* @param params SQL parameters
* @param sqlStatement SQL statement
* @param defaultDatabaseName default database name
diff --git a/proxy/backend/type/hbase/pom.xml b/proxy/backend/type/hbase/pom.xml
index efbffe02fd1..ba2cfe34ceb 100644
--- a/proxy/backend/type/hbase/pom.xml
+++ b/proxy/backend/type/hbase/pom.xml
@@ -42,5 +42,17 @@
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-shaded-client</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.shardingsphere</groupId>
+ <artifactId>shardingsphere-proxy-backend-core</artifactId>
+ <version>${project.version}</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.shardingsphere</groupId>
+ <artifactId>shardingsphere-test-util</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
</dependencies>
</project>
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseUpdateCallback.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/bean/HBaseOperation.java
similarity index 68%
copy from
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseUpdateCallback.java
copy to
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/bean/HBaseOperation.java
index c174113f5fe..c56d98b81c1 100644
---
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseUpdateCallback.java
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/bean/HBaseOperation.java
@@ -15,21 +15,17 @@
* limitations under the License.
*/
-package org.apache.shardingsphere.proxy.backend.hbase.connector;
+package org.apache.shardingsphere.proxy.backend.hbase.bean;
-import org.apache.hadoop.hbase.client.Table;
-import java.io.IOException;
+import lombok.Getter;
+import lombok.RequiredArgsConstructor;
+import org.apache.hadoop.hbase.client.Operation;
-/**
- * HBase update callback.
- */
-public interface HBaseUpdateCallback {
+@RequiredArgsConstructor
+@Getter
+public final class HBaseOperation {
+
+ private final String tableName;
- /**
- * Execute in HBase.
- *
- * @param table table
- * @throws IOException IO exception
- */
- void executeInHBase(Table table) throws IOException;
+ private final Operation operation;
}
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/config/YamlHBaseConfiguration.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/config/YamlHBaseConfiguration.java
index c508ea3dd93..7e5c43146a3 100644
---
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/config/YamlHBaseConfiguration.java
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/config/YamlHBaseConfiguration.java
@@ -33,7 +33,7 @@ public final class YamlHBaseConfiguration implements
YamlConfiguration {
private String databaseName;
- private Map<String, Object> dataSourceCommon;
+ private Map<String, Object> commonDataSourceProps;
private Map<String, YamlHBaseParameter> dataSources = new HashMap<>();
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseContext.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseContext.java
index bebb2de3831..db7be27505c 100644
---
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseContext.java
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseContext.java
@@ -26,8 +26,8 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.shardingsphere.proxy.backend.hbase.bean.HBaseCluster;
-import
org.apache.shardingsphere.proxy.backend.hbase.connector.HBaseBackgroundExecutorManager;
-import org.apache.shardingsphere.proxy.backend.hbase.connector.HBaseExecutor;
+import
org.apache.shardingsphere.proxy.backend.hbase.executor.HBaseBackgroundExecutorManager;
+import org.apache.shardingsphere.proxy.backend.hbase.executor.HBaseExecutor;
import
org.apache.shardingsphere.proxy.backend.hbase.exception.HBaseOperationException;
import org.apache.shardingsphere.proxy.backend.hbase.props.HBaseProperties;
import org.apache.shardingsphere.proxy.backend.hbase.props.HBasePropertyKey;
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseMetaDataRefresher.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseMetaDataRefresher.java
index 72f41498725..fc3785b5535 100644
---
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseMetaDataRefresher.java
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseMetaDataRefresher.java
@@ -26,7 +26,7 @@ import
org.apache.shardingsphere.proxy.backend.hbase.exception.HBaseOperationExc
*/
@RequiredArgsConstructor
@Slf4j
-public class HBaseMetaDataRefresher implements Runnable {
+public final class HBaseMetaDataRefresher implements Runnable {
private final HBaseContext context;
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseRegionWarmUpContext.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseRegionWarmUpContext.java
index b6319c15e63..8e4deba5a5e 100644
---
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseRegionWarmUpContext.java
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseRegionWarmUpContext.java
@@ -20,9 +20,11 @@ package
org.apache.shardingsphere.proxy.backend.hbase.context;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.shardingsphere.proxy.backend.hbase.bean.HBaseCluster;
-import
org.apache.shardingsphere.proxy.backend.hbase.connector.HBaseTaskExecutorManager;
+import
org.apache.shardingsphere.proxy.backend.hbase.exception.HBaseOperationException;
+import
org.apache.shardingsphere.proxy.backend.hbase.executor.HBaseTaskExecutorManager;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
@@ -72,6 +74,25 @@ public final class HBaseRegionWarmUpContext {
executorManager.submit(() -> loadRegionInfo(tableName, hbaseCluster));
}
+ /**
+ * Load one table region info.
+ *
+ * @param tableName table name
+ * @param connection HBase connection
+ */
+ public void loadRegionInfo(final String tableName, final Connection
connection) {
+ HBaseRegionWarmUpContext.getInstance().addExecuteCount();
+ try {
+ if (connection == null) {
+ return;
+ }
+ RegionLocator regionLocator =
connection.getRegionLocator(TableName.valueOf(tableName));
+ regionLocator.getAllRegionLocations();
+ } catch (IOException e) {
+ throw new HBaseOperationException(String.format("Table %s warm up error, getRegionLocator execute error, reason is %s", tableName, e));
+ }
+ }
+
private void loadRegionInfo(final String tableName, final HBaseCluster
hbaseCluster) {
try {
RegionLocator regionLocator =
hbaseCluster.getConnection().getRegionLocator(TableName.valueOf(tableName));
@@ -112,7 +133,7 @@ public final class HBaseRegionWarmUpContext {
/**
* Sync execute.
*
- * @param clusterName clusterName
+ * @param clusterName cluster name
*/
public void syncExecuteWarmUp(final String clusterName) {
while (executeCount.get() < tableCount.get()) {
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseUpdateCallback.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseConverter.java
similarity index 70%
copy from
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseUpdateCallback.java
copy to
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseConverter.java
index c174113f5fe..8d86e260cbb 100644
---
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseUpdateCallback.java
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseConverter.java
@@ -15,21 +15,19 @@
* limitations under the License.
*/
-package org.apache.shardingsphere.proxy.backend.hbase.connector;
+package org.apache.shardingsphere.proxy.backend.hbase.converter;
-import org.apache.hadoop.hbase.client.Table;
-import java.io.IOException;
+import org.apache.shardingsphere.proxy.backend.hbase.bean.HBaseOperation;
/**
- * HBase update callback.
+ * Convert SQL statement to HBase operation.
*/
-public interface HBaseUpdateCallback {
+public interface HBaseDatabaseConverter {
/**
- * Execute in HBase.
- *
- * @param table table
- * @throws IOException IO exception
+ * Convert SQL statement to HBase operation.
+ *
+ * @return HBase operation
*/
- void executeInHBase(Table table) throws IOException;
+ HBaseOperation convert();
}
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseConverterFactory.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseConverterFactory.java
new file mode 100644
index 00000000000..4094b02fd56
--- /dev/null
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseConverterFactory.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.converter;
+
+import lombok.AccessLevel;
+import lombok.NoArgsConstructor;
+import org.apache.shardingsphere.infra.binder.statement.SQLStatementContext;
+import
org.apache.shardingsphere.infra.binder.statement.dal.FlushStatementContext;
+import
org.apache.shardingsphere.infra.binder.statement.dml.DeleteStatementContext;
+import
org.apache.shardingsphere.infra.binder.statement.dml.InsertStatementContext;
+import
org.apache.shardingsphere.infra.binder.statement.dml.SelectStatementContext;
+import
org.apache.shardingsphere.infra.binder.statement.dml.UpdateStatementContext;
+import
org.apache.shardingsphere.proxy.backend.hbase.exception.HBaseOperationException;
+
+/**
+ * HBase database converter factory.
+ */
+@NoArgsConstructor(access = AccessLevel.PRIVATE)
+public final class HBaseDatabaseConverterFactory {
+
+ /**
+ * Create new instance of HBase database converter.
+ *
+ * @param sqlStatementContext SQL statement context
+ * @return instance of converter
+ */
+ public static HBaseDatabaseConverter newInstance(final
SQLStatementContext<?> sqlStatementContext) {
+ if (sqlStatementContext instanceof SelectStatementContext) {
+ return new HBaseDatabaseSelectConverter(sqlStatementContext);
+ } else if (sqlStatementContext instanceof InsertStatementContext) {
+ return new HBaseDatabaseInsertConverter(sqlStatementContext);
+ } else if (sqlStatementContext instanceof DeleteStatementContext) {
+ return new HBaseDatabaseDeleteConverter(sqlStatementContext);
+ } else if (sqlStatementContext instanceof UpdateStatementContext) {
+ return new HBaseDatabaseUpdateConverter(sqlStatementContext);
+ } else if (sqlStatementContext instanceof FlushStatementContext) {
+ return new HBaseRegionReloadConverter(sqlStatementContext);
+ } else {
+ throw new HBaseOperationException("Can't found converter");
+ }
+ }
+}
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseDeleteConverter.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseDeleteConverter.java
new file mode 100644
index 00000000000..b95d3bc697b
--- /dev/null
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseDeleteConverter.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.converter;
+
+import com.google.common.base.Preconditions;
+import lombok.RequiredArgsConstructor;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.shardingsphere.infra.binder.statement.SQLStatementContext;
+import
org.apache.shardingsphere.infra.binder.statement.dml.DeleteStatementContext;
+import org.apache.shardingsphere.proxy.backend.hbase.bean.HBaseOperation;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.InExpression;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.LiteralExpressionSegment;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * HBase database delete converter.
+ */
+@RequiredArgsConstructor
+public final class HBaseDatabaseDeleteConverter extends
HBaseDatabaseRowKeysConverterAdapter implements HBaseDatabaseConverter {
+
+ private final SQLStatementContext<?> sqlStatementContext;
+
+ /**
+ * Convert SQL statement to HBase operation.
+ *
+ * @return HBase operation
+ */
+ @Override
+ public HBaseOperation convert() {
+ DeleteStatementContext context = (DeleteStatementContext)
sqlStatementContext;
+ if (context.getWhereSegments().stream().findFirst().isPresent() &&
context.getWhereSegments().stream().findFirst().get().getExpr() instanceof
InExpression) {
+ return createDeleteOperationByUseIn(context);
+ } else {
+ return createDeleteOperationByOneRowKey(context);
+ }
+ }
+
+ private HBaseOperation createDeleteOperationByOneRowKey(final
DeleteStatementContext context) {
+ String tableName =
context.getTablesContext().getTableNames().iterator().next();
+
Preconditions.checkArgument(context.getWhereSegments().stream().findFirst().isPresent(),
"where segment is absent");
+ String rowKey =
getRowKeyFromWhereSegment(context.getWhereSegments().stream().findFirst().get().getExpr());
+ return new HBaseOperation(tableName, getDeleteByRowKey(rowKey));
+ }
+
+ private HBaseOperation createDeleteOperationByUseIn(final
DeleteStatementContext context) {
+ String tableName =
context.getTablesContext().getTableNames().iterator().next();
+ List<String> rowKeys = getRowKeysFromWhereSegmentByIn((InExpression)
context.getWhereSegments().stream().findFirst().get().getExpr());
+ List<Delete> deletes =
rowKeys.stream().map(this::getDeleteByRowKey).collect(Collectors.toList());
+ return new HBaseOperation(tableName, new
HBaseDeleteOperationAdapter(tableName, deletes));
+ }
+
+ private Delete getDeleteByRowKey(final String rowKey) {
+ return new Delete(Bytes.toBytes(rowKey));
+ }
+
+ private String getRowKeyFromWhereSegment(final ExpressionSegment
expressionSegment) {
+ BinaryOperationExpression expression = (BinaryOperationExpression)
expressionSegment;
+ return String.valueOf(((LiteralExpressionSegment)
expression.getRight()).getLiterals());
+ }
+}
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseInsertConverter.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseInsertConverter.java
new file mode 100644
index 00000000000..725878a8d1f
--- /dev/null
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseInsertConverter.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.converter;
+
+import lombok.RequiredArgsConstructor;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.util.Bytes;
+import
org.apache.shardingsphere.infra.binder.segment.insert.values.InsertValueContext;
+import org.apache.shardingsphere.infra.binder.statement.SQLStatementContext;
+import
org.apache.shardingsphere.infra.binder.statement.dml.InsertStatementContext;
+import org.apache.shardingsphere.proxy.backend.hbase.bean.HBaseOperation;
+import org.apache.shardingsphere.proxy.backend.hbase.context.HBaseContext;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.LiteralExpressionSegment;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * HBase database insert converter.
+ */
+@RequiredArgsConstructor
+public final class HBaseDatabaseInsertConverter implements
HBaseDatabaseConverter {
+
+ private final SQLStatementContext<?> sqlStatementContext;
+
+ /**
+ * Convert SQL statement to HBase operation.
+ *
+ * @return HBase operation
+ */
+ @Override
+ public HBaseOperation convert() {
+ InsertStatementContext context = (InsertStatementContext)
sqlStatementContext;
+ String tableName =
context.getTablesContext().getTableNames().iterator().next();
+ return new HBaseOperation(tableName, new
HBaseInsertOperationAdapter(createHBaseRequest(context)));
+ }
+
+ private Put generateHBaseRequest(final InsertStatementContext context,
final InsertValueContext insertValueContext) {
+ List<String> columns = context.getInsertColumnNames();
+ List<Object> values =
insertValueContext.getValueExpressions().stream().map(each ->
((LiteralExpressionSegment) each).getLiterals()).collect(Collectors.toList());
+ Put result = new Put(Bytes.toBytes(String.valueOf(values.get(0))));
+ for (int i = 1; i < columns.size(); i++) {
+
result.addColumn(Bytes.toBytes(HBaseContext.getInstance().getColumnFamily()),
Bytes.toBytes(String.valueOf(columns.get(i))),
Bytes.toBytes(String.valueOf(values.get(i))));
+ }
+ return result;
+ }
+
+ private List<Put> createHBaseRequest(final InsertStatementContext context)
{
+ return context.getInsertValueContexts().stream().map(each ->
generateHBaseRequest(context, each)).collect(Collectors.toList());
+ }
+}
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseRowKeysConverterAdapter.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseRowKeysConverterAdapter.java
new file mode 100644
index 00000000000..1dda4dde1b8
--- /dev/null
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseRowKeysConverterAdapter.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.converter;
+
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.InExpression;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ListExpression;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.LiteralExpressionSegment;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * HBase database row keys converter adapter.
+ */
+public class HBaseDatabaseRowKeysConverterAdapter {
+
+ protected List<String> getRowKeysFromWhereSegmentByIn(final InExpression
expression) {
+ return ((ListExpression)
expression.getRight()).getItems().stream().map(this::getValueByExpressionSegment).collect(Collectors.toList());
+ }
+
+ protected String getValueByExpressionSegment(final ExpressionSegment item)
{
+ return String.valueOf(((LiteralExpressionSegment) item).getLiterals());
+ }
+}
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseSelectConverter.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseSelectConverter.java
new file mode 100644
index 00000000000..da2f2bc41e8
--- /dev/null
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseSelectConverter.java
@@ -0,0 +1,162 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.converter;
+
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Query;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.shardingsphere.infra.binder.statement.SQLStatementContext;
+import
org.apache.shardingsphere.infra.binder.statement.dml.SelectStatementContext;
+import org.apache.shardingsphere.proxy.backend.hbase.bean.HBaseOperation;
+import org.apache.shardingsphere.proxy.backend.hbase.util.HeterogeneousUtil;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BetweenExpression;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.InExpression;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.LiteralExpressionSegment;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.pagination.PaginationValueSegment;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.pagination.limit.NumberLiteralLimitValueSegment;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment;
+import
org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.dml.MySQLSelectStatement;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+import java.util.stream.Collectors;
+
+/**
+ * HBase database select converter.
+ */
+@RequiredArgsConstructor
+@Slf4j
+public final class HBaseDatabaseSelectConverter extends
HBaseDatabaseRowKeysConverterAdapter implements HBaseDatabaseConverter {
+
+ private final SQLStatementContext<?> sqlStatementContext;
+
+ /**
+ * Convert SQL statement to HBase operation.
+ *
+ * @return HBase operation
+ */
+ @Override
+ public HBaseOperation convert() {
+ SelectStatementContext context = (SelectStatementContext)
sqlStatementContext;
+ if (isUseGetRequest(context)) {
+ return createGetRequest(context);
+ } else {
+ return createScanRequest(context);
+ }
+ }
+
+ private boolean isUseGetRequest(final SelectStatementContext context) {
+ return context.getWhereSegments().stream().findFirst().isPresent()
+ &&
(context.getWhereSegments().stream().findFirst().get().getExpr() instanceof
BinaryOperationExpression
+ ||
context.getWhereSegments().stream().findFirst().get().getExpr() instanceof
InExpression);
+ }
+
+ private List<String> getRowKeyFromWhereSegment(final ExpressionSegment
expressionSegment) {
+ if (expressionSegment instanceof InExpression) {
+ InExpression expression = (InExpression) expressionSegment;
+ return getRowKeysFromWhereSegmentByIn(expression);
+ } else {
+ BinaryOperationExpression expression = (BinaryOperationExpression)
expressionSegment;
+ return new
ArrayList<>(Collections.singleton(String.valueOf(((LiteralExpressionSegment)
expression.getRight()).getLiterals())));
+ }
+ }
+
+ private HBaseOperation createGetRequest(final SelectStatementContext
context) {
+ ExpressionSegment expression =
context.getWhereSegments().stream().findFirst().get().getExpr();
+ List<Get> gets =
getRowKeyFromWhereSegment(expression).stream().map(this::getGetByRowKey).collect(Collectors.toList());
+ if (!HeterogeneousUtil.isUseShorthandProjection(context)) {
+ for (Get each : gets) {
+ decorateWithColumns(each, context);
+ }
+ }
+ if (expression instanceof InExpression) {
+ return new
HBaseOperation(context.getTablesContext().getTableNames().iterator().next(),
+ new
HBaseSelectOperationAdapter(context.getTablesContext().getTableNames().iterator().next(),
gets));
+ }
+ return new
HBaseOperation(context.getTablesContext().getTableNames().iterator().next(),
gets.get(0));
+ }
+
+ private Get getGetByRowKey(final String rowKey) {
+ return new Get(Bytes.toBytes(rowKey));
+ }
+
+ private void decorateWithColumns(final Query query, final
SelectStatementContext statementContext) {
+ Collection<ColumnSegment> columns =
statementContext.getColumnSegments();
+
+ if (query instanceof Get) {
+ columns.forEach(each -> ((Get)
query).addColumn(Bytes.toBytes("i"), Bytes.toBytes(String.valueOf(each))));
+ } else {
+ columns.forEach(each -> ((Scan)
query).addColumn(Bytes.toBytes("i"), Bytes.toBytes(String.valueOf(each))));
+ }
+ }
+
+ private void decoratedWithLimit(final Scan scan, final
SelectStatementContext statementContext) {
+ MySQLSelectStatement selectStatement = (MySQLSelectStatement)
statementContext.getSqlStatement();
+ if (selectStatement.getLimit().isPresent()) {
+ Optional<PaginationValueSegment> paginationValueSegment =
selectStatement.getLimit().get().getRowCount();
+ paginationValueSegment.ifPresent(valueSegment ->
scan.setLimit((int) ((NumberLiteralLimitValueSegment)
valueSegment).getValue()));
+ }
+ }
+
+ private HBaseOperation createScanRequest(final SelectStatementContext
context) {
+ Scan scan = new Scan();
+ Optional<WhereSegment> whereSegment =
context.getWhereSegments().stream().findFirst();
+ if (whereSegment.isPresent() && whereSegment.get().getExpr()
instanceof BetweenExpression) {
+ decorateScanOperationWithBetweenExpression(scan,
whereSegment.get().getExpr(), false);
+ }
+ if (!HeterogeneousUtil.isUseShorthandProjection(context)) {
+ decorateWithColumns(scan, context);
+ }
+ decoratedWithLimit(scan, context);
+ return new
HBaseOperation(context.getTablesContext().getTableNames().iterator().next(),
scan);
+ }
+
+ private void decorateScanOperationWithBetweenExpression(final Scan scan,
final ExpressionSegment expressionSegment, final boolean reversed) {
+ BetweenExpression betweenExpression = (BetweenExpression)
expressionSegment;
+ LiteralExpressionSegment betweenExpr = (LiteralExpressionSegment)
betweenExpression.getBetweenExpr();
+ LiteralExpressionSegment andExpr = (LiteralExpressionSegment)
betweenExpression.getAndExpr();
+ String startRowKey = betweenExpr.getLiterals().toString();
+ String stopRowKey = andExpr.getLiterals().toString();
+ if (null != startRowKey && null != stopRowKey) {
+ if (reversed) {
+ scan.withStopRow(calBytes(startRowKey, 0), true);
+ // refer:
<https://github.com/apache/hbase/blob/master/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java#L1853>
+ scan.withStartRow(calBytes(stopRowKey + "~", 0), true);
+ } else {
+ scan.withStartRow(calBytes(startRowKey, 0), true);
+ scan.withStopRow(calBytes(stopRowKey + "~", 0), true);
+ }
+ }
+ }
+
+ private byte[] calBytes(final String row, final int step) {
+ byte[] rowByte = Bytes.toBytes(row);
+ byte[] result = Arrays.copyOf(rowByte, rowByte.length);
+ result[result.length - 1] = (byte) (result[result.length - 1] + step);
+ return result;
+ }
+}
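A note on the BETWEEN handling in HBaseDatabaseSelectConverter above: the converter bounds the Scan with the two literal row keys and appends "~" to the stop key so that row keys sharing that prefix still fall within the inclusive stop row. A minimal, self-contained sketch of the same idea, assuming plain string row keys (the class name is illustrative and not part of the patch):

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

// Illustrative only; mirrors decorateScanOperationWithBetweenExpression above.
public final class BetweenScanSketch {

    public static Scan rangeScan(final String startRowKey, final String stopRowKey) {
        Scan result = new Scan();
        result.withStartRow(Bytes.toBytes(startRowKey), true);
        // '~' (0x7E) is the last printable ASCII character, so row keys that start with
        // stopRowKey and contain only printable ASCII sort no later than this stop row.
        result.withStopRow(Bytes.toBytes(stopRowKey + "~"), true);
        return result;
    }
}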
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseUpdateConverter.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseUpdateConverter.java
new file mode 100644
index 00000000000..15009668e6a
--- /dev/null
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseUpdateConverter.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.converter;
+
+import com.google.common.base.Preconditions;
+import lombok.RequiredArgsConstructor;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.shardingsphere.infra.binder.statement.SQLStatementContext;
+import
org.apache.shardingsphere.infra.binder.statement.dml.UpdateStatementContext;
+import org.apache.shardingsphere.proxy.backend.hbase.bean.HBaseOperation;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.AssignmentSegment;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.InExpression;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.LiteralExpressionSegment;
+import java.util.Collection;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * HBase database update converter.
+ */
+@RequiredArgsConstructor
+public final class HBaseDatabaseUpdateConverter extends
HBaseDatabaseRowKeysConverterAdapter implements HBaseDatabaseConverter {
+
+ private final SQLStatementContext<?> sqlStatementContext;
+
+ /**
+ * Convert SQL statement to HBase operation.
+ *
+ * @return HBase operation
+ */
+ @Override
+ public HBaseOperation convert() {
+ UpdateStatementContext context = (UpdateStatementContext)
sqlStatementContext;
+
Preconditions.checkArgument(context.getWhereSegments().stream().findFirst().isPresent(),
"Where segment is not present");
+ if (context.getWhereSegments().stream().findFirst().get().getExpr()
instanceof InExpression) {
+ return createHBasePutsOperation(context);
+ }
+ return new
HBaseOperation(context.getTablesContext().getTableNames().iterator().next(),
createHBaseRequest(context));
+ }
+
+ private HBaseOperation createHBasePutsOperation(final
UpdateStatementContext context) {
+ List<String> rowKeys = getRowKeysFromWhereSegmentByIn((InExpression)
context.getWhereSegments().stream().findFirst().get().getExpr());
+ List<Put> puts =
rowKeys.stream().map(this::getPutByRowKey).collect(Collectors.toList());
+ for (Put put : puts) {
+ addPutColumn(context, put);
+ }
+ return new
HBaseOperation(context.getTablesContext().getTableNames().iterator().next(),
+ new
HBaseUpdateOperationAdapter(context.getTablesContext().getTableNames().iterator().next(),
puts));
+ }
+
+ private Put getPutByRowKey(final String rowKey) {
+ return new Put(Bytes.toBytes(rowKey));
+ }
+
+ private Put createHBaseRequest(final UpdateStatementContext context) {
+ String rowKey =
getRowKeyFromWhereSegment(context.getWhereSegments().stream().findFirst().get().getExpr());
+ Put result = getPutByRowKey(rowKey);
+ addPutColumn(context, result);
+ return result;
+ }
+
+ private void addPutColumn(final UpdateStatementContext context, final Put
put) {
+ for (AssignmentSegment segment : getAssignmentSegments(context)) {
+ String column =
segment.getColumns().iterator().next().getIdentifier().getValue();
+ LiteralExpressionSegment literalExpressionSegment =
(LiteralExpressionSegment) segment.getValue();
+ String value =
String.valueOf(literalExpressionSegment.getLiterals());
+ put.addColumn(Bytes.toBytes("i"), Bytes.toBytes(column),
Bytes.toBytes(value));
+ }
+ }
+
+ private Collection<AssignmentSegment> getAssignmentSegments(final
UpdateStatementContext context) {
+ return context.getSqlStatement().getSetAssignment().getAssignments();
+ }
+
+ private String getRowKeyFromWhereSegment(final ExpressionSegment
expressionSegment) {
+ BinaryOperationExpression expression = (BinaryOperationExpression)
expressionSegment;
+ return String.valueOf(((LiteralExpressionSegment)
expression.getRight()).getLiterals());
+ }
+}
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseMetaDataRefresher.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDeleteOperationAdapter.java
similarity index 58%
copy from
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseMetaDataRefresher.java
copy to
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDeleteOperationAdapter.java
index 72f41498725..d532cbbe0fb 100644
---
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseMetaDataRefresher.java
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDeleteOperationAdapter.java
@@ -15,26 +15,34 @@
* limitations under the License.
*/
-package org.apache.shardingsphere.proxy.backend.hbase.context;
+package org.apache.shardingsphere.proxy.backend.hbase.converter;
+import lombok.Getter;
import lombok.RequiredArgsConstructor;
-import lombok.extern.slf4j.Slf4j;
-import
org.apache.shardingsphere.proxy.backend.hbase.exception.HBaseOperationException;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Operation;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
/**
- * HBase meta data refresher.
+ * HBase database delete operation adapter.
*/
@RequiredArgsConstructor
-@Slf4j
-public class HBaseMetaDataRefresher implements Runnable {
+@Getter
+public final class HBaseDeleteOperationAdapter extends Operation {
- private final HBaseContext context;
+ private final String tableName;
+
+ private final List<Delete> deletes;
+
+ @Override
+ public Map<String, Object> getFingerprint() {
+ return new TreeMap<>();
+ }
@Override
- public void run() {
- try {
- context.getConnections().forEach(context::loadTables);
- } catch (final HBaseOperationException ignored) {
- }
+ public Map<String, Object> toMap(final int i) {
+ return new TreeMap<>();
}
}
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseMetaDataRefresher.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseInsertOperationAdapter.java
similarity index 60%
copy from
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseMetaDataRefresher.java
copy to
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseInsertOperationAdapter.java
index 72f41498725..1c4231f591a 100644
---
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseMetaDataRefresher.java
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseInsertOperationAdapter.java
@@ -15,26 +15,32 @@
* limitations under the License.
*/
-package org.apache.shardingsphere.proxy.backend.hbase.context;
+package org.apache.shardingsphere.proxy.backend.hbase.converter;
+import lombok.Getter;
import lombok.RequiredArgsConstructor;
-import lombok.extern.slf4j.Slf4j;
-import
org.apache.shardingsphere.proxy.backend.hbase.exception.HBaseOperationException;
+import org.apache.hadoop.hbase.client.Operation;
+import org.apache.hadoop.hbase.client.Put;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
/**
- * HBase meta data refresher.
+ * HBase database insert operation adapter.
*/
@RequiredArgsConstructor
-@Slf4j
-public class HBaseMetaDataRefresher implements Runnable {
+@Getter
+public final class HBaseInsertOperationAdapter extends Operation {
- private final HBaseContext context;
+ private final List<Put> puts;
@Override
- public void run() {
- try {
- context.getConnections().forEach(context::loadTables);
- } catch (final HBaseOperationException ignored) {
- }
+ public Map<String, Object> getFingerprint() {
+ return new TreeMap<>();
+ }
+
+ @Override
+ public Map<String, Object> toMap(final int i) {
+ return new TreeMap<>();
}
}
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseRegionReloadConverter.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseRegionReloadConverter.java
new file mode 100644
index 00000000000..26b3347c458
--- /dev/null
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseRegionReloadConverter.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.converter;
+
+import lombok.RequiredArgsConstructor;
+import org.apache.hadoop.hbase.client.Operation;
+import org.apache.shardingsphere.infra.binder.statement.SQLStatementContext;
+import
org.apache.shardingsphere.infra.binder.statement.dal.FlushStatementContext;
+import org.apache.shardingsphere.proxy.backend.hbase.bean.HBaseOperation;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+import java.util.stream.Collectors;
+
+/**
+ * HBase database region reload converter.
+ */
+@RequiredArgsConstructor
+public final class HBaseRegionReloadConverter implements
HBaseDatabaseConverter {
+
+ private final SQLStatementContext<?> sqlStatementContext;
+
+ private Operation getOperation() {
+ return new Operation() {
+
+ @Override
+ public Map<String, Object> getFingerprint() {
+ return new TreeMap<>();
+ }
+
+ @Override
+ public Map<String, Object> toMap(final int i) {
+ return new TreeMap<>();
+ }
+ };
+ }
+
+ @Override
+ public HBaseOperation convert() {
+ List<String> tables = ((FlushStatementContext)
sqlStatementContext).getAllTables()
+ .stream().map(simpleTableSegment ->
simpleTableSegment.getTableName().getIdentifier().getValue()).collect(Collectors.toList());
+
+ return new HBaseOperation(String.join(",", tables), getOperation());
+ }
+}
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseMetaDataRefresher.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseSelectOperationAdapter.java
similarity index 58%
copy from
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseMetaDataRefresher.java
copy to
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseSelectOperationAdapter.java
index 72f41498725..367c75f48f3 100644
---
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseMetaDataRefresher.java
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseSelectOperationAdapter.java
@@ -15,26 +15,34 @@
* limitations under the License.
*/
-package org.apache.shardingsphere.proxy.backend.hbase.context;
+package org.apache.shardingsphere.proxy.backend.hbase.converter;
+import lombok.Getter;
import lombok.RequiredArgsConstructor;
-import lombok.extern.slf4j.Slf4j;
-import
org.apache.shardingsphere.proxy.backend.hbase.exception.HBaseOperationException;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Operation;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
/**
- * HBase meta data refresher.
+ * HBase database select operation adapter.
*/
@RequiredArgsConstructor
-@Slf4j
-public class HBaseMetaDataRefresher implements Runnable {
+@Getter
+public final class HBaseSelectOperationAdapter extends Operation {
- private final HBaseContext context;
+ private final String tableName;
+
+ private final List<Get> gets;
+
+ @Override
+ public Map<String, Object> getFingerprint() {
+ return new TreeMap<>();
+ }
@Override
- public void run() {
- try {
- context.getConnections().forEach(context::loadTables);
- } catch (final HBaseOperationException ignored) {
- }
+ public Map<String, Object> toMap(final int i) {
+ return new TreeMap<>();
}
}
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseMetaDataRefresher.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseUpdateOperationAdapter.java
similarity index 58%
copy from
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseMetaDataRefresher.java
copy to
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseUpdateOperationAdapter.java
index 72f41498725..c9fbf888cb9 100644
---
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseMetaDataRefresher.java
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseUpdateOperationAdapter.java
@@ -15,26 +15,34 @@
* limitations under the License.
*/
-package org.apache.shardingsphere.proxy.backend.hbase.context;
+package org.apache.shardingsphere.proxy.backend.hbase.converter;
+import lombok.Getter;
import lombok.RequiredArgsConstructor;
-import lombok.extern.slf4j.Slf4j;
-import
org.apache.shardingsphere.proxy.backend.hbase.exception.HBaseOperationException;
+import org.apache.hadoop.hbase.client.Operation;
+import org.apache.hadoop.hbase.client.Put;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
/**
- * HBase meta data refresher.
+ * HBase database update operation adapter.
*/
@RequiredArgsConstructor
-@Slf4j
-public class HBaseMetaDataRefresher implements Runnable {
+@Getter
+public final class HBaseUpdateOperationAdapter extends Operation {
- private final HBaseContext context;
+ private final String tableName;
+
+ private final List<Put> puts;
+
+ @Override
+ public Map<String, Object> getFingerprint() {
+ return new TreeMap<>();
+ }
@Override
- public void run() {
- try {
- context.getConnections().forEach(context::loadTables);
- } catch (final HBaseOperationException ignored) {
- }
+ public Map<String, Object> toMap(final int i) {
+ return new TreeMap<>();
}
}
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseBackgroundExecutorManager.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/executor/HBaseBackgroundExecutorManager.java
similarity index 96%
rename from
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseBackgroundExecutorManager.java
rename to
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/executor/HBaseBackgroundExecutorManager.java
index b31bbc8cda2..045bfbe5c16 100644
---
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseBackgroundExecutorManager.java
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/executor/HBaseBackgroundExecutorManager.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.shardingsphere.proxy.backend.hbase.connector;
+package org.apache.shardingsphere.proxy.backend.hbase.executor;
import
org.apache.shardingsphere.infra.executor.kernel.thread.ExecutorThreadFactoryBuilder;
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseExecutor.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/executor/HBaseExecutor.java
similarity index 92%
rename from
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseExecutor.java
rename to
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/executor/HBaseExecutor.java
index 2f3c2c5e803..4a636d5492a 100644
---
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseExecutor.java
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/executor/HBaseExecutor.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.shardingsphere.proxy.backend.hbase.connector;
+package org.apache.shardingsphere.proxy.backend.hbase.executor;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
@@ -24,6 +24,9 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Table;
+import org.apache.shardingsphere.proxy.backend.hbase.result.HBaseAdminCallback;
+import org.apache.shardingsphere.proxy.backend.hbase.result.HBaseQueryCallback;
+import
org.apache.shardingsphere.proxy.backend.hbase.result.HBaseUpdateCallback;
import org.apache.shardingsphere.proxy.backend.hbase.context.HBaseContext;
import
org.apache.shardingsphere.proxy.backend.hbase.exception.HBaseOperationException;
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseTaskExecutorManager.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/executor/HBaseTaskExecutorManager.java
similarity index 96%
rename from
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseTaskExecutorManager.java
rename to
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/executor/HBaseTaskExecutorManager.java
index 864ac333c4e..bc2fcb239c6 100644
---
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseTaskExecutorManager.java
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/executor/HBaseTaskExecutorManager.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.shardingsphere.proxy.backend.hbase.connector;
+package org.apache.shardingsphere.proxy.backend.hbase.executor;
import java.io.Closeable;
import java.util.concurrent.ArrayBlockingQueue;
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/impl/HBaseDatabaseBackendUpdateHandler.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/impl/HBaseDatabaseBackendUpdateHandler.java
new file mode 100644
index 00000000000..4bc59263455
--- /dev/null
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/impl/HBaseDatabaseBackendUpdateHandler.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.impl;
+
+import lombok.Getter;
+import lombok.RequiredArgsConstructor;
+import org.apache.shardingsphere.infra.binder.SQLStatementContextFactory;
+import org.apache.shardingsphere.infra.binder.statement.SQLStatementContext;
+import
org.apache.shardingsphere.infra.executor.sql.execute.result.update.UpdateResult;
+import
org.apache.shardingsphere.proxy.backend.handler.data.DatabaseBackendHandler;
+import org.apache.shardingsphere.proxy.backend.hbase.bean.HBaseOperation;
+import
org.apache.shardingsphere.proxy.backend.hbase.converter.HBaseDatabaseConverter;
+import
org.apache.shardingsphere.proxy.backend.hbase.converter.HBaseDatabaseConverterFactory;
+import
org.apache.shardingsphere.proxy.backend.hbase.result.update.HBaseDatabaseUpdater;
+import
org.apache.shardingsphere.proxy.backend.response.header.update.UpdateResponseHeader;
+import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement;
+import java.util.Collection;
+
+/**
+ * Backend handler for HBase database update statements.
+ */
+@RequiredArgsConstructor
+@Getter
+public final class HBaseDatabaseBackendUpdateHandler implements
DatabaseBackendHandler {
+
+ private final SQLStatement sqlStatement;
+
+ private final HBaseDatabaseUpdater updater;
+
+ /**
+ * Execute HBase update handler.
+ *
+ * @return update response header
+ */
+ @Override
+ public UpdateResponseHeader execute() {
+ SQLStatementContext<?> sqlStatementContext =
SQLStatementContextFactory.newInstance(null, sqlStatement, "");
+ HBaseDatabaseConverter converter =
HBaseDatabaseConverterFactory.newInstance(sqlStatementContext);
+ HBaseOperation hbaseOperation = converter.convert();
+ Collection<UpdateResult> updateResults =
updater.executeUpdate(hbaseOperation);
+ return new UpdateResponseHeader(sqlStatement, updateResults);
+ }
+}
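
A minimal usage sketch of the new handler, assuming the parser helper HBaseSupportedSQLStatement from this change, an initialized HBaseContext with a reachable cluster, and the surrounding imports; exception handling omitted:

    // Parse a hinted DELETE, then let the handler convert and execute it.
    SQLStatement sqlStatement = HBaseSupportedSQLStatement.parseSQLStatement(
            "delete /*+ hbase */ from t_test_order where rowKey = '1'");
    UpdateResponseHeader header = new HBaseDatabaseBackendUpdateHandler(sqlStatement, new HBaseDatabaseDeleteUpdater()).execute();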
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseAdminCallback.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/HBaseAdminCallback.java
similarity index 94%
copy from
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseAdminCallback.java
copy to
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/HBaseAdminCallback.java
index 9f373d897c0..44a140783e1 100644
---
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseAdminCallback.java
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/HBaseAdminCallback.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.shardingsphere.proxy.backend.hbase.connector;
+package org.apache.shardingsphere.proxy.backend.hbase.result;
import org.apache.hadoop.hbase.client.Admin;
import java.io.IOException;
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseQueryCallback.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/HBaseQueryCallback.java
similarity index 94%
rename from
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseQueryCallback.java
rename to
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/HBaseQueryCallback.java
index 56ff717c844..774ec60617d 100644
---
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseQueryCallback.java
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/HBaseQueryCallback.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.shardingsphere.proxy.backend.hbase.connector;
+package org.apache.shardingsphere.proxy.backend.hbase.result;
import org.apache.hadoop.hbase.client.Table;
import java.io.IOException;
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseUpdateCallback.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/HBaseUpdateCallback.java
similarity index 94%
rename from
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseUpdateCallback.java
rename to
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/HBaseUpdateCallback.java
index c174113f5fe..5bc4e2db1e8 100644
---
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseUpdateCallback.java
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/HBaseUpdateCallback.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package org.apache.shardingsphere.proxy.backend.hbase.connector;
+package org.apache.shardingsphere.proxy.backend.hbase.result;
import org.apache.hadoop.hbase.client.Table;
import java.io.IOException;
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/HBaseDatabaseDescribeResultSet.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/HBaseDatabaseDescribeResultSet.java
new file mode 100644
index 00000000000..f3dde4300d0
--- /dev/null
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/HBaseDatabaseDescribeResultSet.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.result.query;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.shardingsphere.infra.binder.statement.SQLStatementContext;
+import
org.apache.shardingsphere.infra.binder.statement.dal.ShowCreateTableStatementContext;
+import org.apache.shardingsphere.proxy.backend.hbase.context.HBaseContext;
+import org.apache.shardingsphere.proxy.backend.hbase.executor.HBaseExecutor;
+import
org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.dal.MySQLShowCreateTableStatement;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Result set for describing HBase tables (SHOW CREATE TABLE).
+ */
+public final class HBaseDatabaseDescribeResultSet implements
HBaseDatabaseQueryResultSet {
+
+ private Iterator<HTableDescriptor> iterator;
+
+ /**
+ * Init data.
+ *
+ * @param sqlStatementContext SQL statement context
+ */
+ @Override
+ public void init(final SQLStatementContext<?> sqlStatementContext) {
+ ShowCreateTableStatementContext statementContext =
(ShowCreateTableStatementContext) sqlStatementContext;
+ String tableName =
statementContext.getTablesContext().getTableNames().iterator().next();
+ boolean isExists =
HBaseExecutor.executeAdmin(HBaseContext.getInstance().getConnection(tableName),
admin -> admin.tableExists(TableName.valueOf(tableName)));
+        Preconditions.checkArgument(isExists, String.format("Table %s does not exist", tableName));
+ HTableDescriptor hTableDescriptor =
HBaseExecutor.executeAdmin(HBaseContext.getInstance().getConnection(tableName),
admin -> admin.getTableDescriptor(TableName.valueOf(tableName)));
+ List<HTableDescriptor> tables =
Collections.singletonList(hTableDescriptor);
+ iterator = tables.iterator();
+ }
+
+ /**
+ * Get result set column names.
+ *
+ * @return result set column names
+ */
+ @Override
+ public Collection<String> getColumnNames() {
+ return Arrays.asList("Name", "TableAttributes",
"FlushPolicyClassName", "MaxFileSize", "MemStoreFlushSize",
+ "Priority", "RegionReplication", "RegionSplitPolicyClassName",
"CustomizedValues");
+ }
+
+ /**
+ * Go to next data.
+ *
+ * @return true if next data exist
+ */
+ @Override
+ public boolean next() {
+ return iterator.hasNext();
+ }
+
+ /**
+ * Get row data.
+ *
+ * @return row data
+ */
+ @Override
+ public Collection<Object> getRowData() {
+ HTableDescriptor descriptor = iterator.next();
+ return Arrays.asList(descriptor.getNameAsString(),
descriptor.toStringTableAttributes(), descriptor.getFlushPolicyClassName(),
descriptor.getMaxFileSize(),
+ descriptor.getMemStoreFlushSize(), descriptor.getPriority(),
descriptor.getRegionReplication(), descriptor.getRegionSplitPolicyClassName(),
+ descriptor.toStringCustomizedValues());
+ }
+
+ /**
+ * Get type.
+ *
+ * @return type name
+ */
+ @Override
+ public String getType() {
+ return MySQLShowCreateTableStatement.class.getCanonicalName();
+ }
+}
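
A sketch of the raw HBase Admin calls this result set wraps, assuming an open org.apache.hadoop.hbase.client.Connection named connection; exception handling omitted:

    try (Admin admin = connection.getAdmin()) {
        TableName tableName = TableName.valueOf("t_test_order");
        if (admin.tableExists(tableName)) {
            HTableDescriptor descriptor = admin.getTableDescriptor(tableName);
            // Same attributes the result set exposes as columns.
            System.out.println(descriptor.getNameAsString() + ", maxFileSize=" + descriptor.getMaxFileSize());
        }
    }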
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/HBaseDatabaseGetResultSet.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/HBaseDatabaseGetResultSet.java
new file mode 100644
index 00000000000..c1007fff26b
--- /dev/null
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/HBaseDatabaseGetResultSet.java
@@ -0,0 +1,242 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.result.query;
+
+import lombok.extern.slf4j.Slf4j;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.shardingsphere.infra.binder.statement.SQLStatementContext;
+import
org.apache.shardingsphere.infra.binder.statement.dml.SelectStatementContext;
+import org.apache.shardingsphere.proxy.backend.hbase.context.HBaseContext;
+import
org.apache.shardingsphere.proxy.backend.hbase.converter.HBaseDatabaseConverter;
+import org.apache.shardingsphere.proxy.backend.hbase.bean.HBaseOperation;
+import
org.apache.shardingsphere.proxy.backend.hbase.converter.HBaseDatabaseConverterFactory;
+import
org.apache.shardingsphere.proxy.backend.hbase.converter.HBaseSelectOperationAdapter;
+import org.apache.shardingsphere.proxy.backend.hbase.executor.HBaseExecutor;
+import org.apache.shardingsphere.proxy.backend.hbase.props.HBasePropertyKey;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BetweenExpression;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.pagination.PaginationValueSegment;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.pagination.limit.LimitSegment;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.pagination.limit.NumberLiteralLimitValueSegment;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment;
+import
org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.dml.MySQLSelectStatement;
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.TreeMap;
+import java.util.stream.Collectors;
+
+/**
+ * Result set for HBase SELECT queries (get and scan).
+ */
+@Slf4j
+public final class HBaseDatabaseGetResultSet implements
HBaseDatabaseQueryResultSet {
+
+ private SelectStatementContext statementContext;
+
+ private Collection<String> columns = Collections.singleton("rowKey");
+
+ private Result compensateResult;
+
+ private Iterator<Result> iterator;
+
+ private long resultNum;
+
+ private long maxLimitResultSize;
+
+ /**
+ * Init data.
+ *
+ * @param sqlStatementContext SQL statement context.
+ */
+ @Override
+ public void init(final SQLStatementContext<?> sqlStatementContext) {
+ statementContext = (SelectStatementContext) sqlStatementContext;
+ HBaseDatabaseConverter converter =
HBaseDatabaseConverterFactory.newInstance(sqlStatementContext);
+ HBaseOperation hbaseOperation = converter.convert();
+ initResultNum(sqlStatementContext);
+ final long startMill = System.currentTimeMillis();
+ if (hbaseOperation.getOperation() instanceof Get) {
+ executeGetRequest(hbaseOperation);
+ } else if (hbaseOperation.getOperation() instanceof
HBaseSelectOperationAdapter) {
+ executeGetsRequest(hbaseOperation);
+ } else {
+ executeScanRequest(hbaseOperation);
+ }
+
+ final long endMill = System.currentTimeMillis();
+
+ printExecuteTime(endMill, startMill);
+ }
+
+    private void printExecuteTime(final long endMill, final long startMill) {
+        String hbTable;
+        if (statementContext.getSqlStatement().getFrom() instanceof SimpleTableSegment) {
+            hbTable = ((SimpleTableSegment) statementContext.getSqlStatement().getFrom()).getTableName().getIdentifier().getValue();
+        } else {
+            hbTable = statementContext.getSqlStatement().getFrom().toString();
+        }
+        String whereClause = "";
+        if (statementContext.getSqlStatement().getWhere().isPresent()) {
+            ExpressionSegment expressionSegment = statementContext.getSqlStatement().getWhere().get().getExpr();
+            if (expressionSegment instanceof BetweenExpression) {
+                whereClause += ((BetweenExpression) expressionSegment).getBetweenExpr();
+            } else if (expressionSegment instanceof BinaryOperationExpression) {
+                whereClause += ((BinaryOperationExpression) expressionSegment).getText();
+            }
+        }
+        if (endMill - startMill > HBaseContext.getInstance().getProps().<Long>getValue(HBasePropertyKey.EXECUTE_TIME_OUT)) {
+            log.info(String.format("Query HBase table: %s, where clause: %s, exceeded timeout, took %dms", hbTable, whereClause, endMill - startMill));
+        } else {
+            log.info(String.format("Query HBase table: %s, where clause: %s, execute time: %dms", hbTable, whereClause, endMill - startMill));
+        }
+    }
+
+ private void initResultNum(final SQLStatementContext<?>
sqlStatementContext) {
+ resultNum = 0;
+ maxLimitResultSize =
HBaseContext.getInstance().getProps().<Long>getValue(HBasePropertyKey.MAX_SCAN_LIMIT_SIZE);
+ Optional<PaginationValueSegment> paginationSegment =
((MySQLSelectStatement)
sqlStatementContext.getSqlStatement()).getLimit().flatMap(LimitSegment::getRowCount);
+ paginationSegment.ifPresent(valueSegment -> maxLimitResultSize =
Math.min(maxLimitResultSize, ((NumberLiteralLimitValueSegment)
valueSegment).getValue()));
+ }
+
+ private void executeGetsRequest(final HBaseOperation hbaseOperation) {
+ List<Result> results =
Arrays.asList(HBaseExecutor.executeQuery(hbaseOperation.getTableName(), table
-> table.get(((HBaseSelectOperationAdapter)
hbaseOperation.getOperation()).getGets())));
+ results = results.stream().filter(result -> result.rawCells().length >
0).collect(Collectors.toList());
+ orderResults(results);
+ iterator = results.iterator();
+ setColumns(iterator);
+ }
+
+ private void orderResults(final List<Result> results) {
+ if (!this.statementContext.getOrderByContext().isGenerated()) {
+ return;
+ }
+ results.sort(this::compareResult);
+ }
+
+ private int compareResult(final Result result1, final Result result2) {
+ return
Bytes.toString(result1.getRow()).compareTo(Bytes.toString(result2.getRow()));
+ }
+
+ private void executeGetRequest(final HBaseOperation hbaseOperation) {
+ Result result =
HBaseExecutor.executeQuery(hbaseOperation.getTableName(), table ->
table.get((Get) hbaseOperation.getOperation()));
+ List<Result> rows = 0 == result.rawCells().length ?
Collections.emptyList() : Collections.singletonList(result);
+ iterator = rows.iterator();
+ setColumns(iterator);
+ }
+
+ private void executeScanRequest(final HBaseOperation hbaseOperation) {
+ Scan scan = (Scan) hbaseOperation.getOperation();
+        scan.setLimit((int) maxLimitResultSize);
+ ResultScanner resultScanner =
HBaseExecutor.executeQuery(hbaseOperation.getTableName(), table ->
table.getScanner(scan));
+ iterator = resultScanner.iterator();
+ setColumns(iterator);
+ }
+
+ private void setColumns(final Iterator<Result> iterator) {
+ if (iterator.hasNext()) {
+ compensateResult = iterator.next();
+ }
+ if (compensateResult != null) {
+ Map<String, String> row = parseResult(compensateResult);
+ columns = row.keySet();
+ } else {
+ columns = Arrays.asList("rowKey", "content");
+ }
+ }
+
+ private Map<String, String> parseResult(final Result result) {
+ Map<String, String> row = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
+ row.put("rowKey", Bytes.toString(result.getRow()));
+ Long timestamp = null;
+ for (Cell cell : result.listCells()) {
+ String column = new String(CellUtil.cloneQualifier(cell),
StandardCharsets.UTF_8);
+ String value = new String(CellUtil.cloneValue(cell),
StandardCharsets.UTF_8);
+            if (timestamp == null) {
+                timestamp = cell.getTimestamp();
+            }
+ row.put(column, value);
+ }
+ row.put("timestamp", String.valueOf(timestamp));
+ return row;
+ }
+
+ /**
+ * Get result set column names.
+ *
+ * @return result set column names.
+ */
+ @Override
+ public Collection<String> getColumnNames() {
+ return columns;
+ }
+
+ /**
+ * Go to next data.
+ *
+ * @return true if next data exist.
+ */
+ @Override
+ public boolean next() {
+ return resultNum < maxLimitResultSize && (iterator.hasNext() ||
compensateResult != null);
+ }
+
+ /**
+ * Get row data.
+ *
+ * @return row data.
+ */
+ @Override
+ public Collection<Object> getRowData() {
+ Map<String, String> row;
+ if (compensateResult != null) {
+ row = parseResult(compensateResult);
+ compensateResult = null;
+ } else {
+ row = parseResult(iterator.next());
+ }
+ resultNum++;
+ return columns.stream().map(each -> row.getOrDefault(each,
"")).collect(Collectors.toList());
+ }
+
+ /**
+     * Get type.
+     *
+     * @return type name
+ */
+ @Override
+ public String getType() {
+ return MySQLSelectStatement.class.getCanonicalName();
+ }
+}
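
A self-contained sketch of the compensate-result pattern used above: the first element is consumed once to derive metadata, then replayed as the first data row.

    import java.util.Arrays;
    import java.util.Iterator;
    import java.util.List;
    
    public final class CompensatePatternSketch {
        
        public static void main(final String[] args) {
            Iterator<List<String>> iterator = Arrays.asList(Arrays.asList("1", "a"), Arrays.asList("2", "b")).iterator();
            // Peek the first result to learn the column layout.
            List<String> compensate = iterator.hasNext() ? iterator.next() : null;
            int columnCount = null == compensate ? 2 : compensate.size();
            System.out.println("column count: " + columnCount);
            // Replay the peeked element, then continue with the iterator.
            while (null != compensate || iterator.hasNext()) {
                List<String> row = null != compensate ? compensate : iterator.next();
                compensate = null;
                System.out.println("row: " + row);
            }
        }
    }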
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/HBaseDatabaseListResultSet.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/HBaseDatabaseListResultSet.java
new file mode 100644
index 00000000000..f00232c4cdf
--- /dev/null
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/HBaseDatabaseListResultSet.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.result.query;
+
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.shardingsphere.infra.binder.statement.SQLStatementContext;
+import
org.apache.shardingsphere.infra.binder.statement.dal.ShowTablesStatementContext;
+import org.apache.shardingsphere.proxy.backend.hbase.bean.HBaseCluster;
+import org.apache.shardingsphere.proxy.backend.hbase.context.HBaseContext;
+import org.apache.shardingsphere.proxy.backend.hbase.executor.HBaseExecutor;
+import
org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.dal.MySQLShowTablesStatement;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Map.Entry;
+
+/**
+ * Result set for listing HBase tables.
+ */
+public final class HBaseDatabaseListResultSet implements
HBaseDatabaseQueryResultSet {
+
+ private Iterator<Entry<String, String>> iterator;
+
+ /**
+ * Init data.
+ *
+ * @param sqlStatementContext SQL statement context
+ */
+ @Override
+ public void init(final SQLStatementContext<?> sqlStatementContext) {
+ ShowTablesStatementContext context = (ShowTablesStatementContext)
sqlStatementContext;
+ Map<String, String> result;
+ if (context.getSqlStatement().getFromSchema().isPresent()) {
+ String clusterName =
context.getSqlStatement().getFromSchema().get().getSchema().getIdentifier().getValue();
+ result = listTablesInHBaseByFromSchema(clusterName);
+ } else {
+ result = listTablesInHBase();
+ }
+ iterator = result.entrySet().iterator();
+ }
+
+ private Map<String, String> listTablesInHBaseByFromSchema(final String
clusterName) {
+ HTableDescriptor[] tables =
HBaseExecutor.executeAdmin(HBaseContext.getInstance().getConnectionByClusterName(clusterName),
Admin::listTables);
+ Map<String, String> result = new HashMap<>(tables.length);
+ for (HTableDescriptor tableDescriptor : tables) {
+ result.put(tableDescriptor.getNameAsString(), clusterName);
+ }
+ return result;
+ }
+
+ private Map<String, String> listTablesInHBase() {
+ Map<String, String> result = new
HashMap<>(HBaseContext.getInstance().getTableConnectionMap().size());
+ for (Entry<String, HBaseCluster> entry :
HBaseContext.getInstance().getTableConnectionMap().entrySet()) {
+ result.put(entry.getKey(), entry.getValue().getClusterName());
+ }
+ return result;
+ }
+
+ /**
+ * Get result set column names.
+ *
+ * @return result set column names
+ */
+ @Override
+ public Collection<String> getColumnNames() {
+ return Arrays.asList("hbase cluster name", "table name");
+ }
+
+ /**
+ * Go to next data.
+ *
+ * @return true if next data exist
+ */
+ @Override
+ public boolean next() {
+ return iterator.hasNext();
+ }
+
+ /**
+ * Get row data.
+ *
+ * @return row data
+ */
+ @Override
+ public Collection<Object> getRowData() {
+ Entry<String, String> entry = iterator.next();
+ return Arrays.asList(entry.getValue(), entry.getKey());
+ }
+
+ /**
+ * Get type.
+ *
+ * @return type name
+ */
+ @Override
+ public String getType() {
+ return MySQLShowTablesStatement.class.getCanonicalName();
+ }
+}
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/HBaseDatabaseQueryResultSet.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/HBaseDatabaseQueryResultSet.java
new file mode 100644
index 00000000000..cc5c9b58551
--- /dev/null
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/HBaseDatabaseQueryResultSet.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.result.query;
+
+import org.apache.shardingsphere.infra.binder.statement.SQLStatementContext;
+import org.apache.shardingsphere.infra.util.spi.type.typed.TypedSPI;
+import java.util.Collection;
+
+/**
+ * Result set for HBase databases.
+ */
+public interface HBaseDatabaseQueryResultSet extends TypedSPI {
+
+ /**
+ * Initialize data.
+ *
+ * @param sqlStatementContext SQL statement context
+ */
+ void init(SQLStatementContext<?> sqlStatementContext);
+
+ /**
+ * Get result set column names.
+ *
+ * @return result set column names
+ */
+ Collection<String> getColumnNames();
+
+ /**
+ * Go to next data.
+ *
+ * @return true if next data exist
+ */
+ boolean next();
+
+ /**
+ * Get row data.
+ *
+ * @return row data
+ */
+ Collection<Object> getRowData();
+}
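
A sketch of how a caller is expected to drive this SPI, assuming a concrete implementation such as HBaseDatabaseListResultSet and an already prepared sqlStatementContext:

    HBaseDatabaseQueryResultSet resultSet = new HBaseDatabaseListResultSet();
    resultSet.init(sqlStatementContext);
    Collection<String> columnNames = resultSet.getColumnNames();
    while (resultSet.next()) {
        Collection<Object> row = resultSet.getRowData();
        // Render columnNames and row into the proxy query response here.
    }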
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/update/HBaseDatabaseDeleteUpdater.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/update/HBaseDatabaseDeleteUpdater.java
new file mode 100644
index 00000000000..5b550161407
--- /dev/null
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/update/HBaseDatabaseDeleteUpdater.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.result.update;
+
+import org.apache.hadoop.hbase.client.Delete;
+import
org.apache.shardingsphere.infra.executor.sql.execute.result.update.UpdateResult;
+import
org.apache.shardingsphere.proxy.backend.hbase.converter.HBaseDeleteOperationAdapter;
+import org.apache.shardingsphere.proxy.backend.hbase.bean.HBaseOperation;
+import org.apache.shardingsphere.proxy.backend.hbase.executor.HBaseExecutor;
+import
org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.dml.MySQLDeleteStatement;
+import java.util.Collection;
+import java.util.Collections;
+
+/**
+ * HBase database delete updater.
+ */
+public final class HBaseDatabaseDeleteUpdater implements HBaseDatabaseUpdater {
+
+ @Override
+ public Collection<UpdateResult> executeUpdate(final HBaseOperation
hbaseOperation) {
+ if (hbaseOperation.getOperation() instanceof
HBaseDeleteOperationAdapter) {
+ int deleteAffectedSize = ((HBaseDeleteOperationAdapter)
hbaseOperation.getOperation()).getDeletes().size();
+ HBaseExecutor.executeUpdate(hbaseOperation.getTableName(), table
-> table.delete(((HBaseDeleteOperationAdapter)
hbaseOperation.getOperation()).getDeletes()));
+ return Collections.singletonList(new
UpdateResult(deleteAffectedSize, 0));
+ } else {
+ HBaseExecutor.executeUpdate(hbaseOperation.getTableName(), table
-> table.delete((Delete) hbaseOperation.getOperation()));
+ return Collections.singletonList(new UpdateResult(1, 0));
+ }
+ }
+
+ @Override
+ public String getType() {
+ return MySQLDeleteStatement.class.getCanonicalName();
+ }
+
+}
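
A sketch of what the batch branch ultimately sends to HBase, assuming an open org.apache.hadoop.hbase.client.Table for t_test_order; a mutable list is used because Table.delete(List) may modify it:

    // One Delete per row key collected from the IN (...) predicate.
    List<Delete> deletes = new ArrayList<>(Arrays.asList(new Delete(Bytes.toBytes("1")), new Delete(Bytes.toBytes("2"))));
    table.delete(deletes);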
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/update/HBaseDatabaseInsertUpdater.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/update/HBaseDatabaseInsertUpdater.java
new file mode 100644
index 00000000000..0024b71317b
--- /dev/null
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/update/HBaseDatabaseInsertUpdater.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.result.update;
+
+import org.apache.hadoop.hbase.client.Put;
+import
org.apache.shardingsphere.infra.executor.sql.execute.result.update.UpdateResult;
+import
org.apache.shardingsphere.proxy.backend.hbase.converter.HBaseInsertOperationAdapter;
+import org.apache.shardingsphere.proxy.backend.hbase.bean.HBaseOperation;
+import org.apache.shardingsphere.proxy.backend.hbase.executor.HBaseExecutor;
+import
org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.dml.MySQLInsertStatement;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * HBase database insert updater.
+ */
+public final class HBaseDatabaseInsertUpdater implements HBaseDatabaseUpdater {
+
+ @Override
+ public String getType() {
+ return MySQLInsertStatement.class.getCanonicalName();
+ }
+
+ /**
+ * Execute HBase operation.
+ *
+ * @param hbaseOperation HBase operation
+ * @return affected rows
+ */
+ @Override
+ public Collection<UpdateResult> executeUpdate(final HBaseOperation
hbaseOperation) {
+ List<Put> puts = ((HBaseInsertOperationAdapter)
hbaseOperation.getOperation()).getPuts();
+ HBaseExecutor.executeUpdate(hbaseOperation.getTableName(), table ->
table.put(puts));
+ return Collections.singletonList(new UpdateResult(puts.size(), 0));
+ }
+}
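
A sketch of the Put list the insert path hands to HBase, assuming an open Table; the column family name "i" is purely illustrative, since the converter that builds the real Puts lives elsewhere in this commit:

    Put put = new Put(Bytes.toBytes("1"));
    put.addColumn(Bytes.toBytes("i"), Bytes.toBytes("v1"), Bytes.toBytes("2"));
    put.addColumn(Bytes.toBytes("i"), Bytes.toBytes("v2"), Bytes.toBytes("3"));
    table.put(Collections.singletonList(put));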
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/update/HBaseDatabaseUpdateUpdater.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/update/HBaseDatabaseUpdateUpdater.java
new file mode 100644
index 00000000000..6d16b8470fc
--- /dev/null
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/update/HBaseDatabaseUpdateUpdater.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.result.update;
+
+import org.apache.hadoop.hbase.client.Put;
+import
org.apache.shardingsphere.infra.executor.sql.execute.result.update.UpdateResult;
+import org.apache.shardingsphere.proxy.backend.hbase.bean.HBaseOperation;
+import
org.apache.shardingsphere.proxy.backend.hbase.converter.HBaseUpdateOperationAdapter;
+import org.apache.shardingsphere.proxy.backend.hbase.executor.HBaseExecutor;
+import
org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.dml.MySQLUpdateStatement;
+import java.util.Collection;
+import java.util.Collections;
+
+/**
+ * HBase database update updater.
+ */
+public final class HBaseDatabaseUpdateUpdater implements HBaseDatabaseUpdater {
+
+ /**
+ * Execute HBase operation.
+ *
+ * @param hbaseOperation HBase operation
+ * @return affected rows
+ */
+ @Override
+ public Collection<UpdateResult> executeUpdate(final HBaseOperation
hbaseOperation) {
+ if (hbaseOperation.getOperation() instanceof
HBaseUpdateOperationAdapter) {
+ HBaseExecutor.executeUpdate(hbaseOperation.getTableName(), table
-> table.put(((HBaseUpdateOperationAdapter)
hbaseOperation.getOperation()).getPuts()));
+ return Collections.singletonList(new
UpdateResult(((HBaseUpdateOperationAdapter)
hbaseOperation.getOperation()).getPuts().size(), 0));
+ } else {
+ HBaseExecutor.executeUpdate(hbaseOperation.getTableName(), table
-> table.put((Put) hbaseOperation.getOperation()));
+ return Collections.singletonList(new UpdateResult(1, 0));
+ }
+ }
+
+ @Override
+ public String getType() {
+ return MySQLUpdateStatement.class.getCanonicalName();
+ }
+}
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseAdminCallback.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/update/HBaseDatabaseUpdater.java
similarity index 58%
rename from
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseAdminCallback.java
rename to
proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/update/HBaseDatabaseUpdater.java
index 9f373d897c0..d025d9ec149 100644
---
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseAdminCallback.java
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/update/HBaseDatabaseUpdater.java
@@ -15,24 +15,24 @@
* limitations under the License.
*/
-package org.apache.shardingsphere.proxy.backend.hbase.connector;
+package org.apache.shardingsphere.proxy.backend.hbase.result.update;
-import org.apache.hadoop.hbase.client.Admin;
-import java.io.IOException;
+import
org.apache.shardingsphere.infra.executor.sql.execute.result.update.UpdateResult;
+import org.apache.shardingsphere.infra.util.spi.type.typed.TypedSPI;
+import org.apache.shardingsphere.proxy.backend.hbase.bean.HBaseOperation;
+import java.util.Collection;
/**
- * Call back for HBase operation.
+ * HBase backend updater.
*
- * @param <T> type of result
*/
-public interface HBaseAdminCallback<T> {
+public interface HBaseDatabaseUpdater extends TypedSPI {
/**
- * Execute in HBase.
+ * Execute HBase operation.
*
- * @param admin execute in HBase table
- * @return execute result
- * @throws IOException IO exception
+ * @param hbaseOperation HBase operation
+ * @return affected rows
*/
- T executeInHBase(Admin admin) throws IOException;
+ Collection<UpdateResult> executeUpdate(HBaseOperation hbaseOperation);
}
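
A sketch of dispatching to an updater by its TypedSPI type, which these implementations define as the canonical class name of the SQL statement they handle; the proxy is expected to resolve implementations through the SPI loader rather than a hand-built map:

    Map<String, HBaseDatabaseUpdater> updaters = new HashMap<>();
    for (HBaseDatabaseUpdater each : Arrays.asList(new HBaseDatabaseInsertUpdater(), new HBaseDatabaseUpdateUpdater(), new HBaseDatabaseDeleteUpdater())) {
        updaters.put(each.getType(), each);
    }
    // Pick the updater whose type matches the parsed statement's class name.
    HBaseDatabaseUpdater updater = updaters.get(sqlStatement.getClass().getCanonicalName());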
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/update/HBaseRegionReloadUpdater.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/update/HBaseRegionReloadUpdater.java
new file mode 100644
index 00000000000..fb85040178f
--- /dev/null
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/update/HBaseRegionReloadUpdater.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.result.update;
+
+import
org.apache.shardingsphere.infra.executor.sql.execute.result.update.UpdateResult;
+import org.apache.shardingsphere.proxy.backend.hbase.context.HBaseContext;
+import
org.apache.shardingsphere.proxy.backend.hbase.context.HBaseRegionWarmUpContext;
+import org.apache.shardingsphere.proxy.backend.hbase.bean.HBaseOperation;
+import
org.apache.shardingsphere.proxy.backend.hbase.exception.HBaseOperationException;
+import
org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.dal.MySQLFlushStatement;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * HBase database region reload updater.
+ */
+public final class HBaseRegionReloadUpdater implements HBaseDatabaseUpdater {
+
+ /**
+ * Execute HBase operation.
+ *
+ * @param hbaseOperation HBase operation
+ * @return affected rows
+ */
+ @Override
+ public Collection<UpdateResult> executeUpdate(final HBaseOperation
hbaseOperation) {
+ List<String> tables =
Arrays.asList(hbaseOperation.getTableName().split(","));
+ AtomicInteger updateCount = new AtomicInteger();
+
tables.stream().filter(this::isNotNullTableName).forEach(this::checkTableExists);
+ tables.stream().filter(this::isNotNullTableName).forEach(tableName -> {
+ updateCount.getAndIncrement();
+ HBaseRegionWarmUpContext.getInstance().loadRegionInfo(tableName,
HBaseContext.getInstance().getConnection(tableName));
+ });
+ return Collections.singletonList(new UpdateResult(updateCount.get(),
0));
+ }
+
+ private void checkTableExists(final String tableName) {
+ if (!HBaseContext.getInstance().isTableExists(tableName)) {
+            throw new HBaseOperationException(String.format("Table `%s` does not exist", tableName));
+ }
+ }
+
+ private boolean isNotNullTableName(final String tableName) {
+ return !"".equals(tableName) && !"null".equalsIgnoreCase(tableName);
+ }
+
+ @Override
+ public String getType() {
+ return MySQLFlushStatement.class.getCanonicalName();
+ }
+}
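
A quick, plain-Java check of the filtering rule above: blank entries and the literal string "null" are skipped before regions are warmed up.

    List<String> tables = Arrays.asList("t_order_a,,null,t_order_b".split(","));
    long reloaded = tables.stream().filter(each -> !"".equals(each) && !"null".equalsIgnoreCase(each)).count();
    // reloaded == 2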
diff --git
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/util/HeterogeneousUtil.java
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/util/HeterogeneousUtil.java
new file mode 100644
index 00000000000..6090028ca4e
--- /dev/null
+++
b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/util/HeterogeneousUtil.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.util;
+
+import lombok.AccessLevel;
+import lombok.NoArgsConstructor;
+import
org.apache.shardingsphere.infra.binder.segment.select.projection.impl.ShorthandProjection;
+import
org.apache.shardingsphere.infra.binder.statement.dml.SelectStatementContext;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ExpressionProjectionSegment;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionSegment;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Heterogeneous utility class.
+ */
+@NoArgsConstructor(access = AccessLevel.PRIVATE)
+public class HeterogeneousUtil {
+
+    /**
+     * Judge whether projection segment is a CRC32 expression.
+     *
+     * @param projectionSegment projection segment
+     * @return whether projection segment is a CRC32 expression
+     */
+    public static boolean isCrcProjectionSegment(final ProjectionSegment projectionSegment) {
+        if (projectionSegment instanceof ExpressionProjectionSegment) {
+            return ((ExpressionProjectionSegment) projectionSegment).getText().contains("crc32");
+        }
+        return false;
+    }
+
+    /**
+     * Convert prepared statement to literal statement.
+     *
+     * @param source SQL statement with parameter markers
+     * @param target parameter marker
+     * @param replacements parameters
+     * @return literal statement
+     */
+ public static String replaceSQLStatementWithParameters(final String
source, final CharSequence target, final Object... replacements) {
+ if (null == source || null == replacements) {
+ return source;
+ }
+ Matcher matcher = Pattern.compile(target.toString(),
Pattern.LITERAL).matcher(source);
+ int found = 0;
+ StringBuffer sb = new StringBuffer();
+ while (matcher.find()) {
+ found++;
+ if (found > replacements.length) {
+ throw new IllegalArgumentException(String.format("Missing
replacement for '%s' at [%s].", target, found));
+ }
+ matcher.appendReplacement(sb,
Matcher.quoteReplacement(replacements[found - 1].toString()));
+ }
+ matcher.appendTail(sb);
+ return sb.toString();
+ }
+
+    /**
+     * Return true if the SELECT statement uses shorthand projection (*).
+     *
+     * @param statementContext select statement context
+     * @return whether shorthand projection is used
+     */
+ public static boolean isUseShorthandProjection(final
SelectStatementContext statementContext) {
+ return
statementContext.getProjectionsContext().getProjections().stream().anyMatch(each
-> each instanceof ShorthandProjection);
+ }
+}
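
A usage sketch for the parameter replacement helper above; note that replacements are inserted verbatim, without quoting:

    String literalSQL = HeterogeneousUtil.replaceSQLStatementWithParameters(
            "select /*+ hbase */ * from t_test_order where rowKey = ? and age = ?", "?", "1001", 10);
    // literalSQL -> "select /*+ hbase */ * from t_test_order where rowKey = 1001 and age = 10"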
diff --git
a/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseConverterFactoryTest.java
b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseConverterFactoryTest.java
new file mode 100644
index 00000000000..7beec90a6f6
--- /dev/null
+++
b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseConverterFactoryTest.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.converter;
+
+import org.apache.shardingsphere.infra.binder.statement.SQLStatementContext;
+import
org.apache.shardingsphere.infra.binder.statement.dal.FlushStatementContext;
+import
org.apache.shardingsphere.infra.binder.statement.dml.DeleteStatementContext;
+import
org.apache.shardingsphere.infra.binder.statement.dml.InsertStatementContext;
+import
org.apache.shardingsphere.infra.binder.statement.dml.SelectStatementContext;
+import
org.apache.shardingsphere.infra.binder.statement.dml.UpdateStatementContext;
+import org.junit.Test;
+import static org.hamcrest.CoreMatchers.instanceOf;
+import static org.junit.Assert.assertThat;
+import static org.mockito.Mockito.mock;
+
+public final class HBaseDatabaseConverterFactoryTest {
+
+ @Test
+ public void assertExecuteSelectStatement() {
+ SQLStatementContext<?> sqlStatementContext =
mock(SelectStatementContext.class);
+ HBaseDatabaseConverter converter =
HBaseDatabaseConverterFactory.newInstance(sqlStatementContext);
+ assertThat(converter, instanceOf(HBaseDatabaseSelectConverter.class));
+ }
+
+ @Test
+ public void assertExecuteInsertStatement() {
+ SQLStatementContext<?> sqlStatementContext =
mock(InsertStatementContext.class);
+ HBaseDatabaseConverter converter =
HBaseDatabaseConverterFactory.newInstance(sqlStatementContext);
+ assertThat(converter, instanceOf(HBaseDatabaseInsertConverter.class));
+ }
+
+ @Test
+ public void assertExecuteUpdateStatement() {
+ SQLStatementContext<?> sqlStatementContext =
mock(UpdateStatementContext.class);
+ HBaseDatabaseConverter converter =
HBaseDatabaseConverterFactory.newInstance(sqlStatementContext);
+ assertThat(converter, instanceOf(HBaseDatabaseUpdateConverter.class));
+ }
+
+ @Test
+ public void assertExecuteDeleteStatement() {
+ SQLStatementContext<?> sqlStatementContext =
mock(DeleteStatementContext.class);
+ HBaseDatabaseConverter converter =
HBaseDatabaseConverterFactory.newInstance(sqlStatementContext);
+ assertThat(converter, instanceOf(HBaseDatabaseDeleteConverter.class));
+ }
+
+ @Test
+ public void assertExecuteFlushStatement() {
+ SQLStatementContext<?> sqlStatementContext =
mock(FlushStatementContext.class);
+ HBaseDatabaseConverter converter =
HBaseDatabaseConverterFactory.newInstance(sqlStatementContext);
+ assertThat(converter, instanceOf(HBaseRegionReloadConverter.class));
+ }
+}
diff --git
a/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseDeleteConverterTest.java
b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseDeleteConverterTest.java
new file mode 100644
index 00000000000..ffcea8e69b5
--- /dev/null
+++
b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseDeleteConverterTest.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.converter;
+
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.shardingsphere.infra.binder.SQLStatementContextFactory;
+import org.apache.shardingsphere.infra.binder.statement.SQLStatementContext;
+import org.apache.shardingsphere.proxy.backend.hbase.bean.HBaseOperation;
+import
org.apache.shardingsphere.proxy.backend.hbase.result.HBaseSupportedSQLStatement;
+import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement;
+import org.junit.Test;
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.instanceOf;
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+
+public final class HBaseDatabaseDeleteConverterTest {
+
+ @Test
+ public void assertConvert() {
+ SQLStatement sqlStatement =
HBaseSupportedSQLStatement.parseSQLStatement(HBaseSupportedSQLStatement.getDeleteStatement());
+ SQLStatementContext<?> sqlStatementContext =
SQLStatementContextFactory.newInstance(null, sqlStatement, "");
+ HBaseDatabaseConverter converter =
HBaseDatabaseConverterFactory.newInstance(sqlStatementContext);
+ HBaseOperation hbaseOperation = converter.convert();
+ assertThat(hbaseOperation.getTableName(),
equalTo(HBaseSupportedSQLStatement.HBASE_DATABASE_TABLE_NAME));
+ assertThat(hbaseOperation.getOperation(), instanceOf(Delete.class));
+ }
+
+ @Test
+ public void assertConvertWithIn() {
+ String sql = " delete /*+ hbase */ from t_test_order where rowKey in
('2', '1')";
+ SQLStatement sqlStatement =
HBaseSupportedSQLStatement.parseSQLStatement(sql);
+ SQLStatementContext<?> sqlStatementContext =
SQLStatementContextFactory.newInstance(null, sqlStatement, "");
+ HBaseDatabaseConverter converter =
HBaseDatabaseConverterFactory.newInstance(sqlStatementContext);
+ HBaseOperation hBaseOperation = converter.convert();
+ assertThat(hBaseOperation.getTableName(),
equalTo(HBaseSupportedSQLStatement.HBASE_DATABASE_TABLE_NAME));
+ assertThat(hBaseOperation.getOperation(),
instanceOf(HBaseDeleteOperationAdapter.class));
+ assertThat(((HBaseDeleteOperationAdapter)
hBaseOperation.getOperation()).getDeletes().size(), is(2));
+ }
+}
diff --git
a/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseRowKeysConverterAdapterTest.java
b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseRowKeysConverterAdapterTest.java
new file mode 100644
index 00000000000..9edaf05e6af
--- /dev/null
+++
b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseRowKeysConverterAdapterTest.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.converter;
+
+import
org.apache.shardingsphere.proxy.backend.hbase.result.HBaseSupportedSQLStatement;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.InExpression;
+import
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment;
+import
org.apache.shardingsphere.sql.parser.sql.common.statement.dml.SelectStatement;
+import org.junit.Test;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Optional;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+public class HBaseDatabaseRowKeysConverterAdapterTest {
+
+ @Test
+ public void assertGetRowKeysFromInExpression() {
+ SelectStatement sqlStatement = (SelectStatement)
HBaseSupportedSQLStatement.parseSQLStatement("select /*+ hbase */ * from
t_order where rowKey in ('1', '2') ");
+ Optional<WhereSegment> whereSegment = sqlStatement.getWhere();
+ HBaseDatabaseRowKeysConverterAdapter adapter = new
HBaseDatabaseRowKeysConverterAdapter();
+ if (whereSegment.isPresent()) {
+ List<String> rowKeys =
adapter.getRowKeysFromWhereSegmentByIn((InExpression)
whereSegment.get().getExpr());
+            List<String> expected = Arrays.asList("1", "2");
+            assertEquals(expected, rowKeys);
+ } else {
+ fail();
+ }
+ }
+}
diff --git
a/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseUpdateConverterTest.java
b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseUpdateConverterTest.java
new file mode 100644
index 00000000000..ea0abe15c01
--- /dev/null
+++
b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/converter/HBaseDatabaseUpdateConverterTest.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.converter;
+
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.shardingsphere.infra.binder.SQLStatementContextFactory;
+import org.apache.shardingsphere.infra.binder.statement.SQLStatementContext;
+import org.apache.shardingsphere.proxy.backend.hbase.bean.HBaseOperation;
+import
org.apache.shardingsphere.proxy.backend.hbase.result.HBaseSupportedSQLStatement;
+import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement;
+import org.junit.Test;
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.instanceOf;
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+
+public final class HBaseDatabaseUpdateConverterTest {
+
+ @Test
+ public void assertConvert() {
+ SQLStatement sqlStatement =
HBaseSupportedSQLStatement.parseSQLStatement(HBaseSupportedSQLStatement.getUpdateStatement());
+ SQLStatementContext<?> sqlStatementContext =
SQLStatementContextFactory.newInstance(null, sqlStatement, "");
+ HBaseDatabaseConverter converter =
HBaseDatabaseConverterFactory.newInstance(sqlStatementContext);
+ HBaseOperation hbaseOperation = converter.convert();
+ assertThat(hbaseOperation.getTableName(),
equalTo(HBaseSupportedSQLStatement.HBASE_DATABASE_TABLE_NAME));
+ assertThat(hbaseOperation.getOperation(), instanceOf(Put.class));
+ }
+
+ @Test
+ public void assertConvertWithIn() {
+ String sql = " update /*+ hbase */ t_test_order set age = 10 where rowKey in (1, '2')";
+ SQLStatement sqlStatement = HBaseSupportedSQLStatement.parseSQLStatement(sql);
+ SQLStatementContext<?> sqlStatementContext = SQLStatementContextFactory.newInstance(null, sqlStatement, "");
+ HBaseDatabaseConverter converter = HBaseDatabaseConverterFactory.newInstance(sqlStatementContext);
+ HBaseOperation hBaseOperation = converter.convert();
+ assertThat(hBaseOperation.getTableName(), equalTo(HBaseSupportedSQLStatement.HBASE_DATABASE_TABLE_NAME));
+ assertThat(hBaseOperation.getOperation(), instanceOf(HBaseUpdateOperationAdapter.class));
+ assertThat(((HBaseUpdateOperationAdapter) hBaseOperation.getOperation()).getPuts().size(), is(2));
+ }
+}
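The two cases above pin down the converter's contract: a single-rowKey UPDATE converts to a plain Put, while an IN clause yields an HBaseUpdateOperationAdapter wrapping one Put per row key. For reference, a minimal consumer sketch under that contract; the class and method names are illustrative, not part of this patch, and the element type of getPuts() is assumed to be Put.

    import java.util.Collections;
    import java.util.List;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.shardingsphere.proxy.backend.hbase.bean.HBaseOperation;
    import org.apache.shardingsphere.proxy.backend.hbase.converter.HBaseUpdateOperationAdapter;

    // Illustrative sketch only: flattens a converted update into the Put list an executor would batch.
    public final class HBaseUpdateOperationSketch {

        public static List<Put> toPuts(final HBaseOperation operation) {
            // Multi-row updates (rowKey IN (...)) arrive as an adapter carrying several Puts.
            if (operation.getOperation() instanceof HBaseUpdateOperationAdapter) {
                return ((HBaseUpdateOperationAdapter) operation.getOperation()).getPuts();
            }
            // Single-row updates arrive as one Put, as asserted in assertConvert() above.
            return Collections.singletonList((Put) operation.getOperation());
        }
    }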
diff --git
a/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/result/HBaseSupportedSQLStatement.java
b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/result/HBaseSupportedSQLStatement.java
new file mode 100644
index 00000000000..ea7c818b01a
--- /dev/null
+++
b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/result/HBaseSupportedSQLStatement.java
@@ -0,0 +1,123 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.result;
+
+import org.apache.shardingsphere.sql.parser.api.CacheOption;
+import org.apache.shardingsphere.sql.parser.api.SQLParserEngine;
+import org.apache.shardingsphere.sql.parser.api.SQLVisitorEngine;
+import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement;
+import java.util.Properties;
+
+/**
+ * SQL statements supported in HBase, used for tests.
+ */
+public class HBaseSupportedSQLStatement {
+
+ public static final String HBASE_DATABASE_TABLE_NAME = "t_test_order";
+
+ private static final String SELECT_STATEMENT = "select /*+ hbase */ * from t_order where id = 1";
+
+ private static final String SHOW_TABLE_STATEMENT = "show /*+ hbase */ tables";
+
+ private static final String SHOW_CREATE_TABLE_STATEMENT = "show /*+ hbase */ create table t_test_order";
+
+ private static final String UPDATE_STATEMENT = "update /*+ hbase */ t_test_order set age = 10 where rowKey = 1";
+
+ private static final String DELETE_STATEMENT = "delete /*+ hbase */ from t_test_order where rowKey = 'kid'";
+
+ private static final String INSERT_STATEMENT = "insert /*+ hbase */ into t_test_order(rowKey, v1, v2) values(1, 2, 3)";
+
+ private static final String SHOW_DATABASES = "show /*+ hbase */ databases";
+
+ private static final String FLUSH_TABLES = "flush /*+ hbase */ tables t_test";
+
+ /**
+ * Parse SQL statement.
+ *
+ * @param sql SQL to be parsed
+ * @return parsed SQL statement
+ */
+ public static SQLStatement parseSQLStatement(final String sql) {
+ return new SQLVisitorEngine("MySQL", "STATEMENT", false, new Properties()).visit(new SQLParserEngine("MySQL",
+ new CacheOption(128, 4)).parse(sql, false));
+ }
+
+ /**
+ * Get insert statement for test.
+ *
+ * @return insert SQL statement
+ */
+ public static String getInsertStatement() {
+ return INSERT_STATEMENT;
+ }
+
+ /**
+ * Get delete statement for test.
+ *
+ * @return delete SQL statement
+ */
+ public static String getDeleteStatement() {
+ return DELETE_STATEMENT;
+ }
+
+ /**
+ * Get update statement for test.
+ *
+ * @return update SQL statement
+ */
+ public static String getUpdateStatement() {
+ return UPDATE_STATEMENT;
+ }
+
+ /**
+ * Get select statement for test.
+ *
+ * @return select SQL statement
+ */
+ public static String getSelectStatement() {
+ return SELECT_STATEMENT;
+ }
+
+ /**
+ * Get show create table statement for test.
+ *
+ * @return show create table SQL statement
+ */
+ public static String getShowCreateTableStatement() {
+ return SHOW_CREATE_TABLE_STATEMENT;
+ }
+
+ /**
+ * Get show tables statement for test.
+ *
+ * @return show tables SQL statement
+ */
+ public static String getShowTableStatement() {
+ return SHOW_TABLE_STATEMENT;
+ }
+
+ /**
+ * Get show databases statement for test.
+ *
+ * @return show databases SQL statement
+ */
+ public static String getShowDatabaseStatement() {
+ return SHOW_DATABASES;
+ }
+
+ /**
+ * Get flush tables statement for test.
+ *
+ * @return flush tables SQL statement
+ */
+ public static String getFlushTablesStatement() {
+ return FLUSH_TABLES;
+ }
+}
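As the converter tests above show, these canned statements are parsed with the MySQL dialect parser and then bound into a statement context. A minimal usage sketch of that flow; the class name is illustrative, not part of the patch.

    import org.apache.shardingsphere.infra.binder.SQLStatementContextFactory;
    import org.apache.shardingsphere.infra.binder.statement.SQLStatementContext;
    import org.apache.shardingsphere.proxy.backend.hbase.result.HBaseSupportedSQLStatement;
    import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement;

    // Illustrative sketch only: parse a canned HBase-hinted statement and bind it into a context.
    public final class HBaseSupportedSQLStatementUsageSketch {

        public static SQLStatementContext<?> buildSelectContext() {
            SQLStatement statement = HBaseSupportedSQLStatement.parseSQLStatement(HBaseSupportedSQLStatement.getSelectStatement());
            // Same call shape as the converter tests: no metadata and an empty default database name.
            return SQLStatementContextFactory.newInstance(null, statement, "");
        }
    }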
diff --git
a/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/AbstractHBaseDatabaseQueryResultSetTest.java
b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/AbstractHBaseDatabaseQueryResultSetTest.java
new file mode 100644
index 00000000000..f4a0f28a9cc
--- /dev/null
+++
b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/AbstractHBaseDatabaseQueryResultSetTest.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.result.query;
+
+import lombok.Getter;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.shardingsphere.infra.binder.segment.table.TablesContext;
+import org.apache.shardingsphere.proxy.backend.hbase.context.HBaseContext;
+import org.apache.shardingsphere.proxy.backend.hbase.props.HBaseProperties;
+import org.apache.shardingsphere.proxy.backend.hbase.props.HBasePropertyKey;
+import org.apache.shardingsphere.proxy.backend.hbase.result.HBaseSupportedSQLStatement;
+import org.apache.shardingsphere.test.util.PropertiesBuilder;
+import org.apache.shardingsphere.test.util.PropertiesBuilder.Property;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.mockito.junit.jupiter.MockitoSettings;
+import org.mockito.quality.Strictness;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Properties;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+@ExtendWith(MockitoExtension.class)
+@MockitoSettings(strictness = Strictness.LENIENT)
+@Getter
+public abstract class AbstractHBaseDatabaseQueryResultSetTest {
+
+ private final Table table = mock(Table.class, RETURNS_DEEP_STUBS);
+
+ private final Admin admin = mock(HBaseAdmin.class, RETURNS_DEEP_STUBS);
+
+ private final Connection connection = mock(Connection.class, RETURNS_DEEP_STUBS);
+
+ private final TablesContext tablesContext = mock(TablesContext.class, RETURNS_DEEP_STUBS);
+
+ private Collection<String> tableNames;
+
+ @Before
+ public void setUp() throws IOException {
+ Properties props = createProperties();
+ HBaseProperties hBaseProperties = new HBaseProperties(props);
+ HBaseContext.getInstance().setProps(hBaseProperties);
+ tableNames = new ArrayList<>();
+ tableNames.add("t_test_table");
+ when(tablesContext.getTableNames()).thenReturn(tableNames);
+ HTableDescriptor[] tableDescriptors = createHTableDescriptors();
+ when(admin.tableExists(any())).thenReturn(true);
+ when(admin.getTableDescriptor(any())).thenReturn(tableDescriptors[0]);
+ when(admin.listTables()).thenReturn(tableDescriptors);
+ when(connection.getAdmin()).thenReturn(admin);
+ when(connection.getTable(any())).thenReturn(table);
+ HBaseContext.getInstance().init(Collections.singletonMap("cluster_lj", connection));
+ }
+
+ private Properties createProperties() {
+ return PropertiesBuilder.build(
+ new Property(HBasePropertyKey.WARM_UP_THREAD_NUM.getKey(), String.valueOf(1)));
+ }
+
+ private HTableDescriptor[] createHTableDescriptors() {
+ HTableDescriptor descriptor = mock(HTableDescriptor.class);
+ when(descriptor.getNameAsString()).thenReturn(HBaseSupportedSQLStatement.HBASE_DATABASE_TABLE_NAME);
+ when(descriptor.toStringTableAttributes()).thenReturn("{attributes}");
+ when(descriptor.getFlushPolicyClassName()).thenReturn("");
+ when(descriptor.getMaxFileSize()).thenReturn(-1L);
+ when(descriptor.getMemStoreFlushSize()).thenReturn(-1L);
+ when(descriptor.getPriority()).thenReturn(0);
+ when(descriptor.getRegionReplication()).thenReturn(1);
+ when(descriptor.getRegionSplitPolicyClassName()).thenReturn(null);
+ when(descriptor.toStringCustomizedValues()).thenReturn("");
+ when(descriptor.getFamilies()).thenReturn(Collections.singletonList(mock(HColumnDescriptor.class)));
+ return new HTableDescriptor[]{descriptor};
+ }
+
+ @After
+ public void tearDown() {
+ HBaseContext.getInstance().close();
+ }
+}
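Concrete result-set tests extend this base class and lean on the Lombok-generated getters plus the stubs registered in setUp(): the mocked connection is published to HBaseContext under the cluster name cluster_lj, Admin#tableExists answers true, and listTables()/getTableDescriptor() return the single mocked descriptor. A minimal, purely hypothetical subclass showing that wiring (not part of the patch):

    import java.io.IOException;
    import org.junit.Test;
    import static org.junit.Assert.assertTrue;

    // Illustrative sketch only: not part of the patch.
    public final class HBaseQueryResultSetFixtureSketchTest extends AbstractHBaseDatabaseQueryResultSetTest {

        @Test
        public void assertMockedAdminIsWired() throws IOException {
            // tableExists(any()) is stubbed to true in the base class, so any table name passes.
            assertTrue(getAdmin().tableExists(null));
        }
    }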
diff --git
a/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/HBaseDatabaseDescribeResultSetTest.java
b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/HBaseDatabaseDescribeResultSetTest.java
new file mode 100644
index 00000000000..e0b12091a6b
--- /dev/null
+++
b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/HBaseDatabaseDescribeResultSetTest.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.result.query;
+
+import org.apache.shardingsphere.infra.binder.segment.table.TablesContext;
+import org.apache.shardingsphere.infra.binder.statement.dal.ShowCreateTableStatementContext;
+import org.apache.shardingsphere.proxy.backend.hbase.exception.HBaseOperationException;
+import org.apache.shardingsphere.proxy.backend.hbase.result.HBaseSupportedSQLStatement;
+import org.junit.Test;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class HBaseDatabaseDescribeResultSetTest extends AbstractHBaseDatabaseQueryResultSetTest {
+
+ private final TablesContext tablesContext = mock(TablesContext.class, RETURNS_DEEP_STUBS);
+
+ @Test
+ public void assertGetRowData() {
+ HBaseDatabaseQueryResultSet resultSet = new HBaseDatabaseDescribeResultSet();
+ ShowCreateTableStatementContext context = mock(ShowCreateTableStatementContext.class);
+ when(context.getTablesContext()).thenReturn(tablesContext);
+ when(context.getTablesContext().getTableNames().iterator().next()).thenReturn(HBaseSupportedSQLStatement.HBASE_DATABASE_TABLE_NAME);
+ resultSet.init(context);
+
+ List<Object> actual = new ArrayList<>(resultSet.getRowData());
+ assertThat(actual.size(), is(9));
+ assertThat(actual.get(0), is(HBaseSupportedSQLStatement.HBASE_DATABASE_TABLE_NAME));
+ assertThat(actual.get(5), is(0));
+ assertThat(actual.get(6), is(1));
+ assertThat(actual.get(8), is(""));
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void assertGetRowDataWithTableIsNotExists() throws IOException {
+ when(getAdmin().tableExists(any())).thenReturn(false);
+ ShowCreateTableStatementContext context = mock(ShowCreateTableStatementContext.class);
+ when(context.getTablesContext()).thenReturn(tablesContext);
+ when(context.getTablesContext().getTableNames().iterator().next()).thenReturn(HBaseSupportedSQLStatement.HBASE_DATABASE_TABLE_NAME);
+ HBaseDatabaseQueryResultSet resultSet = new HBaseDatabaseDescribeResultSet();
+ resultSet.init(context);
+ }
+
+ @Test(expected = HBaseOperationException.class)
+ public void assertGetRowDataWithBackendError() throws IOException {
+ when(getAdmin().getTableDescriptor(any())).thenThrow(IOException.class);
+ ShowCreateTableStatementContext context = mock(ShowCreateTableStatementContext.class);
+ when(context.getTablesContext()).thenReturn(tablesContext);
+ when(context.getTablesContext().getTableNames().iterator().next()).thenReturn(HBaseSupportedSQLStatement.HBASE_DATABASE_TABLE_NAME);
+ HBaseDatabaseQueryResultSet resultSet = new HBaseDatabaseDescribeResultSet();
+ resultSet.init(context);
+ }
+}
diff --git
a/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/HBaseDatabaseListResultSetTest.java
b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/HBaseDatabaseListResultSetTest.java
new file mode 100644
index 00000000000..ea2006d1320
--- /dev/null
+++
b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/HBaseDatabaseListResultSetTest.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.result.query;
+
+import org.apache.shardingsphere.infra.binder.statement.dal.ShowTablesStatementContext;
+import org.apache.shardingsphere.proxy.backend.hbase.exception.HBaseOperationException;
+import org.apache.shardingsphere.proxy.backend.hbase.result.HBaseSupportedSQLStatement;
+import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement;
+import org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.dal.MySQLShowTablesStatement;
+import org.junit.Test;
+import java.util.ArrayList;
+import java.util.List;
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public final class HBaseDatabaseListResultSetTest extends AbstractHBaseDatabaseQueryResultSetTest {
+
+ @Test
+ public void assertGetRowData() {
+ SQLStatement sqlStatement = HBaseSupportedSQLStatement.parseSQLStatement(HBaseSupportedSQLStatement.getShowTableStatement());
+ HBaseDatabaseQueryResultSet resultSet = new HBaseDatabaseListResultSet();
+ ShowTablesStatementContext context = mock(ShowTablesStatementContext.class);
+ when(context.getSqlStatement()).thenReturn((MySQLShowTablesStatement) sqlStatement);
+ resultSet.init(context);
+
+ assertTrue(resultSet.next());
+ List<Object> actual = new ArrayList<>(resultSet.getRowData());
+ assertThat(actual.size(), is(2));
+ assertThat(actual.get(0), is("cluster_lj"));
+ assertThat(actual.get(1), is(HBaseSupportedSQLStatement.HBASE_DATABASE_TABLE_NAME));
+ }
+
+ @Test
+ public void assertGetRowDataFromRemoteHBaseCluster() {
+ SQLStatement sqlStatement = HBaseSupportedSQLStatement.parseSQLStatement("show /*+ hbase */ tables from cluster_lj");
+ HBaseDatabaseQueryResultSet resultSet = new HBaseDatabaseListResultSet();
+ ShowTablesStatementContext context = mock(ShowTablesStatementContext.class);
+ when(context.getSqlStatement()).thenReturn((MySQLShowTablesStatement) sqlStatement);
+ resultSet.init(context);
+
+ assertTrue(resultSet.next());
+ List<Object> actual = new ArrayList<>(resultSet.getRowData());
+ assertThat(actual.size(), is(2));
+ assertThat(actual.get(0), is("cluster_lj"));
+ assertThat(actual.get(1), is(HBaseSupportedSQLStatement.HBASE_DATABASE_TABLE_NAME));
+ }
+
+ @Test
+ public void assertGetRowDataByLike() {
+ SQLStatement sqlStatement = HBaseSupportedSQLStatement.parseSQLStatement("show /*+ hbase */ tables like 't_test' ");
+ HBaseDatabaseQueryResultSet resultSet = new HBaseDatabaseListResultSet();
+ ShowTablesStatementContext context = mock(ShowTablesStatementContext.class);
+ when(context.getSqlStatement()).thenReturn((MySQLShowTablesStatement) sqlStatement);
+ resultSet.init(context);
+
+ sqlStatement = HBaseSupportedSQLStatement.parseSQLStatement("show /*+ hbase */ tables like 't_test%' ");
+ when(context.getSqlStatement()).thenReturn((MySQLShowTablesStatement) sqlStatement);
+ resultSet.init(context);
+ assertTrue(resultSet.next());
+ List<Object> actual = new ArrayList<>(resultSet.getRowData());
+ assertThat(actual.size(), is(2));
+ assertThat(actual.get(1), is(HBaseSupportedSQLStatement.HBASE_DATABASE_TABLE_NAME));
+ }
+
+ @Test(expected = HBaseOperationException.class)
+ public void assertGetRowDataError() {
+ SQLStatement sqlStatement = HBaseSupportedSQLStatement.parseSQLStatement("show /*+ hbase */ tables from cluster_do_not_exists");
+ HBaseDatabaseQueryResultSet resultSet = new HBaseDatabaseListResultSet();
+ ShowTablesStatementContext context = mock(ShowTablesStatementContext.class);
+ when(context.getSqlStatement()).thenReturn((MySQLShowTablesStatement) sqlStatement);
+ resultSet.init(context);
+ }
+}
diff --git
a/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/result/update/HBaseDatabaseBackendUpdateHandlerTest.java
b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/result/update/HBaseDatabaseBackendUpdateHandlerTest.java
new file mode 100644
index 00000000000..3d1032565de
--- /dev/null
+++
b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/result/update/HBaseDatabaseBackendUpdateHandlerTest.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.result.update;
+
+import org.apache.shardingsphere.infra.executor.sql.execute.result.update.UpdateResult;
+import org.apache.shardingsphere.proxy.backend.hbase.impl.HBaseDatabaseBackendUpdateHandler;
+import org.apache.shardingsphere.proxy.backend.hbase.result.HBaseSupportedSQLStatement;
+import org.apache.shardingsphere.proxy.backend.response.header.update.UpdateResponseHeader;
+import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement;
+import org.junit.Test;
+import java.util.Collections;
+import static org.hamcrest.CoreMatchers.instanceOf;
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public final class HBaseDatabaseBackendUpdateHandlerTest {
+
+ @Test
+ public void assertExecuteDeleteStatement() {
+ HBaseDatabaseDeleteUpdater updater = mock(HBaseDatabaseDeleteUpdater.class);
+ when(updater.executeUpdate(any())).thenReturn(Collections.singletonList(new UpdateResult(1, 0)));
+ SQLStatement sqlStatement = HBaseSupportedSQLStatement.parseSQLStatement(HBaseSupportedSQLStatement.getDeleteStatement());
+ HBaseDatabaseBackendUpdateHandler handler = new HBaseDatabaseBackendUpdateHandler(sqlStatement, updater);
+ UpdateResponseHeader result = handler.execute();
+ assertUpdateResponseHeader(sqlStatement, result);
+ }
+
+ @Test
+ public void assertExecuteUpdateStatement() {
+ HBaseDatabaseUpdateUpdater updater = mock(HBaseDatabaseUpdateUpdater.class);
+ when(updater.executeUpdate(any())).thenReturn(Collections.singletonList(new UpdateResult(1, 0)));
+ SQLStatement sqlStatement = HBaseSupportedSQLStatement.parseSQLStatement(HBaseSupportedSQLStatement.getUpdateStatement());
+ HBaseDatabaseBackendUpdateHandler handler = new HBaseDatabaseBackendUpdateHandler(sqlStatement, updater);
+ UpdateResponseHeader result = handler.execute();
+ assertUpdateResponseHeader(sqlStatement, result);
+ }
+
+ @Test
+ public void assertFlushTableStatement() {
+ HBaseRegionReloadUpdater updater = mock(HBaseRegionReloadUpdater.class);
+ when(updater.executeUpdate(any())).thenReturn(Collections.singletonList(new UpdateResult(1, 0)));
+ SQLStatement sqlStatement = HBaseSupportedSQLStatement.parseSQLStatement(HBaseSupportedSQLStatement.getFlushTablesStatement());
+ HBaseDatabaseBackendUpdateHandler handler = new HBaseDatabaseBackendUpdateHandler(sqlStatement, updater);
+ UpdateResponseHeader result = handler.execute();
+ assertUpdateResponseHeader(sqlStatement, result);
+ }
+
+ private void assertUpdateResponseHeader(final SQLStatement sqlStatement, final UpdateResponseHeader responseHeader) {
+ assertThat(responseHeader, instanceOf(UpdateResponseHeader.class));
+ assertThat(responseHeader.getSqlStatement(), is(sqlStatement));
+ assertThat(responseHeader.getUpdateCount(), is(1L));
+ }
+}
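The three tests above follow one shape: stub an updater to report a single UpdateResult(1, 0), parse the matching statement, and assert the handler surfaces an update count of 1. If the delete, update, and reload updaters share a common supertype accepted by the handler constructor, the duplication could collapse into one helper sitting inside the test class above and reusing its static imports. A sketch, where "HBaseDatabaseUpdater" is named purely for illustration and should be adjusted to the actual type:

    // Illustrative sketch only; "HBaseDatabaseUpdater" stands in for whatever common
    // updater abstraction the handler constructor actually accepts.
    private void assertExecute(final HBaseDatabaseUpdater updater, final String sql) {
        when(updater.executeUpdate(any())).thenReturn(Collections.singletonList(new UpdateResult(1, 0)));
        SQLStatement sqlStatement = HBaseSupportedSQLStatement.parseSQLStatement(sql);
        HBaseDatabaseBackendUpdateHandler handler = new HBaseDatabaseBackendUpdateHandler(sqlStatement, updater);
        assertUpdateResponseHeader(sqlStatement, handler.execute());
    }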
diff --git
a/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/util/HeterogeneousUtilTest.java
b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/util/HeterogeneousUtilTest.java
new file mode 100644
index 00000000000..290bcfdbf4d
--- /dev/null
+++
b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/util/HeterogeneousUtilTest.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.proxy.backend.hbase.util;
+
+import org.apache.shardingsphere.proxy.backend.hbase.result.HBaseSupportedSQLStatement;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement;
+import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.SelectStatement;
+import org.junit.Test;
+import java.util.ArrayList;
+import java.util.List;
+import static org.junit.Assert.assertTrue;
+
+public class HeterogeneousUtilTest {
+
+ @Test
+ public void assertCrc32ProjectionSegment() {
+ String sql = "SELECT /*+ HBase */ rowKey, crc32(concat_ws('#',rowKey))
from t_order where rowKey in (1, 2, 3)";
+ SQLStatement sqlStatement =
HBaseSupportedSQLStatement.parseSQLStatement(sql);
+ List<ProjectionSegment> projectionSegments = new
ArrayList<>(((SelectStatement) sqlStatement).getProjections().getProjections());
+
assertTrue(HeterogeneousUtil.isCrcProjectionSegment(projectionSegments.get(1)));
+ }
+}
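The single case above only covers the positive path. Assuming isCrcProjectionSegment reports false for a plain column projection (which is the point of the check), a complementary negative case inside the same class would look like the sketch below, with assertFalse statically imported from org.junit.Assert; it is illustrative and not part of the patch.

    // Illustrative sketch only: expected to sit inside HeterogeneousUtilTest above.
    @Test
    public void assertPlainColumnProjectionSegment() {
        String sql = "SELECT /*+ HBase */ rowKey from t_order where rowKey in (1, 2, 3)";
        SQLStatement sqlStatement = HBaseSupportedSQLStatement.parseSQLStatement(sql);
        List<ProjectionSegment> projectionSegments = new ArrayList<>(((SelectStatement) sqlStatement).getProjections().getProjections());
        // Assumption: a bare column projection is not treated as a crc32 projection.
        assertFalse(HeterogeneousUtil.isCrcProjectionSegment(projectionSegments.get(0)));
    }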
diff --git
a/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/cdc/CDCE2EIT.java
b/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/cdc/CDCE2EIT.java
index cf25cc7e543..2d673cbf4f0 100644
---
a/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/cdc/CDCE2EIT.java
+++
b/test/e2e/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/cdc/CDCE2EIT.java
@@ -137,8 +137,9 @@ public final class CDCE2EIT {
log.info("init data begin: {}", LocalDateTime.now());
DataSourceExecuteUtil.execute(jdbcDataSource, containerComposer.getExtraSQLCommand().getFullInsertOrder(SOURCE_TABLE_ORDER_NAME), dataPair.getLeft());
log.info("init data end: {}", LocalDateTime.now());
- try (Connection connection = DriverManager.getConnection(containerComposer.getActualJdbcUrlTemplate(PipelineContainerComposer.DS_4, false),
- containerComposer.getUsername(), containerComposer.getPassword())) {
+ try (
+ Connection connection = DriverManager.getConnection(containerComposer.getActualJdbcUrlTemplate(PipelineContainerComposer.DS_4, false),
+ containerComposer.getUsername(), containerComposer.getPassword())) {
initSchemaAndTable(connection, 0);
}
startCDCClient();