This is an automated email from the ASF dual-hosted git repository.

duanzhengqiang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git


The following commit(s) were added to refs/heads/master by this push:
     new 10da929f68d Support Hive data mask E2E test and data encrypt DML statement test. (#36492)
10da929f68d is described below

commit 10da929f68d5b61e023404a1fc39d8c09cd173e2
Author: Claire <[email protected]>
AuthorDate: Tue Oct 14 10:06:01 2025 +0800

    Support Hive data mask E2E test and data encrypt DML statement test. (#36492)
    
    * new version
    
    * update
    
    * add spi
    
    * update
    
    * add one more spi
    
    * support encrypt e2e test
    
    * add one more spi
---
 .github/workflows/nightly-e2e-sql.yml              |  3 ++
 .../e2e/env/container/util/SQLScriptUtils.java     | 10 ++++
 .../util/spi/SQLBatchExecutionStrategy.java        | 51 ++++++++++++++++++
 .../spi/impl/HiveSQLBatchExecutionStrategy.java    | 48 +++++++++++++++++
 ...nv.container.util.spi.SQLBatchExecutionStrategy | 18 +++++++
 .../test/e2e/sql/cases/casse/SQLE2ETestCase.java   |  3 ++
 .../e2e/sql/env/DataSetEnvironmentManager.java     | 59 +++++++++++++++++----
 .../metadata/DialectQueryBehaviorProvider.java     | 42 +++++++++++++++
 .../dialect/HiveDialectQueryBehaviorProvider.java  | 38 ++++++++++++++
 .../param/array/E2ETestParameterGenerator.java     |  3 ++
 .../test/e2e/sql/it/sql/dml/BaseDMLE2EIT.java      | 19 ++++++-
 ...framework.metadata.DialectQueryBehaviorProvider | 18 +++++++
 .../dml/dataset/encrypt/update_user_table_hive.xml | 59 +++++++++++++++++++++
 .../test/resources/cases/dml/e2e-dml-delete.xml    |  4 ++
 .../test/resources/cases/dml/e2e-dml-insert.xml    |  4 ++
 .../test/resources/cases/dml/e2e-dml-update.xml    |  4 ++
 .../cases/dql/e2e-dql-select-group-by.xml          |  4 ++
 .../cases/dql/e2e-dql-select-order-by.xml          |  2 +-
 .../test/resources/cases/dql/e2e-dql-select.xml    |  4 ++
 .../init-sql/hive/50-scenario-actual-init.sql      | 23 ++++++---
 .../init-sql/hive/60-scenario-expected-init.sql    | 23 ++++++---
 .../init-sql/hive/50-scenario-actual-init.sql      | 33 ++++++++++++
 .../init-sql/hive/60-scenario-expected-init.sql    | 33 ++++++++++++
 .../env/scenario/mask/jdbc/conf/hive/rules.yaml    | 60 ++++++++++++++++++++++
 24 files changed, 541 insertions(+), 24 deletions(-)

diff --git a/.github/workflows/nightly-e2e-sql.yml b/.github/workflows/nightly-e2e-sql.yml
index 0540ac4bfd7..d7edde33647 100644
--- a/.github/workflows/nightly-e2e-sql.yml
+++ b/.github/workflows/nightly-e2e-sql.yml
@@ -86,6 +86,9 @@ jobs:
           - adapter: jdbc
             database: Hive
             scenario: encrypt
+          - adapter: jdbc
+            database: Hive
+            scenario: mask
         exclude:
           - adapter: jdbc
             scenario: passthrough
diff --git 
a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/util/SQLScriptUtils.java
 
b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/util/SQLScriptUtils.java
index b4e8be10d6c..95f3f5fdc2e 100644
--- 
a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/util/SQLScriptUtils.java
+++ 
b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/util/SQLScriptUtils.java
@@ -22,6 +22,7 @@ import lombok.NoArgsConstructor;
 import lombok.SneakyThrows;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.Strings;
+import org.apache.shardingsphere.test.e2e.env.container.util.spi.SQLBatchExecutionStrategy;
 import org.h2.util.ScriptReader;
 
 import javax.sql.DataSource;
@@ -38,6 +39,7 @@ import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.Collection;
 import java.util.LinkedList;
+import java.util.ServiceLoader;
 
 /**
  * SQL script utility class.
@@ -99,6 +101,14 @@ public final class SQLScriptUtils {
     private static void executeBatch(final Connection connection, final Collection<String> sqls) throws SQLException {
         int count = 0;
         try (Statement statement = connection.createStatement()) {
+            String driverName = connection.getMetaData().getDriverName();
+            ServiceLoader<SQLBatchExecutionStrategy> loader = ServiceLoader.load(SQLBatchExecutionStrategy.class);
+            for (SQLBatchExecutionStrategy strategy : loader) {
+                if (strategy.supports(driverName)) {
+                    strategy.execute(connection, statement, sqls);
+                    return;
+                }
+            }
             for (String each : sqls) {
                 statement.addBatch(each);
                 count++;
diff --git 
a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/util/spi/SQLBatchExecutionStrategy.java
 
b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/util/spi/SQLBatchExecutionStrategy.java
new file mode 100644
index 00000000000..d1a0f7df2bb
--- /dev/null
+++ 
b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/util/spi/SQLBatchExecutionStrategy.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.test.e2e.env.container.util.spi;
+
+import org.apache.shardingsphere.infra.spi.type.typed.TypedSPI;
+
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Collection;
+
+/**
+ * SQL batch execution strategy.
+ *
+ * <p>Allows different drivers to decide whether to use addBatch/executeBatch or to fall back to executing statements one by one.</p>
+ */
+public interface SQLBatchExecutionStrategy extends TypedSPI {
+    
+    /**
+     * Whether this strategy supports the given JDBC driver name.
+     *
+     * @param jdbcDriverName driver name from connection metadata
+     * @return true if supported
+     */
+    boolean supports(String jdbcDriverName);
+    
+    /**
+     * Execute SQLs using the strategy.
+     *
+     * @param connection connection
+     * @param statement statement
+     * @param sqls SQL collection
+     * @throws SQLException SQL exception
+     */
+    void execute(Connection connection, Statement statement, Collection<String> sqls) throws SQLException;
+}
diff --git 
a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/util/spi/impl/HiveSQLBatchExecutionStrategy.java
 
b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/util/spi/impl/HiveSQLBatchExecutionStrategy.java
new file mode 100644
index 00000000000..63b0b906117
--- /dev/null
+++ 
b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/util/spi/impl/HiveSQLBatchExecutionStrategy.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.test.e2e.env.container.util.spi.impl;
+
+import org.apache.shardingsphere.test.e2e.env.container.util.spi.SQLBatchExecutionStrategy;
+
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Collection;
+
+/**
+ * Hive SQL batch execution strategy.
+ */
+public final class HiveSQLBatchExecutionStrategy implements SQLBatchExecutionStrategy {
+    
+    @Override
+    public String getType() {
+        return "Hive";
+    }
+    
+    @Override
+    public boolean supports(final String jdbcDriverName) {
+        return null != jdbcDriverName && jdbcDriverName.toLowerCase().contains("hive");
+    }
+    
+    @Override
+    public void execute(final Connection connection, final Statement statement, final Collection<String> sqls) throws SQLException {
+        for (String each : sqls) {
+            statement.execute(each);
+        }
+    }
+}
diff --git 
a/test/e2e/env/src/test/resources/META-INF/services/org.apache.shardingsphere.test.e2e.env.container.util.spi.SQLBatchExecutionStrategy
 
b/test/e2e/env/src/test/resources/META-INF/services/org.apache.shardingsphere.test.e2e.env.container.util.spi.SQLBatchExecutionStrategy
new file mode 100644
index 00000000000..ba85ce87c67
--- /dev/null
+++ 
b/test/e2e/env/src/test/resources/META-INF/services/org.apache.shardingsphere.test.e2e.env.container.util.spi.SQLBatchExecutionStrategy
@@ -0,0 +1,18 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+org.apache.shardingsphere.test.e2e.env.container.util.spi.impl.HiveSQLBatchExecutionStrategy
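The three pieces above — the ServiceLoader lookup in SQLScriptUtils, the SQLBatchExecutionStrategy interface, and this META-INF/services registration — form a plain java.util.ServiceLoader extension point. As a rough sketch of how another dialect could opt out of addBatch/executeBatch in the same way (the class name and driver-name match below are hypothetical and not part of this commit), an additional strategy would only need an implementation plus one more line in the provider file:

package org.apache.shardingsphere.test.e2e.env.container.util.spi.impl;

import org.apache.shardingsphere.test.e2e.env.container.util.spi.SQLBatchExecutionStrategy;

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Collection;

public final class PrestoSQLBatchExecutionStrategy implements SQLBatchExecutionStrategy {
    
    @Override
    public String getType() {
        return "Presto";
    }
    
    @Override
    public boolean supports(final String jdbcDriverName) {
        // Hypothetical match on the name reported by DatabaseMetaData.getDriverName().
        return null != jdbcDriverName && jdbcDriverName.toLowerCase().contains("presto");
    }
    
    @Override
    public void execute(final Connection connection, final Statement statement, final Collection<String> sqls) throws SQLException {
        // Run each statement individually instead of relying on addBatch/executeBatch.
        for (String each : sqls) {
            statement.execute(each);
        }
    }
}

Its fully qualified class name would then be appended to the same org.apache.shardingsphere.test.e2e.env.container.util.spi.SQLBatchExecutionStrategy provider file shown above.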
diff --git 
a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/cases/casse/SQLE2ETestCase.java
 
b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/cases/casse/SQLE2ETestCase.java
index 08d89ddcc85..39dc64fcd7d 100644
--- 
a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/cases/casse/SQLE2ETestCase.java
+++ 
b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/cases/casse/SQLE2ETestCase.java
@@ -57,6 +57,9 @@ public final class SQLE2ETestCase {
     @XmlAttribute
     private boolean smoke;
     
+    @XmlAttribute(name = "skip-batch")
+    private boolean skipBatch;
+    
     @XmlElement(name = "assertion")
     private Collection<SQLE2ETestCaseAssertion> assertions = new LinkedList<>();
 }
diff --git 
a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/env/DataSetEnvironmentManager.java
 
b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/env/DataSetEnvironmentManager.java
index 56d0f7ab55d..373792cd8dd 100644
--- 
a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/env/DataSetEnvironmentManager.java
+++ 
b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/env/DataSetEnvironmentManager.java
@@ -24,6 +24,7 @@ import org.apache.shardingsphere.database.connector.core.type.DatabaseTypeFactor
 import org.apache.shardingsphere.database.connector.core.type.DatabaseTypeRegistry;
 import org.apache.shardingsphere.database.connector.opengauss.type.OpenGaussDatabaseType;
 import org.apache.shardingsphere.database.connector.postgresql.type.PostgreSQLDatabaseType;
+import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader;
 import org.apache.shardingsphere.infra.datanode.DataNode;
 import org.apache.shardingsphere.infra.executor.kernel.ExecutorEngine;
 import org.apache.shardingsphere.infra.executor.kernel.thread.ExecutorServiceManager;
@@ -41,8 +42,10 @@ import javax.xml.bind.JAXBException;
 import java.io.FileReader;
 import java.io.IOException;
 import java.sql.Connection;
+import java.sql.Date;
 import java.sql.PreparedStatement;
 import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
 import java.util.Collection;
 import java.util.LinkedHashMap;
 import java.util.LinkedList;
@@ -94,10 +97,10 @@ public final class DataSetEnvironmentManager {
             }
             String insertSQL;
             try (Connection connection = dataSourceMap.get(dataNode.getDataSourceName()).getConnection()) {
-                DatabaseType databaseType = DatabaseTypeFactory.get(connection.getMetaData().getURL());
-                insertSQL = generateInsertSQL(dataNode.getTableName(), dataSetMetaData.getColumns(), databaseType);
+                String insertTableName = dataNode.getTableName();
+                insertSQL = generateInsertSQL(insertTableName, dataSetMetaData.getColumns(), databaseType);
             }
-            fillDataTasks.add(new InsertTask(dataSourceMap.get(dataNode.getDataSourceName()), insertSQL, sqlValueGroups));
+            fillDataTasks.add(new InsertTask(dataSourceMap.get(dataNode.getDataSourceName()), insertSQL, sqlValueGroups, databaseType));
         }
         final List<Future<Void>> futures = EXECUTOR_SERVICE_MANAGER.getExecutorService().invokeAll(fillDataTasks);
         for (Future<Void> future : futures) {
@@ -192,23 +195,48 @@ public final class DataSetEnvironmentManager {
         
         private final Collection<SQLValueGroup> sqlValueGroups;
         
+        private final DatabaseType databaseType;
+        
         @Override
         public Void call() throws SQLException {
             try (
                     Connection connection = dataSource.getConnection();
                     PreparedStatement preparedStatement = connection.prepareStatement(insertSQL)) {
-                for (SQLValueGroup each : sqlValueGroups) {
-                    setParameters(preparedStatement, each);
-                    preparedStatement.addBatch();
+                boolean supportsBatchUpdates;
+                try {
+                    supportsBatchUpdates = connection.getMetaData().supportsBatchUpdates();
+                } catch (final SQLFeatureNotSupportedException ignored) {
+                    supportsBatchUpdates = false;
+                }
+                if (supportsBatchUpdates) {
+                    for (SQLValueGroup each : sqlValueGroups) {
+                        setParameters(preparedStatement, each);
+                        preparedStatement.addBatch();
+                    }
+                    preparedStatement.executeBatch();
+                } else {
+                    for (SQLValueGroup each : sqlValueGroups) {
+                        setParameters(preparedStatement, each);
+                        preparedStatement.executeUpdate();
+                    }
                 }
-                preparedStatement.executeBatch();
             }
             return null;
         }
         
         private void setParameters(final PreparedStatement preparedStatement, final SQLValueGroup sqlValueGroup) throws SQLException {
             for (SQLValue each : sqlValueGroup.getValues()) {
-                preparedStatement.setObject(each.getIndex(), each.getValue());
+                Object value = each.getValue();
+                int index = each.getIndex();
+                if ("Hive".equalsIgnoreCase(databaseType.getType())) {
+                    if (value instanceof Date) {
+                        preparedStatement.setDate(index, (java.sql.Date) value);
+                    } else {
+                        preparedStatement.setObject(index, value);
+                    }
+                } else {
+                    preparedStatement.setObject(index, value);
+                }
             }
         }
     }
@@ -223,8 +251,8 @@ public final class DataSetEnvironmentManager {
         @Override
         public Void call() throws SQLException {
             try (Connection connection = dataSource.getConnection()) {
+                DatabaseType databaseType = getDatabaseType(connection);
                 for (String each : tableNames) {
-                    DatabaseType databaseType = DatabaseTypeFactory.get(connection.getMetaData().getURL());
                     String quotedTableName = getQuotedTableName(each, databaseType);
                     try (PreparedStatement preparedStatement = connection.prepareStatement(String.format("TRUNCATE TABLE %s", quotedTableName))) {
                         preparedStatement.execute();
@@ -234,6 +262,19 @@ public final class DataSetEnvironmentManager {
             return null;
         }
         
+        private DatabaseType getDatabaseType(final Connection connection) throws SQLException {
+            try {
+                String url = connection.getMetaData().getURL();
+                return DatabaseTypeFactory.get(url);
+            } catch (final SQLFeatureNotSupportedException ex) {
+                String driverName = connection.getMetaData().getDriverName();
+                if (null != driverName && driverName.toLowerCase().contains("hive")) {
+                    return TypedSPILoader.getService(DatabaseType.class, "Hive");
+                }
+                throw ex;
+            }
+        }
+        
         private String getQuotedTableName(final String tableName, final DatabaseType databaseType) {
             DatabaseTypeRegistry databaseTypeRegistry = new DatabaseTypeRegistry(databaseType);
             return databaseTypeRegistry.getDialectDatabaseMetaData().getQuoteCharacter().wrap(databaseTypeRegistry.formatIdentifierPattern(tableName));
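The InsertTask change above is essentially the standard JDBC pattern of probing DatabaseMetaData.supportsBatchUpdates() — guarding against drivers such as Hive's that may throw SQLFeatureNotSupportedException from metadata calls — and falling back to row-by-row executeUpdate(). A minimal standalone sketch of that pattern, detached from the test-framework classes (class and method names here are illustrative):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.util.List;

final class BatchOrSingleInsert {
    
    static void insertAll(final Connection connection, final String insertSQL, final List<Object[]> rows) throws SQLException {
        boolean supportsBatch;
        try {
            // Some drivers answer the question, others refuse the metadata call entirely.
            supportsBatch = connection.getMetaData().supportsBatchUpdates();
        } catch (final SQLFeatureNotSupportedException ignored) {
            supportsBatch = false;
        }
        try (PreparedStatement statement = connection.prepareStatement(insertSQL)) {
            for (Object[] row : rows) {
                for (int i = 0; i < row.length; i++) {
                    statement.setObject(i + 1, row[i]);
                }
                if (supportsBatch) {
                    statement.addBatch();
                } else {
                    // Fallback: execute each row immediately when batching is unsupported.
                    statement.executeUpdate();
                }
            }
            if (supportsBatch) {
                statement.executeBatch();
            }
        }
    }
}

Keeping the probe outside the per-row loop means the metadata call happens once per connection rather than once per row.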
diff --git 
a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/framework/metadata/DialectQueryBehaviorProvider.java
 
b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/framework/metadata/DialectQueryBehaviorProvider.java
new file mode 100644
index 00000000000..f50b6ad8565
--- /dev/null
+++ 
b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/framework/metadata/DialectQueryBehaviorProvider.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.test.e2e.sql.framework.metadata;
+
+import org.apache.shardingsphere.database.connector.core.spi.DatabaseTypedSPI;
+import org.apache.shardingsphere.infra.spi.annotation.SingletonSPI;
+
+import java.util.Optional;
+
+/**
+ * Dialect query behavior provider for E2E assertions.
+ */
+@SingletonSPI
+public interface DialectQueryBehaviorProvider extends DatabaseTypedSPI {
+    
+    /**
+     * Get fallback ORDER BY clause when primary key is unknown.
+     *
+     * <p>Return value should not contain the leading "ORDER BY" keyword.
+     * For example: "1 ASC" or "some_column ASC". Empty means no fallback.</p>
+     *
+     * @return optional fallback ORDER BY clause (without leading ORDER BY)
+     */
+    default Optional<String> getFallbackOrderByWhenNoPrimaryKey() {
+        return Optional.empty();
+    }
+}
diff --git 
a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/framework/metadata/dialect/HiveDialectQueryBehaviorProvider.java
 
b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/framework/metadata/dialect/HiveDialectQueryBehaviorProvider.java
new file mode 100644
index 00000000000..5bf1325ef6f
--- /dev/null
+++ 
b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/framework/metadata/dialect/HiveDialectQueryBehaviorProvider.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.test.e2e.sql.framework.metadata.dialect;
+
+import org.apache.shardingsphere.test.e2e.sql.framework.metadata.DialectQueryBehaviorProvider;
+
+import java.util.Optional;
+
+/**
+ * Hive dialect query behavior provider.
+ */
+public final class HiveDialectQueryBehaviorProvider implements DialectQueryBehaviorProvider {
+    
+    @Override
+    public String getDatabaseType() {
+        return "Hive";
+    }
+    
+    @Override
+    public Optional<String> getFallbackOrderByWhenNoPrimaryKey() {
+        return Optional.of("1 ASC");
+    }
+}
diff --git 
a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/framework/param/array/E2ETestParameterGenerator.java
 
b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/framework/param/array/E2ETestParameterGenerator.java
index 550dd5aabf0..31dfb352e87 100644
--- 
a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/framework/param/array/E2ETestParameterGenerator.java
+++ 
b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/framework/param/array/E2ETestParameterGenerator.java
@@ -178,6 +178,9 @@ public final class E2ETestParameterGenerator {
     
     private Collection<E2ETestParameter> getCaseTestParameter(final SQLE2ETestCaseContext testCaseContext, final DatabaseType databaseType, final SQLCommandType sqlCommandType) {
         Collection<E2ETestParameter> result = new LinkedList<>();
+        if (testCaseContext.getTestCase().isSkipBatch() && "Hive".equalsIgnoreCase(databaseType.getType())) {
+            return Collections.emptyList();
+        }
         for (String each : envAdapters) {
             result.addAll(getCaseTestParameter(testCaseContext, each, databaseType, sqlCommandType));
         }
diff --git 
a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/it/sql/dml/BaseDMLE2EIT.java
 
b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/it/sql/dml/BaseDMLE2EIT.java
index 34a88eb66e3..3efe223651d 100644
--- 
a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/it/sql/dml/BaseDMLE2EIT.java
+++ 
b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/it/sql/dml/BaseDMLE2EIT.java
@@ -22,6 +22,7 @@ import com.google.common.collect.Sets;
 import lombok.Getter;
 import lombok.Setter;
 import lombok.SneakyThrows;
+import org.apache.shardingsphere.database.connector.core.spi.DatabaseTypedSPILoader;
 import org.apache.shardingsphere.database.connector.core.type.DatabaseType;
 import org.apache.shardingsphere.infra.datanode.DataNode;
 import org.apache.shardingsphere.infra.expr.entry.InlineExpressionParserFactory;
@@ -34,6 +35,7 @@ import org.apache.shardingsphere.test.e2e.sql.cases.dataset.DataSetLoader;
 import org.apache.shardingsphere.test.e2e.sql.cases.dataset.metadata.DataSetColumn;
 import org.apache.shardingsphere.test.e2e.sql.cases.dataset.metadata.DataSetMetaData;
 import org.apache.shardingsphere.test.e2e.sql.cases.dataset.row.DataSetRow;
+import org.apache.shardingsphere.test.e2e.sql.framework.metadata.DialectQueryBehaviorProvider;
 import org.apache.shardingsphere.test.e2e.sql.env.DataSetEnvironmentManager;
 import org.apache.shardingsphere.test.e2e.sql.env.SQLE2EEnvironmentEngine;
 import org.apache.shardingsphere.test.e2e.sql.framework.metadata.DialectDatabaseAssertionMetaDataFactory;
@@ -233,14 +235,27 @@ public abstract class BaseDMLE2EIT implements SQLE2EIT {
     private String generateFetchActualDataSQL(final Map<String, DataSource> actualDataSourceMap, final DataNode dataNode, final DatabaseType databaseType) throws SQLException {
         String tableName = dataNode.getTableName();
         Optional<String> primaryKeyColumnName = DialectDatabaseAssertionMetaDataFactory.getPrimaryKeyColumnName(databaseType, actualDataSourceMap.get(dataNode.getDataSourceName()), tableName);
-        return primaryKeyColumnName.map(optional -> String.format("SELECT * FROM %s ORDER BY %s ASC", tableName, optional)).orElseGet(() -> String.format("SELECT * FROM %s", tableName));
+        if (primaryKeyColumnName.isPresent()) {
+            return String.format("SELECT * FROM %s ORDER BY %s ASC", tableName, primaryKeyColumnName.get());
+        }
+        Optional<DialectQueryBehaviorProvider> behaviorProvider = DatabaseTypedSPILoader.findService(DialectQueryBehaviorProvider.class, databaseType);
+        if (behaviorProvider.isPresent()) {
+            Optional<String> fallbackOrderBy = behaviorProvider.get().getFallbackOrderByWhenNoPrimaryKey();
+            if (fallbackOrderBy.isPresent()) {
+                return String.format("SELECT * FROM %s ORDER BY %s", tableName, fallbackOrderBy.get());
+            }
+        }
+        return String.format("SELECT * FROM %s", tableName);
     }
     
     private void assertMetaData(final ResultSetMetaData actual, final Collection<DataSetColumn> expected) throws SQLException {
         assertThat(actual.getColumnCount(), is(expected.size()));
         int index = 1;
         for (DataSetColumn each : expected) {
-            assertThat(actual.getColumnLabel(index++).toUpperCase(), is(each.getName().toUpperCase()));
+            String actualLabel = actual.getColumnLabel(index++);
+            int lastDotIndex = actualLabel != null ? actualLabel.lastIndexOf('.') : -1;
+            String normalizedLabel = lastDotIndex >= 0 ? actualLabel.substring(lastDotIndex + 1) : actualLabel;
+            assertThat(normalizedLabel.toUpperCase(), is(each.getName().toUpperCase()));
         }
     }
     
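Both changes in this file are driven by Hive-specific behavior: ResultSetMetaData column labels can come back qualified as table.column, and tables created without primary keys would otherwise be fetched without a deterministic order. A small illustration of what the two code paths produce (the class name and sample values are made up, not part of the commit):

public final class HiveAssertionBehaviorExample {
    
    public static void main(final String[] args) {
        // Hive may report a qualified column label such as "t_user.user_id"; strip the table prefix.
        String actualLabel = "t_user.user_id";
        int lastDotIndex = actualLabel.lastIndexOf('.');
        String normalizedLabel = lastDotIndex >= 0 ? actualLabel.substring(lastDotIndex + 1) : actualLabel;
        System.out.println(normalizedLabel); // user_id
        
        // With no primary key, the Hive provider's "1 ASC" fallback yields a deterministic fetch SQL.
        System.out.println(String.format("SELECT * FROM %s ORDER BY %s", "t_user", "1 ASC"));
    }
}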
diff --git 
a/test/e2e/sql/src/test/resources/META-INF/services/org.apache.shardingsphere.test.e2e.sql.framework.metadata.DialectQueryBehaviorProvider
 
b/test/e2e/sql/src/test/resources/META-INF/services/org.apache.shardingsphere.test.e2e.sql.framework.metadata.DialectQueryBehaviorProvider
new file mode 100644
index 00000000000..8b3b354a253
--- /dev/null
+++ 
b/test/e2e/sql/src/test/resources/META-INF/services/org.apache.shardingsphere.test.e2e.sql.framework.metadata.DialectQueryBehaviorProvider
@@ -0,0 +1,18 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+org.apache.shardingsphere.test.e2e.sql.framework.metadata.dialect.HiveDialectQueryBehaviorProvider
diff --git 
a/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table_hive.xml
 
b/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table_hive.xml
new file mode 100644
index 00000000000..b96386db13d
--- /dev/null
+++ 
b/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table_hive.xml
@@ -0,0 +1,59 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~     http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<dataset update-count="1">
+    <metadata data-nodes="encrypt.t_user">
+        <column name="user_id" type="numeric" />
+        <column name="user_name_cipher" type="varchar" />
+        <column name="user_name_like" type="varchar" />
+        <column name="password_cipher" type="varchar" />
+        <column name="email_cipher" type="varchar" />
+        <column name="user_telephone_cipher" type="varchar" />
+        <column name="user_telephone_like" type="varchar" />
+        <column name="creation_date" type="datetime" />
+    </metadata>
+    <row data-node="encrypt.t_user" values="10, sVq8Lmm+j6bZE5EKSilJEQ==, 
yi`mht`m, aQol0b6th65d0aXe+zFPsQ==, 
WM0fHOH91JNWnHTkiqBdyNmzk4uJ7CCz4mB1va9Ya1M=, kLjLJIMnfyHT2nA+viaoaQ==, 
01454589811, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="11, fQ7IzBxKVuNHtUF6h6WSBg==, 
mhth, wuhmEKgdgrWQYt+Ev0hgGA==, svATu3uWv9KfiloWJeWx3A==, 
0kDFxndQdzauFwL/wyCsNQ==, 01454589810, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="12, AQRWSlufQPog/b64YRhu6Q==, 
x`mhxt, x7A+2jq9B6DSOSFtSOibdA==, nHJv9e6NiClIuGHOjHLvCAq2ZLhWcqfQ8/EQnIqMx+g=, 
a/SzSJLapt5iBXvF2c9ycw==, 01454589811, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="13, 5NqS4YvpT+mHBFqZOZ3QDA==, 
yi`pmht, zi6b4xYRjjV+bBk2R4wB+w==, 
MLBZczLjriUXvg3aM5QPTxMJbLjNh8yeNrSNBek/VTw=, b6VVhG+F6ujG8IMUZJAIFg==, 
01454589814, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="14, qeIY9od3u1KwhjihzLQUTQ==, 
yitph, 51UmlLAC+tUvdOAj8CjWfQ==, JCmeNdPyrKO5BW5zvhAA+g==, 
f995xinpZdKMVU5J5/yv3w==, 01454589815, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="15, VbNUtguwtpeGhHGnPJ3aXg==, 
mha`, +3/5CVbqoKhg3sqznKTFFQ==, T+X+e3Q3+ZNIXXmg/80uxg==, 
GETj+S6DrO042E7NuBXLBQ==, 01454589814, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="16, U0/Ao/w1u7L5avR3fAH2Og==, 
x`mhiht, jFfFMYxv02DjaFRuAoCDGw==, 
RNW/KRq5HeL2YTfAdXSyARMJbLjNh8yeNrSNBek/VTw=, +lbvjJwO7VO4HUKc0Mw0NA==, 
01454589815, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="17, zb1sgBigoMi7JPSoY4bAVw==, 
yite`, VFIjocgjujJCJc6waWXqJA==, 1vF/ET3nBxt7T7vVfAndZQ==, 
wFvs5BH6OikgveBeTEBwsQ==, 01454589818, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="18, rJzNIrFEnx296kW+N1YmMw==, 
ttmdq, LaODSKGyR7vZ1IvmBOe9vA==, 5u4GIQkJsWRmnJHWaHNSjg==, 
uwqm2O1Lv2tNTraJX1ym7Q==, 01454589819, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="19, qHwpQ9kteL8VX6iTUhNdbQ==, 
yiptt`m, MyOShk4kjRnds7CZfU5NCw==, 
HmYCo7QBfJ2E0EvaGHBCOBMJbLjNh8yeNrSNBek/VTw=, YLNQuuUPMGA21nhKWPzzsg==, 
01454589818, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="20, qCCmvf7OWRxbVbtLb0az1g==, upl, 
fzdTMkzpBvgNYmKSQAp8Fg==, gOoP4Mf0P4ISOJp6A4sRmg==, l4xa4HwOfs/jusoJon9Wzw==, 
01454589801, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="21, IYJ1COaRQ0gSjWMC/UAeMg==, 
lpad, 1uEDMeYh2jstbOf6kx/cqw==, tikMAFiQ37u2VgWqUT38Eg==, 
rGpr30UXfczXjCjdvPN+BA==, 01454589800, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="22, 7wvZZ7NVHgk6m1vB/sTC1Q==, 
idqqx, OirN3gvz9uBnrq88nfa1wQ==, T7K/Uz1O2m+3xvB0+c4nGQ==, 
7+fCU+VbQZKgLJXZPTTegA==, 01454589801, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="23, SbVQWl8JbnxflCfGJ7KZdA==, 
i`ldt, hWVVYdkdTUTgm08haeq+tw==, Uk3ju6GteCD1qEHns5ZhKA==, 
DpnV86FZefwBRmIAVBh2gg==, 01454589804, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="24, fx7OfSAYqVpjNa7LoKhXvw==, 
x`ed, N2W9ijAXNkBxhkvJiIwp0A==, lAAGItVLmb1H69++1MDrIA==, 
QrE62wAb8B+2cEPcs4Lm1Q==, 01454589805, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="25, wH3/LdWShD9aCb8eCIm3Tg==, 
qptd, GDixtt6NzPOVv6H0dmov5g==, T1yfJSyVxumZUfkDnmUQxA==, 
iU+AsGczboCRfU+Zr7mcpw==, 01454589804, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="26, GgJQTndbxyBZ2tECS8SmqQ==, 
apti, gLgVFLFIyyKwdQCXaw78Ag==, O+JIn9XZ3yq6RnKElHuqlA==, 
kwYlbu9aF7ndvMTcj8QBSg==, 01454589805, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="27, lv8w8g32kuTXNvSUUypOig==, 
i`dl, 8i0YH2mn6kXSyvBjM5p+Yg==, gqRoJF5S66SvBalc2RCo1A==, 
2ob/3UYqRsZA5VdScnaWxQ==, 01454589808, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="28, P9YCbFvWCIhcS99KyKH2zA==, 
ipqe`m, PRrI4z4FrWwLvcHPx9g4og==, y8q31Jj4PFSyZHiLVIxKEQq2ZLhWcqfQ8/EQnIqMx+g=, 
kDF2za26uOerlNYWYHRT2Q==, 01454589809, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="29, 5wu9XvlJAVtjKijhxt6SQQ==, 
itmhd, O4pgkLgz34N+C4bIUOQVnA==, UH7ihg16J61Np/EYMQnXIA==, 
z2hbJQD4dRkVVITNxAac5Q==, 01454589808, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="30, i50fpEe3j0VOy/Hbfmr5Bg==, 兹卜, 
MyOShk4kjRnds7CZfU5NCw==, WM0fHOH91JNWnHTkiqBdyNmzk4uJ7CCz4mB1va9Ya1M=, 
uRbQGcAhuXmxdIzRsgiy8g==, 09101401454, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="31, wLXX1Gs7zl0PkyJRMIxmCQ==, 兹付, 
fzdTMkzpBvgNYmKSQAp8Fg==, cq1LN85LmWHrEtS4gN/ac9mzk4uJ7CCz4mB1va9Ya1M=, 
jDL+p5OyKQXfQ4H8JpNKJQ==, 09101401454, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="32, 5NNYIZCq4Dxab0uKWXt93A==, 兹咔, 
1uEDMeYh2jstbOf6kx/cqw==, rehTd++DbFy3qYqeDJUjRRMJbLjNh8yeNrSNBek/VTw=, 
60fpnMdKCWeyKzxkdthn2Q==, 09101401454, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="33, 8kgeyqYaHUMq6di5BVt3Ow==, 伴伌, 
OirN3gvz9uBnrq88nfa1wQ==, svATu3uWv9KfiloWJeWx3A==, 60fpnMdKCWeyKzxkdthn2Q==, 
09101401454, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="34, THN1NIcHrqnfvPKR92lAKw==, 伴侁, 
ZSU1lmpj8cugQHD59QxPhQ==, KtMacpLCtRTSl+VeCPYrPwq2ZLhWcqfQ8/EQnIqMx+g=, 
60fpnMdKCWeyKzxkdthn2Q==, 09101401454, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="35, R3tsv8yeGKXz7WXLYfjkPA==, 伈夸噥, 
31jh3gwg1fgDFnd6iYxU+Q==, ZsrMzd0mjJ9H3fxe4UNVfQsU49iKH47f2rb+/uKuo9M=, 
60fpnMdKCWeyKzxkdthn2Q==, 09101401454, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="36, M2hIFm7Z/s4zGd+rPD1USA==, 伈僘噭, 
bO/8ha1eS/H8/3DugjdOAQ==, PyO5yO0W8v5cpLPhqASTHA==, 60fpnMdKCWeyKzxkdthn2Q==, 
09101401454, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="37, aXS0VfnqHIAnOAtDjsF/9Q==, 伈嶱啴, 
bO/8ha1eS/H8/3DugjdOAQ==, fwyOxfHtLxNuSCFmghYiY0qMsgbpjg5UIo3xmJOLGu0=, 
60fpnMdKCWeyKzxkdthn2Q==, 09101401454, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="38, 59/68izQEdnwNSueX1lPAA==, 伈乄, 
ilD/Tk7DUG4+EuznS1bNLg==, 2emhAeiXPr0kHbFrhYlM1dmzk4uJ7CCz4mB1va9Ya1M=, 
60fpnMdKCWeyKzxkdthn2Q==, 09101401454, 2017-08-08" />
+    <row data-node="encrypt.t_user" values="39, fn9LnNltUAOWO0F0iy0+Jw==, 伈妅, 
qe/WdUiSPP1RAsSSuejGJw==, zx2omwIbXHpEJeh8ta7HqQq2ZLhWcqfQ8/EQnIqMx+g=, 
60fpnMdKCWeyKzxkdthn2Q==, 09101401454, 2017-08-08" />
+</dataset>
diff --git a/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-delete.xml b/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-delete.xml
index 71084a404b2..5b124bfebcd 100644
--- a/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-delete.xml
+++ b/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-delete.xml
@@ -69,4 +69,8 @@
     <test-case sql="DELETE FROM t_order WHERE order_id = 1000 AND user_id = 
10;DELETE FROM t_order WHERE order_id = 1001 AND user_id = 11;DELETE FROM 
t_order WHERE user_id &lt; 29;" db-types="MySQL" scenario-types="db" 
sql-case-types="LITERAL" adapters="proxy">
         <assertion expected-data-file="delete_with_multiple.xml" />
     </test-case>
+
+    <test-case sql="DELETE FROM t_user WHERE user_id = ?" db-types="Hive" 
scenario-types="encrypt" skip-batch="true">
+        <assertion parameters="10:int" 
expected-data-file="delete_user_table.xml" />
+    </test-case>
 </e2e-test-cases>
diff --git a/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-insert.xml b/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-insert.xml
index 9a7f9e0e586..bdff9d55e0b 100644
--- a/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-insert.xml
+++ b/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-insert.xml
@@ -254,4 +254,8 @@
             <destroy-sql sql="DELETE FROM t_single WHERE id = 1" />
         </assertion>
     </test-case>
+
+    <test-case sql="INSERT INTO t_user (user_id, user_name, password, email, 
telephone, creation_date) VALUES (?, ?, '123456', '[email protected]', 
'12341234123', '2018-08-08'), (?, ?, '23456', '[email protected]', '23452345456', 
'2019-08-08')" db-types="Hive" scenario-types="encrypt" skip-batch="true">
+        <assertion parameters="40:int, tomas:String, 41:int, mike:String" 
expected-data-file="batch_insert_into_user_table.xml" />
+    </test-case>
 </e2e-test-cases>
diff --git a/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-update.xml b/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-update.xml
index 1f200e1fa5c..cfd29b4d818 100644
--- a/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-update.xml
+++ b/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-update.xml
@@ -109,4 +109,8 @@
     <test-case sql="UPDATE t_data_type_date SET creation_date = ?, update_date 
= ? where id = ?" db-types="MySQL" scenario-types="passthrough">
         <assertion parameters="2018-08-08:Date, 2018-08-08:datetime, 1:int" 
expected-data-file="update_date_values.xml"/>
     </test-case>
+
+    <test-case sql="UPDATE t_user SET password = ? WHERE user_id = ?" 
db-types="Hive" scenario-types="encrypt" skip-batch="true">
+        <assertion parameters="222222:String, 11:int" 
expected-data-file="update_user_table_hive.xml" />
+    </test-case>
 </e2e-test-cases>
diff --git a/test/e2e/sql/src/test/resources/cases/dql/e2e-dql-select-group-by.xml b/test/e2e/sql/src/test/resources/cases/dql/e2e-dql-select-group-by.xml
index 6fb9e383ddb..8b7ad2d8b79 100644
--- a/test/e2e/sql/src/test/resources/cases/dql/e2e-dql-select-group-by.xml
+++ b/test/e2e/sql/src/test/resources/cases/dql/e2e-dql-select-group-by.xml
@@ -82,4 +82,8 @@
                scenario-comments="Test MAX DISTINCT returns NULL when no data 
matches">
         <assertion expected-data-source-name="read_dataset" />
     </test-case>
+
+    <test-case sql="SELECT user_id, COUNT(*) AS user_count FROM t_user GROUP 
BY user_id ORDER BY user_id" db-types="Hive" scenario-types="mask">
+        <assertion expected-data-source-name="expected_dataset" />
+    </test-case>
 </e2e-test-cases>
diff --git a/test/e2e/sql/src/test/resources/cases/dql/e2e-dql-select-order-by.xml b/test/e2e/sql/src/test/resources/cases/dql/e2e-dql-select-order-by.xml
index 0d82cc65254..c060f79d376 100644
--- a/test/e2e/sql/src/test/resources/cases/dql/e2e-dql-select-order-by.xml
+++ b/test/e2e/sql/src/test/resources/cases/dql/e2e-dql-select-order-by.xml
@@ -157,7 +157,7 @@
         <assertion expected-data-source-name="read_dataset" />
     </test-case>
     
-    <test-case sql="SELECT * FROM t_user ORDER BY user_id" 
db-types="MySQL,PostgreSQL,openGauss" 
scenario-types="mask,mask_encrypt,mask_sharding,mask_encrypt_sharding">
+    <test-case sql="SELECT * FROM t_user ORDER BY user_id" 
db-types="MySQL,PostgreSQL,openGauss,Hive" 
scenario-types="mask,mask_encrypt,mask_sharding,mask_encrypt_sharding">
         <assertion expected-data-source-name="expected_dataset" />
     </test-case>
 </e2e-test-cases>
diff --git a/test/e2e/sql/src/test/resources/cases/dql/e2e-dql-select.xml b/test/e2e/sql/src/test/resources/cases/dql/e2e-dql-select.xml
index 699e10b5e90..634f2cabd12 100644
--- a/test/e2e/sql/src/test/resources/cases/dql/e2e-dql-select.xml
+++ b/test/e2e/sql/src/test/resources/cases/dql/e2e-dql-select.xml
@@ -187,4 +187,8 @@
     <test-case sql="select * from shardingsphere.cluster_information;" 
db-types="MySQL,PostgreSQL,openGauss" scenario-types="db">
         <assertion expected-data-file="select_cluster_information.xml" />
     </test-case>
+
+    <test-case sql="SELECT user_name, telephone FROM t_user" db-types="Hive" 
scenario-types="mask">
+        <assertion expected-data-source-name="expected_dataset" />
+    </test-case>
 </e2e-test-cases>
diff --git 
a/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/init-sql/hive/50-scenario-actual-init.sql
 
b/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/init-sql/hive/50-scenario-actual-init.sql
index 112181e4c07..c6146edc501 100644
--- 
a/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/init-sql/hive/50-scenario-actual-init.sql
+++ 
b/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/init-sql/hive/50-scenario-actual-init.sql
@@ -15,14 +15,25 @@
 -- limitations under the License.
 --
 
-SET character_set_database='utf8';
-SET character_set_server='utf8';
+SET hive.exec.dynamic.partition=true;
+SET hive.exec.dynamic.partition.mode=nonstrict;
 
 DROP DATABASE IF EXISTS encrypt;
 CREATE DATABASE IF NOT EXISTS encrypt;
+USE encrypt;
 
-CREATE TABLE encrypt.t_order (order_id INT PRIMARY KEY, user_id INT NOT NULL, status VARCHAR(50) NOT NULL, merchant_id INT, remark VARCHAR(50) NOT NULL, creation_date DATE NOT NULL);
-CREATE TABLE encrypt.t_order_item (item_id INT PRIMARY KEY, order_id INT NOT NULL, user_id INT NOT NULL, product_id INT NOT NULL, quantity INT NOT NULL, creation_date DATE NOT NULL);
-CREATE TABLE encrypt.t_user (user_id INT PRIMARY KEY, user_name_cipher VARCHAR(50) NOT NULL, user_name_like VARCHAR(50) NOT NULL, password_cipher VARCHAR(50) NOT NULL, email_cipher VARCHAR(50) NOT NULL, user_telephone_cipher CHAR(50) NOT NULL, user_telephone_like CHAR(11) NOT NULL, creation_date DATE NOT NULL);
-CREATE TABLE encrypt.t_merchant (merchant_id INT PRIMARY KEY, country_id SMALLINT NOT NULL, merchant_name VARCHAR(50) NOT NULL, business_code_cipher VARCHAR(50) NOT NULL, business_code_like VARCHAR(50) NOT NULL, merchant_telephone_cipher CHAR(50) NOT NULL, merchant_telephone_like CHAR(11) NOT NULL, creation_date DATE NOT NULL);
+CREATE TABLE encrypt.t_order (order_id INT, user_id INT, status STRING, merchant_id INT, remark STRING, creation_date DATE)
+    STORED AS ORC
+TBLPROPERTIES ('transactional'='true');
 
+CREATE TABLE encrypt.t_order_item (item_id INT, order_id INT, user_id INT, product_id INT, quantity INT, creation_date DATE)
+    STORED AS ORC
+TBLPROPERTIES ('transactional'='true');
+
+CREATE TABLE encrypt.t_user (user_id INT, user_name_cipher STRING, user_name_like STRING, password_cipher STRING, email_cipher STRING, user_telephone_cipher STRING, user_telephone_like STRING, creation_date DATE)
+    STORED AS ORC
+TBLPROPERTIES ('transactional'='true');
+
+CREATE TABLE encrypt.t_merchant (merchant_id INT, country_id SMALLINT, merchant_name STRING, business_code_cipher STRING, business_code_like STRING, merchant_telephone_cipher STRING, merchant_telephone_like STRING, creation_date DATE)
+    STORED AS ORC
+TBLPROPERTIES ('transactional'='true');
diff --git 
a/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/expected/init-sql/hive/60-scenario-expected-init.sql
 
b/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/expected/init-sql/hive/60-scenario-expected-init.sql
index 235c38afa8b..4b804d66cf6 100644
--- 
a/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/expected/init-sql/hive/60-scenario-expected-init.sql
+++ 
b/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/expected/init-sql/hive/60-scenario-expected-init.sql
@@ -14,14 +14,25 @@
 -- See the License for the specific language governing permissions and
 -- limitations under the License.
 --
-SET character_set_database='utf8';
-SET character_set_server='utf8';
+SET hive.exec.dynamic.partition=true;
+SET hive.exec.dynamic.partition.mode=nonstrict;
 
 DROP DATABASE IF EXISTS expected_dataset;
 CREATE DATABASE IF NOT EXISTS expected_dataset;
+USE expected_dataset;
 
-CREATE TABLE expected_dataset.t_order (order_id INT PRIMARY KEY, user_id INT NOT NULL, status VARCHAR(50) NOT NULL, merchant_id INT, remark VARCHAR(50) NOT NULL, creation_date DATE NOT NULL);
-CREATE TABLE expected_dataset.t_order_item (item_id INT PRIMARY KEY, order_id INT NOT NULL, user_id INT NOT NULL, product_id INT NOT NULL, quantity INT NOT NULL, creation_date DATE NOT NULL);
-CREATE TABLE expected_dataset.t_user (user_id INT PRIMARY KEY, user_name VARCHAR(50) NOT NULL, password VARCHAR(50) NOT NULL, email VARCHAR(50) NOT NULL, telephone CHAR(11) NOT NULL, creation_date DATE NOT NULL);
-CREATE TABLE expected_dataset.t_merchant (merchant_id INT PRIMARY KEY, country_id SMALLINT NOT NULL, merchant_name VARCHAR(50) NOT NULL, business_code VARCHAR(50) NOT NULL, telephone CHAR(11) NOT NULL, creation_date DATE NOT NULL);
+CREATE TABLE expected_dataset.t_order (order_id INT, user_id INT, status STRING, merchant_id INT, remark STRING, creation_date DATE)
+    STORED AS ORC
+TBLPROPERTIES ('transactional'='true');
 
+CREATE TABLE expected_dataset.t_order_item (item_id INT, order_id INT, user_id INT, product_id INT, quantity INT, creation_date DATE)
+    STORED AS ORC
+TBLPROPERTIES ('transactional'='true');
+
+CREATE TABLE expected_dataset.t_user (user_id INT, user_name STRING, password STRING, email STRING, telephone STRING, creation_date DATE)
+    STORED AS ORC
+TBLPROPERTIES ('transactional'='true');
+
+CREATE TABLE expected_dataset.t_merchant (merchant_id INT, country_id SMALLINT, merchant_name STRING, business_code STRING, telephone STRING, creation_date DATE)
+    STORED AS ORC
+TBLPROPERTIES ('transactional'='true');
diff --git 
a/test/e2e/sql/src/test/resources/env/scenario/mask/data/actual/init-sql/hive/50-scenario-actual-init.sql
 
b/test/e2e/sql/src/test/resources/env/scenario/mask/data/actual/init-sql/hive/50-scenario-actual-init.sql
new file mode 100644
index 00000000000..3c0d7fbd075
--- /dev/null
+++ 
b/test/e2e/sql/src/test/resources/env/scenario/mask/data/actual/init-sql/hive/50-scenario-actual-init.sql
@@ -0,0 +1,33 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one or more
+-- contributor license agreements.  See the NOTICE file distributed with
+-- this work for additional information regarding copyright ownership.
+-- The ASF licenses this file to You under the Apache License, Version 2.0
+-- (the "License"); you may not use this file except in compliance with
+-- the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+
+SET hive.exec.dynamic.partition=true;
+SET hive.exec.dynamic.partition.mode=nonstrict;
+
+CREATE DATABASE IF NOT EXISTS mask;
+USE mask;
+
+CREATE TABLE IF NOT EXISTS t_user (
+    user_id INT,
+    user_name STRING,
+    password STRING,
+    email STRING,
+    telephone STRING,
+    creation_date DATE
+)
+STORED AS ORC
+TBLPROPERTIES ('transactional'='true');
diff --git 
a/test/e2e/sql/src/test/resources/env/scenario/mask/data/expected/init-sql/hive/60-scenario-expected-init.sql
 
b/test/e2e/sql/src/test/resources/env/scenario/mask/data/expected/init-sql/hive/60-scenario-expected-init.sql
new file mode 100644
index 00000000000..2996c1d11c8
--- /dev/null
+++ 
b/test/e2e/sql/src/test/resources/env/scenario/mask/data/expected/init-sql/hive/60-scenario-expected-init.sql
@@ -0,0 +1,33 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one or more
+-- contributor license agreements.  See the NOTICE file distributed with
+-- this work for additional information regarding copyright ownership.
+-- The ASF licenses this file to You under the Apache License, Version 2.0
+-- (the "License"); you may not use this file except in compliance with
+-- the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+
+SET hive.exec.dynamic.partition=true;
+SET hive.exec.dynamic.partition.mode=nonstrict;
+
+CREATE DATABASE IF NOT EXISTS expected_dataset;
+USE expected_dataset;
+
+CREATE TABLE IF NOT EXISTS t_user (
+    user_id INT,
+    user_name STRING,
+    password STRING,
+    email STRING,
+    telephone STRING,
+    creation_date DATE
+)
+STORED AS ORC
+TBLPROPERTIES ('transactional'='true');
diff --git 
a/test/e2e/sql/src/test/resources/env/scenario/mask/jdbc/conf/hive/rules.yaml 
b/test/e2e/sql/src/test/resources/env/scenario/mask/jdbc/conf/hive/rules.yaml
new file mode 100644
index 00000000000..43f80094c2d
--- /dev/null
+++ 
b/test/e2e/sql/src/test/resources/env/scenario/mask/jdbc/conf/hive/rules.yaml
@@ -0,0 +1,60 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+databaseName: mask
+
+dataSources:
+  mask:
+    url: jdbc:hive2://hive.mask.host:10000/mask?ssl=false&useUnicode=true&characterEncoding=utf-8&hive.exec.dynamic.partition=true&hive.exec.dynamic.partition.mode=nonstrict
+    dataSourceClassName: com.zaxxer.hikari.HikariDataSource
+    connectionTimeoutMilliseconds: 30000
+    idleTimeoutMilliseconds: 60000
+    maxLifetimeMilliseconds: 1800000
+    maxPoolSize: 2
+    minPoolSize: 2
+
+rules:
+  - !SINGLE
+    tables:
+      - "*.*"
+  - !MASK
+    maskAlgorithms:
+      md5_mask:
+        type: MD5
+      mask_before_special_chars_mask:
+        type: MASK_BEFORE_SPECIAL_CHARS
+        props:
+          special-chars: '@'
+          replace-char: '*'
+      keep_first_n_last_m_mask:
+        type: KEEP_FIRST_N_LAST_M
+        props:
+          first-n: 3
+          last-m: 4
+          replace-char: '*'
+    tables:
+      t_user:
+        columns:
+          password:
+            maskAlgorithm: md5_mask
+          email:
+            maskAlgorithm: mask_before_special_chars_mask
+          telephone:
+            maskAlgorithm: keep_first_n_last_m_mask
+
+props:
+  sql-show: true
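For context on the mask rules above, the configured algorithm types behave roughly as follows on read: MD5 replaces the password value with its digest, MASK_BEFORE_SPECIAL_CHARS replaces every character before '@' in email with '*', and KEEP_FIRST_N_LAST_M keeps only the first 3 and last 4 characters of telephone. A rough Java illustration of that behavior (sample values are invented; this is not the mask algorithm SPI implementation itself):

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;

public final class MaskRuleIllustration {
    
    public static void main(final String[] args) throws Exception {
        // MD5 mask: the query result exposes the MD5 digest instead of the original password.
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        StringBuilder maskedPassword = new StringBuilder();
        for (byte each : md5.digest("123456".getBytes(StandardCharsets.UTF_8))) {
            maskedPassword.append(String.format("%02x", each));
        }
        
        // MASK_BEFORE_SPECIAL_CHARS ('@', '*'): every character before '@' is replaced with '*'.
        String email = "[email protected]";
        int atIndex = email.indexOf('@');
        String maskedEmail = "*".repeat(atIndex) + email.substring(atIndex);
        
        // KEEP_FIRST_N_LAST_M (first-n=3, last-m=4, '*'): only the first 3 and last 4 characters survive.
        String telephone = "12341234123";
        String maskedTelephone = telephone.substring(0, 3) + "*".repeat(telephone.length() - 7) + telephone.substring(telephone.length() - 4);
        
        System.out.println(maskedPassword + " / " + maskedEmail + " / " + maskedTelephone);
    }
}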
