This is an automated email from the ASF dual-hosted git repository.
kadir pushed a commit to branch 4.x-HBase-1.3
in repository https://gitbox.apache.org/repos/asf/phoenix.git
The following commit(s) were added to refs/heads/4.x-HBase-1.3 by this push:
new 1cccd89 PHOENIX-5674 IndexTool to not write already correct index rows/CFs
1cccd89 is described below
commit 1cccd89fe36e05e08ca2b5058f4d46373c69b327
Author: Kadir
AuthorDate: Tue Jan 14 15:47:33 2020 -0800
PHOENIX-5674 IndexTool to not write already correct index rows/CFs
---
.../org/apache/phoenix/end2end/IndexToolIT.java    |  80 +-
.../coprocessor/IndexRebuildRegionScanner.java     | 280 +
.../apache/phoenix/mapreduce/index/IndexTool.java  |  15 +-
3 files changed, 262 insertions(+), 113 deletions(-)
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java
index 2eeeb4a..ce24e6d 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java
@@ -37,26 +37,29 @@ import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;
+import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.coprocessor.ObserverContext;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver;
import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.Job;
-import org.apache.phoenix.end2end.index.GlobalIndexCheckerIT;
import org.apache.phoenix.hbase.index.IndexRegionObserver;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.mapreduce.index.IndexTool;
@@ -378,6 +381,24 @@ public class IndexToolIT extends BaseUniqueNamesOwnClusterIT {
}
}
+    public static class MutationCountingRegionObserver extends SimpleRegionObserver {
+        public static AtomicInteger mutationCount = new AtomicInteger(0);
+
+        public static void setMutationCount(int value) {
+            mutationCount.set(value);
+        }
+
+        public static int getMutationCount() {
+            return mutationCount.get();
+        }
+
+        @Override
+        public void preBatchMutate(ObserverContext<RegionCoprocessorEnvironment> c,
+                                   MiniBatchOperationInProgress<Mutation> miniBatchOp) throws HBaseIOException {
+            mutationCount.addAndGet(miniBatchOp.size());
+        }
+    }
+
private Cell getErrorMessageFromIndexToolOutputTable(Connection conn,
String dataTableFullName, String indexTableFullName)
throws Exception {
byte[] indexTableFullNameBytes = Bytes.toBytes(indexTableFullName);
@@ -416,6 +437,53 @@ public class IndexToolIT extends BaseUniqueNamesOwnClusterIT {
}
@Test
+    public void testIndexToolVerifyBeforeAndBothOptions() throws Exception {
+        // This test is for building non-transactional global indexes with direct api
+        if (localIndex || transactional || !directApi || useSnapshot) {
+            return;
+        }
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
+            String schemaName = generateUniqueName();
+            String dataTableName = generateUniqueName();
+            String dataTableFullName = SchemaUtil.getTableName(schemaName, dataTableName);
+            String indexTableName = generateUniqueName();
+            String viewName = generateUniqueName();
+            String viewFullName = SchemaUtil.getTableName(schemaName, viewName);
+            conn.createStatement().execute("CREATE TABLE " + dataTableFullName
+                    + " (ID INTEGER NOT NULL PRIMARY KEY, NAME VARCHAR, ZIP INTEGER) "
+                    + tableDDLOptions);
+            conn.commit();
+            conn.createStatement().execute("CREATE VIEW " + viewFullName + " AS SELECT * FROM "