hbase git commit: HBASE-16778 Move testIllegalTableDescriptor out from TestFromClientSide

2016-10-05 Thread mbertozzi
Repository: hbase
Updated Branches:
  refs/heads/master b548d4978 -> eb33b60a9


HBASE-16778 Move testIllegalTableDescriptor out from TestFromClientSide


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/eb33b60a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/eb33b60a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/eb33b60a

Branch: refs/heads/master
Commit: eb33b60a954d8695f07b5ce71501760d732a85b6
Parents: b548d49
Author: Matteo Bertozzi 
Authored: Wed Oct 5 20:04:18 2016 -0700
Committer: Matteo Bertozzi 
Committed: Wed Oct 5 20:04:18 2016 -0700

--
 .../hadoop/hbase/client/TestFromClientSide.java | 148 -
 .../client/TestIllegalTableDescriptor.java  | 218 +++
 2 files changed, 218 insertions(+), 148 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/eb33b60a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index 50a566a..6bd9ccd 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -88,7 +88,6 @@ import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
 import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType;
@@ -107,10 +106,8 @@ import 
org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.Pair;
-import org.apache.log4j.AppenderSkeleton;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
-import org.apache.log4j.spi.LoggingEvent;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -5293,151 +5290,6 @@ public class TestFromClientSide {
   }
 
   @Test
-  public void testIllegalTableDescriptor() throws Exception {
-HTableDescriptor htd = new 
HTableDescriptor(TableName.valueOf("testIllegalTableDescriptor"));
-HColumnDescriptor hcd = new HColumnDescriptor(FAMILY);
-
-// create table with 0 families
-checkTableIsIllegal(htd);
-htd.addFamily(hcd);
-checkTableIsLegal(htd);
-
-htd.setMaxFileSize(1024); // 1K
-checkTableIsIllegal(htd);
-htd.setMaxFileSize(0);
-checkTableIsIllegal(htd);
-htd.setMaxFileSize(1024 * 1024 * 1024); // 1G
-checkTableIsLegal(htd);
-
-htd.setMemStoreFlushSize(1024);
-checkTableIsIllegal(htd);
-htd.setMemStoreFlushSize(0);
-checkTableIsIllegal(htd);
-htd.setMemStoreFlushSize(128 * 1024 * 1024); // 128M
-checkTableIsLegal(htd);
-
-htd.setRegionSplitPolicyClassName("nonexisting.foo.class");
-checkTableIsIllegal(htd);
-htd.setRegionSplitPolicyClassName(null);
-checkTableIsLegal(htd);
-
-hcd.setBlocksize(0);
-checkTableIsIllegal(htd);
-hcd.setBlocksize(1024 * 1024 * 128); // 128M
-checkTableIsIllegal(htd);
-hcd.setBlocksize(1024);
-checkTableIsLegal(htd);
-
-hcd.setTimeToLive(0);
-checkTableIsIllegal(htd);
-hcd.setTimeToLive(-1);
-checkTableIsIllegal(htd);
-hcd.setTimeToLive(1);
-checkTableIsLegal(htd);
-
-hcd.setMinVersions(-1);
-checkTableIsIllegal(htd);
-hcd.setMinVersions(3);
-try {
-  hcd.setMaxVersions(2);
-  fail();
-} catch (IllegalArgumentException ex) {
-  // expected
-  hcd.setMaxVersions(10);
-}
-checkTableIsLegal(htd);
-
-// HBASE-13776 Setting illegal versions for HColumnDescriptor
-//  does not throw IllegalArgumentException
-// finally, minVersions must be less than or equal to maxVersions
-hcd.setMaxVersions(4);
-hcd.setMinVersions(5);
-checkTableIsIllegal(htd);
-hcd.setMinVersions(3);
-
-hcd.setScope(-1);
-checkTableIsIllegal(htd);
-hcd.setScope(0);
-checkTableIsLegal(htd);
-
-try {
-  hcd.setDFSReplication((short) -1);
-  fail("Illegal value for setDFSReplication did not throw");
-} catch (IllegalArgumentException e) {
-  // pass
-}
-// set an illegal DFS replication value by hand
-

hbase git commit: HBASE-16776 Remove duplicated versions of countRow() in tests

2016-10-05 Thread mbertozzi
Repository: hbase
Updated Branches:
  refs/heads/master 06758bf63 -> b548d4978


HBASE-16776 Remove duplicated versions of countRow() in tests


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b548d497
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b548d497
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b548d497

Branch: refs/heads/master
Commit: b548d4978b0bdbfc6bd4e68c3d13e00f8ea4002e
Parents: 06758bf
Author: Matteo Bertozzi 
Authored: Wed Oct 5 19:45:50 2016 -0700
Committer: Matteo Bertozzi 
Committed: Wed Oct 5 19:45:50 2016 -0700

--
 .../hadoop/hbase/HBaseTestingUtility.java   | 34 
 .../hadoop/hbase/client/TestFromClientSide.java | 37 +
 .../procedure/TestServerCrashProcedure.java | 12 +-
 .../regionserver/TestMobStoreCompaction.java| 39 --
 .../TestRegionMergeTransaction.java | 37 +
 .../regionserver/TestSplitTransaction.java  | 42 ++--
 .../hadoop/hbase/util/BaseTestHBaseFsck.java| 18 ++---
 7 files changed, 72 insertions(+), 147 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b548d497/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index cc384de..c74c399 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -2219,13 +2219,7 @@ public class HBaseTestingUtility extends 
HBaseCommonTestingUtility {
 for (byte[] family: families) {
   scan.addFamily(family);
 }
-ResultScanner results = table.getScanner(scan);
-int count = 0;
-for (@SuppressWarnings("unused") Result res : results) {
-  count++;
-}
-results.close();
-return count;
+return countRows(table, scan);
   }
 
   /**
@@ -2240,6 +2234,32 @@ public class HBaseTestingUtility extends 
HBaseCommonTestingUtility {
 }
   }
 
+  public int countRows(final Region region) throws IOException {
+return countRows(region, new Scan());
+  }
+
+  public int countRows(final Region region, final Scan scan) throws 
IOException {
+InternalScanner scanner = region.getScanner(scan);
+try {
+  return countRows(scanner);
+} finally {
+  scanner.close();
+}
+  }
+
+  public int countRows(final InternalScanner scanner) throws IOException {
+// Do not retrieve the mob data when scanning
+int scannedCount = 0;
+List results = new ArrayList();
+boolean hasMore = true;
+while (hasMore) {
+  hasMore = scanner.next(results);
+  scannedCount += results.size();
+  results.clear();
+}
+return scannedCount;
+  }
+
   /**
* Return an md5 digest of the entire contents of a table.
*/

http://git-wip-us.apache.org/repos/asf/hbase/blob/b548d497/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index f10cce3a..50a566a 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -504,7 +504,7 @@ public class TestFromClientSide {
 byte [] endKey = regions.get(0).getRegionInfo().getEndKey();
 // Count rows with a filter that stops us before passed 'endKey'.
 // Should be count of rows in first region.
-int endKeyCount = countRows(t, createScanWithRowFilter(endKey));
+int endKeyCount = TEST_UTIL.countRows(t, createScanWithRowFilter(endKey));
 assertTrue(endKeyCount < rowCount);
 
 // How do I know I did not got to second region?  Thats tough.  Can't 
really
@@ -516,29 +516,29 @@ public class TestFromClientSide {
 // New test.  Make it so scan goes into next region by one and then two.
 // Make sure count comes out right.
 byte [] key = new byte [] {endKey[0], endKey[1], (byte)(endKey[2] + 1)};
-int plusOneCount = countRows(t, createScanWithRowFilter(key));
+int plusOneCount = TEST_UTIL.countRows(t, createScanWithRowFilter(key));
 assertEquals(endKeyCount + 1, plusOneCount);
 key = new byte [] {endKey[0], endKey[1], (byte)(endKey[2] + 2)};
-int plusTwoCount = countRows(t, createScanWithRowFilter(key));
+  

[03/10] hbase git commit: HBASE-16727 Backup refactoring: remove MR dependencies from HMaster (Vladimir Rodionov)

2016-10-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase/blob/b14e2ab1/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.java
new file mode 100644
index 000..9355d07
--- /dev/null
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.java
@@ -0,0 +1,540 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.backup.impl;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.backup.BackupCopyService;
+import org.apache.hadoop.hbase.backup.BackupInfo;
+import org.apache.hadoop.hbase.backup.BackupInfo.BackupPhase;
+import org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
+import org.apache.hadoop.hbase.backup.BackupRequest;
+import org.apache.hadoop.hbase.backup.BackupRestoreServerFactory;
+import org.apache.hadoop.hbase.backup.BackupType;
+import org.apache.hadoop.hbase.backup.HBackupFileSystem;
+import org.apache.hadoop.hbase.backup.impl.BackupException;
+import org.apache.hadoop.hbase.backup.impl.BackupManifest;
+import org.apache.hadoop.hbase.backup.impl.BackupManifest.BackupImage;
+import org.apache.hadoop.hbase.backup.impl.BackupRestoreConstants;
+import org.apache.hadoop.hbase.backup.master.LogRollMasterProcedureManager;
+import org.apache.hadoop.hbase.backup.util.BackupClientUtil;
+import org.apache.hadoop.hbase.backup.util.BackupServerUtil;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
+import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.FSUtils;
+
+@InterfaceAudience.Private
+public class FullTableBackupClient {
+  private static final Log LOG = 
LogFactory.getLog(FullTableBackupClient.class);
+
+  private Configuration conf;
+  private Connection conn;
+  private String backupId;
+  private List tableList;
+  HashMap newTimestamps = null;
+
+  private BackupManager backupManager;
+  private BackupInfo backupContext;
+
+  public FullTableBackupClient() {
+// Required by the Procedure framework to create the procedure on replay
+  }
+
+  public FullTableBackupClient(final Connection conn, final String backupId,
+  BackupRequest request)
+  throws IOException {
+backupManager = new BackupManager(conn, conn.getConfiguration());
+this.backupId = backupId;
+this.tableList = request.getTableList();
+this.conn = conn;
+this.conf = conn.getConfiguration();
+backupContext =
+backupManager.createBackupContext(backupId, BackupType.FULL, 
tableList, 
+  request.getTargetRootDir(),
+  request.getWorkers(), request.getBandwidth());
+if (tableList == null || tableList.isEmpty()) {
+  this.tableList = new ArrayList<>(backupContext.getTables());
+}
+  }
+
+  /**
+   * Begin the overall backup.
+   * @param backupContext backup context
+   * @throws IOException exception
+   */
+  static void beginBackup(BackupManager backupManager, BackupInfo 
backupContext) throws IOException {
+backupManager.setBackupContext(backupContext);
+// set the start timestamp of the overall backup
+long startTs = EnvironmentEdgeManager.currentTime();
+backupContext.setStartTs(startTs);
+// set overall backup status: ongoing
+backupContext.setState(BackupState.RUNNING);
+LOG.info("Backup " + 

[02/10] hbase git commit: HBASE-16727 Backup refactoring: remove MR dependencies from HMaster (Vladimir Rodionov)

2016-10-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase/blob/b14e2ab1/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/master/FullTableBackupProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/master/FullTableBackupProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/master/FullTableBackupProcedure.java
deleted file mode 100644
index 2d41423..000
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/master/FullTableBackupProcedure.java
+++ /dev/null
@@ -1,777 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.backup.master;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.NotServingRegionException;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.backup.BackupCopyService;
-import org.apache.hadoop.hbase.backup.BackupInfo;
-import org.apache.hadoop.hbase.backup.BackupInfo.BackupPhase;
-import org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
-import org.apache.hadoop.hbase.backup.BackupRestoreServerFactory;
-import org.apache.hadoop.hbase.backup.BackupType;
-import org.apache.hadoop.hbase.backup.HBackupFileSystem;
-import org.apache.hadoop.hbase.backup.impl.BackupException;
-import org.apache.hadoop.hbase.backup.impl.BackupManager;
-import org.apache.hadoop.hbase.backup.impl.BackupManifest;
-import org.apache.hadoop.hbase.backup.impl.BackupManifest.BackupImage;
-import org.apache.hadoop.hbase.backup.impl.BackupRestoreConstants;
-import org.apache.hadoop.hbase.backup.util.BackupClientUtil;
-import org.apache.hadoop.hbase.backup.util.BackupServerUtil;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
-import org.apache.hadoop.hbase.master.procedure.MasterProcedureUtil;
-import org.apache.hadoop.hbase.master.procedure.TableProcedureInterface;
-import org.apache.hadoop.hbase.procedure.MasterProcedureManager;
-import org.apache.hadoop.hbase.procedure.ProcedureUtil;
-import org.apache.hadoop.hbase.procedure2.StateMachineProcedure;
-import org.apache.hadoop.hbase.protobuf.generated.BackupProtos;
-import 
org.apache.hadoop.hbase.protobuf.generated.BackupProtos.FullTableBackupState;
-import org.apache.hadoop.hbase.protobuf.generated.BackupProtos.ServerTimestamp;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
-import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
-import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.hbase.util.FSUtils;
-
-@InterfaceAudience.Private
-public class FullTableBackupProcedure
-extends StateMachineProcedure
-implements TableProcedureInterface {
-  private static final Log LOG = 
LogFactory.getLog(FullTableBackupProcedure.class);
-  
-  private static final String SNAPSHOT_BACKUP_MAX_ATTEMPTS_KEY = 
"hbase.backup.snapshot.attempts.max";
-  private static final int DEFAULT_SNAPSHOT_BACKUP_MAX_ATTEMPTS = 10;
-  
-  private static final String SNAPSHOT_BACKUP_ATTEMPTS_DELAY_KEY = 
"hbase.backup.snapshot.attempts.delay";
-  private static final int DEFAULT_SNAPSHOT_BACKUP_ATTEMPTS_DELAY = 1;
-  
-  private final AtomicBoolean aborted = new AtomicBoolean(false);
-  private Configuration conf;
-  private String backupId;
-  private List tableList;
-  private String targetRootDir;
-  HashMap newTimestamps = null;
-
-  private BackupManager backupManager;
-  private BackupInfo backupContext;

[06/10] hbase git commit: HBASE-16727 Backup refactoring: remove MR dependencies from HMaster (Vladimir Rodionov)

2016-10-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase/blob/b14e2ab1/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
--
diff --git 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
index 4562a7c..d495e84 100644
--- 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
+++ 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
@@ -90,88 +90,6 @@ public final class MasterProtos {
 // @@protoc_insertion_point(enum_scope:hbase.pb.MasterSwitchType)
   }
 
-  /**
-   * Protobuf enum {@code hbase.pb.RestoreTablesState}
-   */
-  public enum RestoreTablesState
-  implements com.google.protobuf.ProtocolMessageEnum {
-/**
- * VALIDATION = 1;
- */
-VALIDATION(0, 1),
-/**
- * RESTORE_IMAGES = 2;
- */
-RESTORE_IMAGES(1, 2),
-;
-
-/**
- * VALIDATION = 1;
- */
-public static final int VALIDATION_VALUE = 1;
-/**
- * RESTORE_IMAGES = 2;
- */
-public static final int RESTORE_IMAGES_VALUE = 2;
-
-
-public final int getNumber() { return value; }
-
-public static RestoreTablesState valueOf(int value) {
-  switch (value) {
-case 1: return VALIDATION;
-case 2: return RESTORE_IMAGES;
-default: return null;
-  }
-}
-
-public static com.google.protobuf.Internal.EnumLiteMap
-internalGetValueMap() {
-  return internalValueMap;
-}
-private static com.google.protobuf.Internal.EnumLiteMap
-internalValueMap =
-  new com.google.protobuf.Internal.EnumLiteMap() {
-public RestoreTablesState findValueByNumber(int number) {
-  return RestoreTablesState.valueOf(number);
-}
-  };
-
-public final com.google.protobuf.Descriptors.EnumValueDescriptor
-getValueDescriptor() {
-  return getDescriptor().getValues().get(index);
-}
-public final com.google.protobuf.Descriptors.EnumDescriptor
-getDescriptorForType() {
-  return getDescriptor();
-}
-public static final com.google.protobuf.Descriptors.EnumDescriptor
-getDescriptor() {
-  return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.getDescriptor().getEnumTypes().get(1);
-}
-
-private static final RestoreTablesState[] VALUES = values();
-
-public static RestoreTablesState valueOf(
-com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
-  if (desc.getType() != getDescriptor()) {
-throw new java.lang.IllegalArgumentException(
-  "EnumValueDescriptor is not for this type.");
-  }
-  return VALUES[desc.getIndex()];
-}
-
-private final int index;
-private final int value;
-
-private RestoreTablesState(int index, int value) {
-  this.index = index;
-  this.value = value;
-}
-
-// @@protoc_insertion_point(enum_scope:hbase.pb.RestoreTablesState)
-  }
-
   public interface AddColumnRequestOrBuilder
   extends com.google.protobuf.MessageOrBuilder {
 
@@ -58736,4572 +58654,36 @@ public final class MasterProtos {
   break;
 }
 case 8: {
-  int rawValue = input.readEnum();
-  
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability
 value = 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability.valueOf(rawValue);
-  if (value == null) {
-unknownFields.mergeVarintField(1, rawValue);
-  } else {
-if (!((mutable_bitField0_ & 0x0001) == 0x0001)) {
-  capabilities_ = new 
java.util.ArrayList();
-  mutable_bitField0_ |= 0x0001;
-}
-capabilities_.add(value);
-  }
-  break;
-}
-case 10: {
-  int length = input.readRawVarint32();
-  int oldLimit = input.pushLimit(length);
-  while(input.getBytesUntilLimit() > 0) {
-int rawValue = input.readEnum();
-
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability
 value = 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability.valueOf(rawValue);
-if (value == null) {
-  unknownFields.mergeVarintField(1, rawValue);
-} else {
-  if (!((mutable_bitField0_ & 0x0001) == 0x0001)) {
-capabilities_ = new 
java.util.ArrayList();
-mutable_bitField0_ |= 0x0001;
-  }
-  capabilities_.add(value);
-}
- 

[08/10] hbase git commit: HBASE-16727 Backup refactoring: remove MR dependencies from HMaster (Vladimir Rodionov)

2016-10-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase/blob/b14e2ab1/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseBackupAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseBackupAdmin.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseBackupAdmin.java
deleted file mode 100644
index dfa2fb1..000
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseBackupAdmin.java
+++ /dev/null
@@ -1,439 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.client;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.Future;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.backup.BackupInfo;
-import org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
-import org.apache.hadoop.hbase.backup.BackupRequest;
-import org.apache.hadoop.hbase.backup.BackupType;
-import org.apache.hadoop.hbase.backup.RestoreRequest;
-import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
-import org.apache.hadoop.hbase.backup.util.BackupClientUtil;
-import org.apache.hadoop.hbase.backup.util.BackupSet;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
-
-/**
- * The administrative API implementation for HBase Backup . Obtain an instance 
from 
- * an {@link Admin#getBackupAdmin()} and call {@link #close()} afterwards.
- * BackupAdmin can be used to create backups, restore data from backups and 
for 
- * other backup-related operations. 
- *
- * @see Admin
- * @since 2.0
- */
-@InterfaceAudience.Private
-@InterfaceStability.Evolving
-
-public class HBaseBackupAdmin implements BackupAdmin {
-  private static final Log LOG = LogFactory.getLog(HBaseBackupAdmin.class);
-
-  private final HBaseAdmin admin;
-  private final Connection conn;
-
-  HBaseBackupAdmin(HBaseAdmin admin) {
-this.admin = admin;
-this.conn = admin.getConnection();
-  }
-
-  @Override
-  public void close() throws IOException {
-  }
-
-  @Override
-  public BackupInfo getBackupInfo(String backupId) throws IOException {
-BackupInfo backupInfo = null;
-try (final BackupSystemTable table = new BackupSystemTable(conn)) {
-  backupInfo = table.readBackupInfo(backupId);
-  return backupInfo;
-}
-  }
-
-  @Override
-  public int getProgress(String backupId) throws IOException {
-BackupInfo backupInfo = null;
-try (final BackupSystemTable table = new BackupSystemTable(conn)) {
-  if (backupId == null) {
-ArrayList recentSessions = 
table.getBackupContexts(BackupState.RUNNING);
-if (recentSessions.isEmpty()) {
-  LOG.warn("No ongoing sessions found.");
-  return -1;
-}
-// else show status for ongoing session
-// must be one maximum
-return recentSessions.get(0).getProgress();
-  } else {
-
-backupInfo = table.readBackupInfo(backupId);
-if (backupInfo != null) {
-  return backupInfo.getProgress();
-} else {
-  LOG.warn("No information found for backupID=" + backupId);
-  return -1;
-}
-  }
-}
-  }
-
-  @Override
-  public int deleteBackups(String[] backupIds) throws IOException {
-// TODO: requires FT, failure will leave system
-// in non-consistent state
-// see HBASE-15227
-
-int totalDeleted = 0;
-Map allTablesMap = new HashMap();
-
-try (final BackupSystemTable sysTable = new BackupSystemTable(conn)) {
-  for (int i = 0; i < backupIds.length; i++) {
-BackupInfo info = sysTable.readBackupInfo(backupIds[i]);
-if (info != null) {
-  String rootDir = 

[05/10] hbase git commit: HBASE-16727 Backup refactoring: remove MR dependencies from HMaster (Vladimir Rodionov)

2016-10-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase/blob/b14e2ab1/hbase-protocol/src/main/protobuf/Backup.proto
--
diff --git a/hbase-protocol/src/main/protobuf/Backup.proto 
b/hbase-protocol/src/main/protobuf/Backup.proto
index 7d1ec4b..2b3feeb 100644
--- a/hbase-protocol/src/main/protobuf/Backup.proto
+++ b/hbase-protocol/src/main/protobuf/Backup.proto
@@ -27,7 +27,7 @@ option optimize_for = SPEED;
 
 import "HBase.proto";
 
-enum FullTableBackupState {
+/*enum FullTableBackupState {
   PRE_SNAPSHOT_TABLE = 1;
   SNAPSHOT_TABLES = 2;
   SNAPSHOT_COPY = 3;
@@ -44,7 +44,7 @@ message SnapshotTableStateData {
   required TableName table = 1;
   required string snapshotName = 2;
 }
-
+*/
 enum BackupType {
   FULL = 0;
   INCREMENTAL = 1;
@@ -119,9 +119,9 @@ message BackupInfo {
 STORE_MANIFEST = 5;
   } 
 }
-
+/*
 message BackupProcContext {
   required BackupInfo ctx = 1;
   repeated ServerTimestamp server_timestamp = 2;
 }
-
+*/

http://git-wip-us.apache.org/repos/asf/hbase/blob/b14e2ab1/hbase-protocol/src/main/protobuf/Master.proto
--
diff --git a/hbase-protocol/src/main/protobuf/Master.proto 
b/hbase-protocol/src/main/protobuf/Master.proto
index 13dbd28..54d6c93 100644
--- a/hbase-protocol/src/main/protobuf/Master.proto
+++ b/hbase-protocol/src/main/protobuf/Master.proto
@@ -27,7 +27,6 @@ option java_generate_equals_and_hash = true;
 option optimize_for = SPEED;
 
 import "HBase.proto";
-import "Backup.proto";
 import "Client.proto";
 import "ClusterStatus.proto";
 import "ErrorHandling.proto";
@@ -541,42 +540,6 @@ message SecurityCapabilitiesResponse {
   repeated Capability capabilities = 1;
 }
 
-message BackupTablesRequest {
-  required BackupType type = 1;
-  repeated TableName tables = 2;
-  required string target_root_dir = 3;
-  optional int64 workers = 4;
-  optional int64 bandwidth = 5;
-  optional string backup_set_name = 6;
-  optional uint64 nonce_group = 7 [default = 0];
-  optional uint64 nonce = 8 [default = 0];
-}
-
-message BackupTablesResponse {
-  optional uint64 proc_id = 1;
-  optional string backup_id = 2;
-}
-
-enum RestoreTablesState {
-  VALIDATION = 1;
-  RESTORE_IMAGES = 2;
-}
-
-message RestoreTablesRequest {
-  required string backup_id = 1;
-  repeated TableName tables = 2;
-  repeated TableName target_tables = 3;
-  required string backup_root_dir = 4;
-  optional bool dependency_check_only = 5;
-  optional bool overwrite = 6;
-  optional uint64 nonce_group = 7 [default = 0];
-  optional uint64 nonce = 8 [default = 0];
-}
-
-message RestoreTablesResponse {
-  optional uint64 proc_id = 1;
-}
-
 service MasterService {
   /** Used by the client to get the number of regions that have received the 
updated schema */
   rpc GetSchemaAlterStatus(GetSchemaAlterStatusRequest)
@@ -852,11 +815,4 @@ service MasterService {
   rpc ListProcedures(ListProceduresRequest)
 returns(ListProceduresResponse);
 
-  /** backup table set */
-  rpc backupTables(BackupTablesRequest)
-returns(BackupTablesResponse);
-
-  /** restore table set */
-  rpc restoreTables(RestoreTablesRequest)
-returns(RestoreTablesResponse);
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b14e2ab1/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupAdmin.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupAdmin.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupAdmin.java
new file mode 100644
index 000..82bdd4e
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupAdmin.java
@@ -0,0 +1,171 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.backup;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.List;
+import java.util.concurrent.Future;
+
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.backup.util.BackupSet;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+import 

[09/10] hbase git commit: HBASE-16727 Backup refactoring: remove MR dependencies from HMaster (Vladimir Rodionov)

2016-10-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase/blob/b14e2ab1/hbase-client/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupRestoreConstants.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupRestoreConstants.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupRestoreConstants.java
deleted file mode 100644
index ac1d2bc..000
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupRestoreConstants.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.backup.impl;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
-
-/**
- * BackupRestoreConstants holds a bunch of HBase Backup and Restore constants
- */
-@InterfaceAudience.Private
-@InterfaceStability.Stable
-public final class BackupRestoreConstants {
-
-
-  // delimiter in tablename list in restore command
-  public static final String TABLENAME_DELIMITER_IN_COMMAND = ",";
-
-  public static final String CONF_STAGING_ROOT = 
"snapshot.export.staging.root";
-
-  public static final String BACKUPID_PREFIX = "backup_";
-
-  public static enum BackupCommand {
-CREATE, CANCEL, DELETE, DESCRIBE, HISTORY, STATUS, CONVERT, MERGE, STOP, 
SHOW, HELP, PROGRESS, SET,
-SET_ADD, SET_REMOVE, SET_DELETE, SET_DESCRIBE, SET_LIST
-  }
-
-  private BackupRestoreConstants() {
-// Can't be instantiated with this ctor.
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/b14e2ab1/hbase-client/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
deleted file mode 100644
index 3066282..000
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
+++ /dev/null
@@ -1,873 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.backup.impl;
-
-import java.io.Closeable;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.TreeSet;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.backup.BackupInfo;
-import org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
-import org.apache.hadoop.hbase.backup.util.BackupClientUtil;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.Delete;

[07/10] hbase git commit: HBASE-16727 Backup refactoring: remove MR dependencies from HMaster (Vladimir Rodionov)

2016-10-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase/blob/b14e2ab1/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/BackupProtos.java
--
diff --git 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/BackupProtos.java
 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/BackupProtos.java
index 4699c81..c5220cc 100644
--- 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/BackupProtos.java
+++ 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/BackupProtos.java
@@ -9,198 +9,27 @@ public final class BackupProtos {
   com.google.protobuf.ExtensionRegistry registry) {
   }
   /**
-   * Protobuf enum {@code hbase.pb.FullTableBackupState}
-   */
-  public enum FullTableBackupState
-  implements com.google.protobuf.ProtocolMessageEnum {
-/**
- * PRE_SNAPSHOT_TABLE = 1;
- */
-PRE_SNAPSHOT_TABLE(0, 1),
-/**
- * SNAPSHOT_TABLES = 2;
- */
-SNAPSHOT_TABLES(1, 2),
-/**
- * SNAPSHOT_COPY = 3;
- */
-SNAPSHOT_COPY(2, 3),
-/**
- * BACKUP_COMPLETE = 4;
- */
-BACKUP_COMPLETE(3, 4),
-;
-
-/**
- * PRE_SNAPSHOT_TABLE = 1;
- */
-public static final int PRE_SNAPSHOT_TABLE_VALUE = 1;
-/**
- * SNAPSHOT_TABLES = 2;
- */
-public static final int SNAPSHOT_TABLES_VALUE = 2;
-/**
- * SNAPSHOT_COPY = 3;
- */
-public static final int SNAPSHOT_COPY_VALUE = 3;
-/**
- * BACKUP_COMPLETE = 4;
- */
-public static final int BACKUP_COMPLETE_VALUE = 4;
-
-
-public final int getNumber() { return value; }
-
-public static FullTableBackupState valueOf(int value) {
-  switch (value) {
-case 1: return PRE_SNAPSHOT_TABLE;
-case 2: return SNAPSHOT_TABLES;
-case 3: return SNAPSHOT_COPY;
-case 4: return BACKUP_COMPLETE;
-default: return null;
-  }
-}
-
-public static 
com.google.protobuf.Internal.EnumLiteMap
-internalGetValueMap() {
-  return internalValueMap;
-}
-private static 
com.google.protobuf.Internal.EnumLiteMap
-internalValueMap =
-  new com.google.protobuf.Internal.EnumLiteMap() 
{
-public FullTableBackupState findValueByNumber(int number) {
-  return FullTableBackupState.valueOf(number);
-}
-  };
-
-public final com.google.protobuf.Descriptors.EnumValueDescriptor
-getValueDescriptor() {
-  return getDescriptor().getValues().get(index);
-}
-public final com.google.protobuf.Descriptors.EnumDescriptor
-getDescriptorForType() {
-  return getDescriptor();
-}
-public static final com.google.protobuf.Descriptors.EnumDescriptor
-getDescriptor() {
-  return 
org.apache.hadoop.hbase.protobuf.generated.BackupProtos.getDescriptor().getEnumTypes().get(0);
-}
-
-private static final FullTableBackupState[] VALUES = values();
-
-public static FullTableBackupState valueOf(
-com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
-  if (desc.getType() != getDescriptor()) {
-throw new java.lang.IllegalArgumentException(
-  "EnumValueDescriptor is not for this type.");
-  }
-  return VALUES[desc.getIndex()];
-}
-
-private final int index;
-private final int value;
-
-private FullTableBackupState(int index, int value) {
-  this.index = index;
-  this.value = value;
-}
-
-// @@protoc_insertion_point(enum_scope:hbase.pb.FullTableBackupState)
-  }
-
-  /**
-   * Protobuf enum {@code hbase.pb.IncrementalTableBackupState}
-   */
-  public enum IncrementalTableBackupState
-  implements com.google.protobuf.ProtocolMessageEnum {
-/**
- * PREPARE_INCREMENTAL = 1;
- */
-PREPARE_INCREMENTAL(0, 1),
-/**
- * INCREMENTAL_COPY = 2;
- */
-INCREMENTAL_COPY(1, 2),
-/**
- * INCR_BACKUP_COMPLETE = 3;
- */
-INCR_BACKUP_COMPLETE(2, 3),
-;
-
-/**
- * PREPARE_INCREMENTAL = 1;
- */
-public static final int PREPARE_INCREMENTAL_VALUE = 1;
-/**
- * INCREMENTAL_COPY = 2;
- */
-public static final int INCREMENTAL_COPY_VALUE = 2;
-/**
- * INCR_BACKUP_COMPLETE = 3;
- */
-public static final int INCR_BACKUP_COMPLETE_VALUE = 3;
-
-
-public final int getNumber() { return value; }
-
-public static IncrementalTableBackupState valueOf(int value) {
-  switch (value) {
-case 1: return PREPARE_INCREMENTAL;
-case 2: return INCREMENTAL_COPY;
-case 3: return INCR_BACKUP_COMPLETE;
-default: return null;
-  }
-}
-
-public static 
com.google.protobuf.Internal.EnumLiteMap
-internalGetValueMap() {
-  return internalValueMap;
-}
-private static 
com.google.protobuf.Internal.EnumLiteMap
-internalValueMap =
-  new 

[04/10] hbase git commit: HBASE-16727 Backup refactoring: remove MR dependencies from HMaster (Vladimir Rodionov)

2016-10-05 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase/blob/b14e2ab1/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManifest.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManifest.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManifest.java
new file mode 100644
index 000..d10713d
--- /dev/null
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManifest.java
@@ -0,0 +1,791 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.backup.impl;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.TreeMap;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.backup.BackupInfo;
+import org.apache.hadoop.hbase.backup.BackupType;
+import org.apache.hadoop.hbase.backup.util.BackupClientUtil;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.BackupProtos;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
+
+import com.google.protobuf.InvalidProtocolBufferException;
+
+
+/**
+ * Backup manifest Contains all the meta data of a backup image. The manifest 
info will be bundled
+ * as manifest file together with data. So that each backup image will contain 
all the info needed
+ * for restore.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public class BackupManifest {
+
+  private static final Log LOG = LogFactory.getLog(BackupManifest.class);
+
+  // manifest file name
+  public static final String MANIFEST_FILE_NAME = ".backup.manifest";
+
+  // manifest file version, current is 1.0
+  public static final String MANIFEST_VERSION = "1.0";
+
+  // backup image, the dependency graph is made up by series of backup images
+
+  public static class BackupImage implements Comparable {
+
+private String backupId;
+private BackupType type;
+private String rootDir;
+private List tableList;
+private long startTs;
+private long completeTs;
+private ArrayList ancestors;
+
+public BackupImage() {
+  super();
+}
+
+public BackupImage(String backupId, BackupType type, String rootDir,
+List tableList, long startTs, long completeTs) {
+  this.backupId = backupId;
+  this.type = type;
+  this.rootDir = rootDir;
+  this.tableList = tableList;
+  this.startTs = startTs;
+  this.completeTs = completeTs;
+}
+
+static BackupImage fromProto(BackupProtos.BackupImage im) {
+  String backupId = im.getBackupId();
+  String rootDir = im.getRootDir();
+  long startTs = im.getStartTs();
+  long completeTs = im.getCompleteTs();
+  List tableListList = im.getTableListList();
+  List tableList = new ArrayList();
+  for(HBaseProtos.TableName tn : tableListList) {
+tableList.add(ProtobufUtil.toTableName(tn));
+  }
+  
+  List ancestorList = im.getAncestorsList();
+  
+  BackupType type =
+  im.getBackupType() == BackupProtos.BackupType.FULL ? BackupType.FULL:
+BackupType.INCREMENTAL;
+
+  BackupImage image = new BackupImage(backupId, type, rootDir, tableList, 
startTs, completeTs);
+  for(BackupProtos.BackupImage img: ancestorList) {
+image.addAncestor(fromProto(img));
+  }
+  return image;
+}
+
+BackupProtos.BackupImage toProto() {
+  

[10/10] hbase git commit: HBASE-16727 Backup refactoring: remove MR dependencies from HMaster (Vladimir Rodionov)

2016-10-05 Thread tedyu
HBASE-16727 Backup refactoring: remove MR dependencies from HMaster (Vladimir 
Rodionov)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b14e2ab1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b14e2ab1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b14e2ab1

Branch: refs/heads/HBASE-7912
Commit: b14e2ab1c24e65ff88dd4c579acf83cb4ed0605e
Parents: e35f7b9
Author: tedyu 
Authored: Wed Oct 5 16:29:40 2016 -0700
Committer: tedyu 
Committed: Wed Oct 5 16:29:40 2016 -0700

--
 .../apache/hadoop/hbase/backup/BackupInfo.java  |   504 -
 .../hadoop/hbase/backup/BackupRequest.java  |91 -
 .../hadoop/hbase/backup/BackupStatus.java   |   104 -
 .../hadoop/hbase/backup/RestoreRequest.java |94 -
 .../hbase/backup/impl/BackupCommands.java   |   717 -
 .../hbase/backup/impl/BackupException.java  |86 -
 .../hbase/backup/impl/BackupManifest.java   |   791 -
 .../backup/impl/BackupRestoreConstants.java |47 -
 .../hbase/backup/impl/BackupSystemTable.java|   873 -
 .../backup/impl/BackupSystemTableHelper.java|   433 -
 .../hbase/backup/util/BackupClientUtil.java |   437 -
 .../hadoop/hbase/backup/util/BackupSet.java |62 -
 .../org/apache/hadoop/hbase/client/Admin.java   | 9 -
 .../apache/hadoop/hbase/client/BackupAdmin.java |   174 -
 .../hbase/client/ConnectionImplementation.java  |13 -
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |   231 +-
 .../hadoop/hbase/client/HBaseBackupAdmin.java   |   439 -
 .../hadoop/hbase/protobuf/RequestConverter.java |44 -
 .../ClientSnapshotDescriptionUtils.java | 3 +-
 .../hbase/IntegrationTestBackupRestore.java |10 +-
 .../hbase/protobuf/generated/BackupProtos.java  | 15632 -
 .../hbase/protobuf/generated/MasterProtos.java  |  5893 +--
 hbase-protocol/src/main/protobuf/Backup.proto   | 8 +-
 hbase-protocol/src/main/protobuf/Master.proto   |44 -
 .../apache/hadoop/hbase/backup/BackupAdmin.java |   171 +
 .../apache/hadoop/hbase/backup/BackupInfo.java  |   504 +
 .../hadoop/hbase/backup/BackupRequest.java  |91 +
 .../hadoop/hbase/backup/BackupStatus.java   |   104 +
 .../hadoop/hbase/backup/RestoreDriver.java  | 4 +-
 .../hadoop/hbase/backup/RestoreRequest.java |94 +
 .../hbase/backup/impl/BackupCommands.java   |   720 +
 .../hbase/backup/impl/BackupException.java  |86 +
 .../hbase/backup/impl/BackupManifest.java   |   791 +
 .../backup/impl/BackupRestoreConstants.java |47 +
 .../hbase/backup/impl/BackupSystemTable.java|   926 +
 .../backup/impl/BackupSystemTableHelper.java|   433 +
 .../backup/impl/FullTableBackupClient.java  |   540 +
 .../hbase/backup/impl/HBaseBackupAdmin.java |   555 +
 .../backup/impl/IncrementalBackupManager.java   |27 +-
 .../impl/IncrementalTableBackupClient.java  |   235 +
 .../hbase/backup/impl/RestoreTablesClient.java  |   236 +
 .../backup/master/FullTableBackupProcedure.java |   777 -
 .../master/IncrementalTableBackupProcedure.java |   400 -
 .../backup/master/RestoreTablesProcedure.java   |   387 -
 .../hbase/backup/util/BackupClientUtil.java |   437 +
 .../hbase/backup/util/BackupServerUtil.java | 3 +-
 .../hadoop/hbase/backup/util/BackupSet.java |62 +
 .../hbase/backup/util/RestoreServerUtil.java|85 +-
 .../org/apache/hadoop/hbase/master/HMaster.java |   119 +-
 .../hadoop/hbase/master/MasterRpcServices.java  |42 -
 .../hadoop/hbase/master/MasterServices.java |30 -
 .../hadoop/hbase/backup/TestBackupBase.java |24 +-
 .../hbase/backup/TestBackupBoundaryTests.java   | 8 +-
 .../hbase/backup/TestBackupDeleteRestore.java   | 1 -
 .../hbase/backup/TestBackupMultipleDeletes.java |20 +-
 .../hadoop/hbase/backup/TestFullRestore.java| 1 -
 .../hbase/backup/TestIncrementalBackup.java |61 +-
 .../TestIncrementalBackupDeleteTable.java   | 8 +-
 .../hadoop/hbase/backup/TestRemoteBackup.java   | 1 -
 .../hadoop/hbase/master/TestCatalogJanitor.java |17 -
 60 files changed, 13746 insertions(+), 21040 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b14e2ab1/hbase-client/src/main/java/org/apache/hadoop/hbase/backup/BackupInfo.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/backup/BackupInfo.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/backup/BackupInfo.java
deleted file mode 100644
index be5ffea..000
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/backup/BackupInfo.java
+++ /dev/null
@@ -1,504 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more 

hbase git commit: HBASE-16682 Fix Shell tests failure. NoClassDefFoundError for Minikdc.

2016-10-05 Thread appy
Repository: hbase
Updated Branches:
  refs/heads/branch-1 afd3bc856 -> 29e8300a8


HBASE-16682 Fix Shell tests failure. NoClassDefFoundError for Minikdc.

Change-Id: Iaf23c4656a2af7a8b61557612c194ca944cf9c8a


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/29e8300a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/29e8300a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/29e8300a

Branch: refs/heads/branch-1
Commit: 29e8300a8720f6e500d831590f5eb79677f79d3d
Parents: afd3bc8
Author: Apekshit Sharma 
Authored: Wed Oct 5 14:08:21 2016 -0700
Committer: Apekshit Sharma 
Committed: Wed Oct 5 14:09:02 2016 -0700

--
 hbase-shell/pom.xml| 10 +-
 hbase-testing-util/pom.xml |  8 
 2 files changed, 17 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/29e8300a/hbase-shell/pom.xml
--
diff --git a/hbase-shell/pom.xml b/hbase-shell/pom.xml
index 1bf067f..ecbdb350 100644
--- a/hbase-shell/pom.xml
+++ b/hbase-shell/pom.xml
@@ -251,7 +251,7 @@
 
   org.apache.htrace
   htrace-core
-   
+
   
   
 
@@ -318,6 +318,10 @@
   hadoop-minicluster
   test
 
+
+  org.apache.hadoop
+  hadoop-minikdc
+
   
   
 
@@ -367,6 +371,10 @@
   org.apache.hadoop
   hadoop-minicluster
 
+
+  org.apache.hadoop
+  hadoop-minikdc
+
   
   
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/29e8300a/hbase-testing-util/pom.xml
--
diff --git a/hbase-testing-util/pom.xml b/hbase-testing-util/pom.xml
index 41b8433..333fb57 100644
--- a/hbase-testing-util/pom.xml
+++ b/hbase-testing-util/pom.xml
@@ -177,6 +177,10 @@
 hadoop-minicluster
 compile
 
+
+org.apache.hadoop
+hadoop-minikdc
+
 
 
 

hbase git commit: HBASE-16753 There is a mismatch between suggested Java version in hbase-env.sh

2016-10-05 Thread dimaspivak
Repository: hbase
Updated Branches:
  refs/heads/master 3aa4dfa73 -> 1f1a13f2e


HBASE-16753 There is a mismatch between suggested Java version in hbase-env.sh

Signed-off-by: Dima Spivak 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1f1a13f2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1f1a13f2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1f1a13f2

Branch: refs/heads/master
Commit: 1f1a13f2e2a28eb818cd85b6c50e47b52aaa2c2e
Parents: 3aa4dfa
Author: Umesh Agashe 
Authored: Mon Oct 3 14:02:28 2016 -0700
Committer: Dima Spivak 
Committed: Wed Oct 5 10:16:41 2016 -0700

--
 bin/hbase-config.sh| 2 +-
 conf/hbase-env.cmd | 2 +-
 conf/hbase-env.sh  | 4 ++--
 src/main/asciidoc/_chapters/configuration.adoc | 2 +-
 4 files changed, 5 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1f1a13f2/bin/hbase-config.sh
--
diff --git a/bin/hbase-config.sh b/bin/hbase-config.sh
index d7d7e6f..bf4ee92 100644
--- a/bin/hbase-config.sh
+++ b/bin/hbase-config.sh
@@ -132,7 +132,7 @@ if [ -z "$JAVA_HOME" ]; then
 | Please download the latest Sun JDK from the Sun Java web site|
 | > http://www.oracle.com/technetwork/java/javase/downloads|
 |  |
-| HBase requires Java 1.7 or later.|
+| HBase requires Java 1.8 or later.|
 +==+
 EOF
 exit 1

http://git-wip-us.apache.org/repos/asf/hbase/blob/1f1a13f2/conf/hbase-env.cmd
--
diff --git a/conf/hbase-env.cmd b/conf/hbase-env.cmd
index d16de55..8c8597e 100644
--- a/conf/hbase-env.cmd
+++ b/conf/hbase-env.cmd
@@ -18,7 +18,7 @@
 
 @rem Set environment variables here.
 
-@rem The java implementation to use.  Java 1.7+ required.
+@rem The java implementation to use.  Java 1.8+ required.
 @rem set JAVA_HOME=c:\apps\java
 
 @rem Extra Java CLASSPATH elements.  Optional.

http://git-wip-us.apache.org/repos/asf/hbase/blob/1f1a13f2/conf/hbase-env.sh
--
diff --git a/conf/hbase-env.sh b/conf/hbase-env.sh
index 31e8441..d9879c6 100644
--- a/conf/hbase-env.sh
+++ b/conf/hbase-env.sh
@@ -24,8 +24,8 @@
 # so try to keep things idempotent unless you want to take an even deeper look
 # into the startup scripts (bin/hbase, etc.)
 
-# The java implementation to use.  Java 1.7+ required.
-# export JAVA_HOME=/usr/java/jdk1.6.0/
+# The java implementation to use.  Java 1.8+ required.
+# export JAVA_HOME=/usr/java/jdk1.8.0/
 
 # Extra Java CLASSPATH elements.  Optional.
 # export HBASE_CLASSPATH=

http://git-wip-us.apache.org/repos/asf/hbase/blob/1f1a13f2/src/main/asciidoc/_chapters/configuration.adoc
--
diff --git a/src/main/asciidoc/_chapters/configuration.adoc 
b/src/main/asciidoc/_chapters/configuration.adoc
index 4804332..048b047 100644
--- a/src/main/asciidoc/_chapters/configuration.adoc
+++ b/src/main/asciidoc/_chapters/configuration.adoc
@@ -729,7 +729,7 @@ The following lines in the _hbase-env.sh_ file show how to 
set the `JAVA_HOME` e
 
 
 # The java implementation to use.
-export JAVA_HOME=/usr/java/jdk1.7.0/
+export JAVA_HOME=/usr/java/jdk1.8.0/
 
 # The maximum amount of heap to use. Default is left to JVM default.
 export HBASE_HEAPSIZE=4G



[2/2] hbase git commit: HBASE-16690 Move znode path configs to a separated class

2016-10-05 Thread zhangduo
HBASE-16690 Move znode path configs to a separated class


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3aa4dfa7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3aa4dfa7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3aa4dfa7

Branch: refs/heads/master
Commit: 3aa4dfa73d56a1a6a42274e8d65dcbb694a072c7
Parents: 617dfe1
Author: zhangduo 
Authored: Fri Sep 23 23:30:43 2016 +0800
Committer: zhangduo 
Committed: Wed Oct 5 20:12:44 2016 +0800

--
 .../hbase/client/ConnectionImplementation.java  |   4 +-
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |   2 +-
 .../hadoop/hbase/client/ZooKeeperRegistry.java  |   2 +-
 .../replication/ReplicationStateZKBase.java |   3 +-
 .../replication/ReplicationTrackerZKImpl.java   |   4 +-
 .../hbase/zookeeper/MasterAddressTracker.java   |  13 +-
 .../hbase/zookeeper/MetaTableLocator.java   |  11 +-
 .../hadoop/hbase/zookeeper/ZKClusterId.java |   6 +-
 .../apache/hadoop/hbase/zookeeper/ZKUtil.java   |  15 +-
 .../hadoop/hbase/zookeeper/ZNodePaths.java  | 176 +++
 .../hadoop/hbase/zookeeper/ZkAclReset.java  |   4 +-
 .../hbase/zookeeper/ZooKeeperNodeTracker.java   |  10 +-
 .../hbase/zookeeper/ZooKeeperWatcher.java   | 213 +++
 .../hbase/zookeeper/TestZooKeeperWatcher.java   |  42 ++--
 .../hbase/IntegrationTestMetaReplicas.java  |   2 +-
 .../test/IntegrationTestZKAndFSPermissions.java |   2 +-
 .../hbase/rsgroup/RSGroupInfoManagerImpl.java   |   6 +-
 .../rsgroup/VerifyingRSGroupAdminClient.java|   2 +-
 .../hbase/tmpl/master/MasterStatusTmpl.jamon|   2 +-
 .../apache/hadoop/hbase/ZKNamespaceManager.java |   2 +-
 .../backup/example/ZKTableArchiveClient.java|   4 +-
 .../ZKSplitLogManagerCoordination.java  |  41 ++--
 .../ZkSplitLogWorkerCoordination.java   |  26 +--
 .../hbase/master/ActiveMasterManager.java   |  18 +-
 .../org/apache/hadoop/hbase/master/HMaster.java |   8 +-
 .../hbase/master/MasterMetaBootstrap.java   |   9 +-
 .../hadoop/hbase/master/ServerManager.java  |   2 +-
 .../hadoop/hbase/master/TableLockManager.java   |  12 +-
 .../hadoop/hbase/mob/mapreduce/SweepJob.java|   3 +-
 .../hadoop/hbase/procedure/ZKProcedureUtil.java |   2 +-
 .../hbase/regionserver/HRegionServer.java   |   6 +-
 .../replication/HBaseReplicationEndpoint.java   |   4 +-
 .../security/access/ZKPermissionWatcher.java|   8 +-
 .../hbase/security/token/ZKSecretWatcher.java   |   2 +-
 .../visibility/ZKVisibilityLabelWatcher.java|   4 +-
 .../org/apache/hadoop/hbase/util/HBaseFsck.java |   4 +-
 .../hadoop/hbase/util/ZKDataMigrator.java   |   4 +-
 .../hbase/util/hbck/ReplicationChecker.java |   2 +-
 .../hbase/zookeeper/ClusterStatusTracker.java   |  10 +-
 .../hbase/zookeeper/DrainingServerTracker.java  |   8 +-
 .../hbase/zookeeper/LoadBalancerTracker.java|   8 +-
 .../zookeeper/MasterMaintenanceModeTracker.java |   4 +-
 .../zookeeper/RecoveringRegionWatcher.java  |   4 +-
 .../zookeeper/RegionNormalizerTracker.java  |   8 +-
 .../hbase/zookeeper/RegionServerTracker.java|  10 +-
 .../hbase/zookeeper/SplitOrMergeTracker.java|   8 +-
 .../hadoop/hbase/zookeeper/ZKSplitLog.java  |  14 +-
 .../hbase/client/TestMetaWithReplicas.java  |   4 +-
 .../hbase/master/TestActiveMasterManager.java   |  14 +-
 .../master/TestDistributedLogSplitting.java |  16 +-
 .../hbase/master/TestHMasterRPCException.java   |   6 +-
 .../hbase/master/TestMasterNoCluster.java   |   2 +-
 .../hbase/master/TestMasterStatusServlet.java   |   2 +
 .../hbase/master/TestMasterWalManager.java  |   9 +-
 .../hbase/master/TestMetaShutdownHandler.java   |   2 +-
 .../hbase/master/TestSplitLogManager.java   |  18 +-
 .../hbase/master/TestTableLockManager.java  |   5 +-
 .../hbase/master/TestTableStateManager.java |   2 +-
 .../hbase/mob/mapreduce/TestMobSweepMapper.java |   2 +-
 .../mob/mapreduce/TestMobSweepReducer.java  |   2 +-
 .../regionserver/TestMasterAddressTracker.java  |   6 +-
 .../regionserver/TestRegionServerHostname.java  |   2 +-
 .../hbase/regionserver/TestSplitLogWorker.java  |  26 +--
 .../TestReplicationStateHBaseImpl.java  |   2 +-
 .../replication/TestReplicationStateZKImpl.java |   4 +-
 .../TestReplicationTrackerZKImpl.java   |  19 +-
 .../hadoop/hbase/zookeeper/TestZKMulti.java |  40 ++--
 .../zookeeper/TestZooKeeperNodeTracker.java |   6 +-
 .../lock/TestZKInterProcessReadWriteLock.java   |   6 +-
 69 files changed, 491 insertions(+), 458 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3aa4dfa7/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java

[1/2] hbase git commit: HBASE-16690 Move znode path configs to a separated class

2016-10-05 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master 617dfe18c -> 3aa4dfa73


http://git-wip-us.apache.org/repos/asf/hbase/blob/3aa4dfa7/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index b07a40c..159d067 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -3225,7 +3225,7 @@ public class HRegionServer extends HasThread implements
   }
 
   private String getMyEphemeralNodePath() {
-return ZKUtil.joinZNode(this.zooKeeper.rsZNode, 
getServerName().toString());
+return ZKUtil.joinZNode(this.zooKeeper.znodePaths.rsZNode, 
getServerName().toString());
   }
 
   private boolean isHealthCheckerConfigured() {
@@ -3267,7 +3267,7 @@ public class HRegionServer extends HasThread implements
 
 try {
   long lastRecordedFlushedSequenceId = -1;
-  String nodePath = ZKUtil.joinZNode(this.zooKeeper.recoveringRegionsZNode,
+  String nodePath = 
ZKUtil.joinZNode(this.zooKeeper.znodePaths.recoveringRegionsZNode,
 regionInfo.getEncodedName());
   // recovering-region level
   byte[] data;
@@ -3308,7 +3308,7 @@ public class HRegionServer extends HasThread implements
 String result = null;
 long maxZxid = 0;
 ZooKeeperWatcher zkw = this.getZooKeeper();
-String nodePath = ZKUtil.joinZNode(zkw.recoveringRegionsZNode, 
encodedRegionName);
+String nodePath = ZKUtil.joinZNode(zkw.znodePaths.recoveringRegionsZNode, 
encodedRegionName);
 List failedServers = ZKUtil.listChildrenNoWatch(zkw, nodePath);
 if (failedServers == null || failedServers.isEmpty()) {
   return result;

http://git-wip-us.apache.org/repos/asf/hbase/blob/3aa4dfa7/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.java
index 6485e4a..1a603e0 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.java
@@ -147,7 +147,7 @@ public abstract class HBaseReplicationEndpoint extends 
BaseReplicationEndpoint
*/
  protected static List<ServerName> fetchSlavesAddresses(ZooKeeperWatcher zkw)
  throws KeeperException {
-List<String> children = ZKUtil.listChildrenAndWatchForNewChildren(zkw, 
zkw.rsZNode);
+List<String> children = ZKUtil.listChildrenAndWatchForNewChildren(zkw, 
zkw.znodePaths.rsZNode);
 if (children == null) {
   return Collections.emptyList();
 }
@@ -208,7 +208,7 @@ public abstract class HBaseReplicationEndpoint extends 
BaseReplicationEndpoint
 public PeerRegionServerListener(HBaseReplicationEndpoint replicationPeer) {
   super(replicationPeer.getZkw());
   this.replicationEndpoint = replicationPeer;
-  this.regionServerListNode = replicationEndpoint.getZkw().rsZNode;
+  this.regionServerListNode = 
replicationEndpoint.getZkw().znodePaths.rsZNode;
 }
 
 @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/3aa4dfa7/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java
index c4b3219..308ef41 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java
@@ -66,7 +66,7 @@ public class ZKPermissionWatcher extends ZooKeeperListener 
implements Closeable
 super(watcher);
 this.authManager = authManager;
 String aclZnodeParent = conf.get("zookeeper.znode.acl.parent", ACL_NODE);
-this.aclZNode = ZKUtil.joinZNode(watcher.baseZNode, aclZnodeParent);
+this.aclZNode = ZKUtil.joinZNode(watcher.znodePaths.baseZNode, 
aclZnodeParent);
 executor = Executors.newSingleThreadExecutor(
   new DaemonThreadFactory("zk-permission-watcher"));
   }
@@ -249,7 +249,7 @@ public class ZKPermissionWatcher extends ZooKeeperListener 
implements Closeable
*/
   public void writeToZookeeper(byte[] entry, byte[] permsData) {
 String entryName = Bytes.toString(entry);
-String zkNode = ZKUtil.joinZNode(watcher.baseZNode, ACL_NODE);

hbase git commit: HBASE-16758 bring back HBaseZeroCopyByteStringer stuff.

2016-10-05 Thread anoopsamjohn
Repository: hbase
Updated Branches:
  refs/heads/master 6a9b57b39 -> 617dfe18c


HBASE-16758 bring back HBaseZeroCopyByteStringer stuff.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/617dfe18
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/617dfe18
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/617dfe18

Branch: refs/heads/master
Commit: 617dfe18cdc287ea5886e5a9567c9abcd6c0fa28
Parents: 6a9b57b
Author: anoopsamjohn 
Authored: Wed Oct 5 12:57:13 2016 +0530
Committer: anoopsamjohn 
Committed: Wed Oct 5 12:57:13 2016 +0530

--
 .../coprocessor/BigDecimalColumnInterpreter.java   |  5 ++---
 .../hbase/security/access/AccessControlUtil.java   | 17 +
 .../security/visibility/VisibilityClient.java  |  7 ---
 .../FanOutOneBlockAsyncDFSOutputSaslHelper.java|  4 ++--
 .../hadoop/hbase/security/token/TokenUtil.java |  5 +++--
 .../security/visibility/VisibilityController.java  |  4 ++--
 .../hbase/security/visibility/VisibilityUtils.java |  6 ++
 7 files changed, 24 insertions(+), 24 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/617dfe18/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
index 7d08b7e..9036273 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
@@ -30,10 +30,9 @@ import 
org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.coprocessor.ColumnInterpreter;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.EmptyMsg;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.util.Bytes;
 
-import com.google.protobuf.ByteString;
-
 /**
  * ColumnInterpreter for doing Aggregation's with BigDecimal columns. This 
class
  * is required at the RegionServer also.
@@ -124,7 +123,7 @@ public class BigDecimalColumnInterpreter extends 
ColumnInterpreter