hbase git commit: HBASE-16530 Reduce DBE code duplication

2016-09-07 Thread liyu
Repository: hbase
Updated Branches:
  refs/heads/master e65817ef1 -> fc224ed0e


HBASE-16530 Reduce DBE code duplication

Signed-off-by: Yu Li 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/fc224ed0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/fc224ed0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/fc224ed0

Branch: refs/heads/master
Commit: fc224ed0eda20f876b3ecd3a858e919afa30ec12
Parents: e65817e
Author: binlijin 
Authored: Thu Sep 8 13:44:41 2016 +0800
Committer: Yu Li 
Committed: Thu Sep 8 13:48:27 2016 +0800

--
 .../io/encoding/AbstractDataBlockEncoder.java   | 83 
 .../io/encoding/BufferedDataBlockEncoder.java   | 48 ++-
 .../io/encoding/CopyKeyDataBlockEncoder.java| 53 +++--
 .../hadoop/hbase/io/encoding/NoneEncoder.java   | 68 
 .../hbase/io/encoding/RowIndexCodecV1.java  | 31 +---
 .../hbase/io/encoding/RowIndexEncoderV1.java| 35 ++---
 .../hbase/io/encoding/RowIndexSeekerV1.java | 18 +
 .../hbase/io/hfile/NoOpDataBlockEncoder.java| 59 +++---
 8 files changed, 221 insertions(+), 174 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/fc224ed0/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/AbstractDataBlockEncoder.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/AbstractDataBlockEncoder.java
 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/AbstractDataBlockEncoder.java
new file mode 100644
index 000..8a3dadd
--- /dev/null
+++ 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/AbstractDataBlockEncoder.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hbase.io.encoding;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.hbase.ByteBufferedKeyOnlyKeyValue;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.io.hfile.BlockType;
+import org.apache.hadoop.hbase.io.hfile.HFileContext;
+
+@InterfaceAudience.Private
+public abstract class AbstractDataBlockEncoder implements DataBlockEncoder {
+
+  @Override
+  public HFileBlockEncodingContext newDataBlockEncodingContext(
+  DataBlockEncoding encoding, byte[] header, HFileContext meta) {
+return new HFileBlockDefaultEncodingContext(encoding, header, meta);
+  }
+
+  @Override
+  public HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext 
meta) {
+return new HFileBlockDefaultDecodingContext(meta);
+  }
+
+  protected void postEncoding(HFileBlockEncodingContext encodingCtx)
+  throws IOException {
+if (encodingCtx.getDataBlockEncoding() != DataBlockEncoding.NONE) {
+  encodingCtx.postEncoding(BlockType.ENCODED_DATA);
+} else {
+  encodingCtx.postEncoding(BlockType.DATA);
+}
+  }
+
+  protected Cell createFirstKeyCell(ByteBuffer key, int keyLength) {
+if (key.hasArray()) {
+  return new KeyValue.KeyOnlyKeyValue(key.array(), key.arrayOffset()
+  + key.position(), keyLength);
+} else {
+  return new ByteBufferedKeyOnlyKeyValue(key, key.position(), keyLength);
+}
+  }
+
+  protected abstract static class AbstractEncodedSeeker implements
+  EncodedSeeker {
+protected HFileBlockDecodingContext decodingCtx;
+protected final CellComparator comparator;
+
+public AbstractEncodedSeeker(CellComparator comparator,
+HFileBlockDecodingContext decodingCtx) {
+  this.comparator = comparator;
+  this.decodingCtx = decodingCtx;
+}
+
+protected boolean includesMvcc() {
+  return this.decodingCtx.getHFileContext().isIncludesMvcc();
+}
+
+protected boolean includesTags() {
+  return this.decodingCtx.getHFileContext().isIncludesTags();
+}
+  }
+}

hbase git commit: HBASE-16544 Remove or Clarify 'Using Amazon S3 Storage' section in the reference guide (Yi Liang)

2016-09-07 Thread jerryjch
Repository: hbase
Updated Branches:
  refs/heads/master e9cfbfd10 -> e65817ef1


HBASE-16544 Remove or Clarify 'Using Amazon S3 Storage' section in the 
reference guide (Yi Liang)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e65817ef
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e65817ef
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e65817ef

Branch: refs/heads/master
Commit: e65817ef15078e262c00d097996852be2716bb87
Parents: e9cfbfd
Author: Jerry He 
Authored: Wed Sep 7 18:50:02 2016 -0700
Committer: Jerry He 
Committed: Wed Sep 7 18:50:02 2016 -0700

--
 src/main/asciidoc/_chapters/configuration.adoc | 31 -
 1 file changed, 31 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e65817ef/src/main/asciidoc/_chapters/configuration.adoc
--
diff --git a/src/main/asciidoc/_chapters/configuration.adoc 
b/src/main/asciidoc/_chapters/configuration.adoc
index 8dc3e8a..89820ca 100644
--- a/src/main/asciidoc/_chapters/configuration.adoc
+++ b/src/main/asciidoc/_chapters/configuration.adoc
@@ -1090,37 +1090,6 @@ Only a subset of all configurations can currently be 
changed in the running serv
 Here is an incomplete list: `hbase.regionserver.thread.compaction.large`, 
`hbase.regionserver.thread.compaction.small`, 
`hbase.regionserver.thread.split`, `hbase.regionserver.thread.merge`, as well 
as compaction policy and configurations and adjustment to offpeak hours.
 For the full list consult the patch attached to  
link:https://issues.apache.org/jira/browse/HBASE-12147[HBASE-12147 Porting 
Online Config Change from 89-fb].
 
-[[amazon_s3_configuration]]
-== Using Amazon S3 Storage
-
-HBase is designed to be tightly coupled with HDFS, and testing of other 
filesystems
-has not been thorough.
-
-The following limitations have been reported:
-
-- RegionServers should be deployed in Amazon EC2 to mitigate latency and 
bandwidth
-limitations when accessing the filesystem, and RegionServers must remain 
available
-to preserve data locality.
-- S3 writes each inbound and outbound file to disk, which adds overhead to 
each operation.
-- The best performance is achieved when all clients and servers are in the 
Amazon
-cloud, rather than a heterogenous architecture.
-- You must be aware of the location of `hadoop.tmp.dir` so that the local 
`/tmp/`
-directory is not filled to capacity.
-- HBase has a different file usage pattern than MapReduce jobs and has been 
optimized for
-HDFS, rather than distant networked storage.
-- The `s3a://` protocol is strongly recommended. The `s3n://` and `s3://` 
protocols have serious
-limitations and do not use the Amazon AWS SDK. The `s3a://` protocol is 
supported
-for use with HBase if you use Hadoop 2.6.1 or higher with HBase 1.2 or higher. 
Hadoop
-2.6.0 is not supported with HBase at all.
-
-Configuration details for Amazon S3 and associated Amazon services such as EMR 
are
-out of the scope of the HBase documentation. See the
-link:https://wiki.apache.org/hadoop/AmazonS3[Hadoop Wiki entry on Amazon S3 
Storage]
-and
-link:http://docs.aws.amazon.com/ElasticMapReduce/latest/DeveloperGuide/emr-hbase.html[Amazon's
 documentation for deploying HBase in EMR].
-
-One use case that is well-suited for Amazon S3 is storing snapshots. See 
<>.
-
 ifdef::backend-docbook[]
 [index]
 == Index



hbase git commit: HBASE-16566 Add nonce support to TableBackupProcedure

2016-09-07 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/HBASE-7912 e01ac71fb -> 8f6e3a63f


HBASE-16566 Add nonce support to TableBackupProcedure


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8f6e3a63
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8f6e3a63
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8f6e3a63

Branch: refs/heads/HBASE-7912
Commit: 8f6e3a63fdf671cbe2f6ac27bd9f4cbf2e4cff53
Parents: e01ac71
Author: tedyu 
Authored: Wed Sep 7 12:34:56 2016 -0700
Committer: tedyu 
Committed: Wed Sep 7 12:34:56 2016 -0700

--
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |   2 +-
 .../hadoop/hbase/protobuf/RequestConverter.java |   3 +-
 .../hbase/protobuf/generated/MasterProtos.java  | 481 +--
 hbase-protocol/src/main/protobuf/Master.proto   |   2 +
 .../org/apache/hadoop/hbase/master/HMaster.java |  14 +-
 .../hadoop/hbase/master/MasterRpcServices.java  |   6 +-
 .../hadoop/hbase/master/MasterServices.java |   9 +-
 .../hadoop/hbase/master/TestCatalogJanitor.java |   6 +-
 8 files changed, 358 insertions(+), 165 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8f6e3a63/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index 2b1dcd1..9245cdb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -1587,7 +1587,7 @@ public class HBaseAdmin implements Admin {
 public BackupTablesResponse call(int callTimeout) throws 
ServiceException {
   BackupTablesRequest request = 
RequestConverter.buildBackupTablesRequest(
 userRequest.getBackupType(), userRequest.getTableList(), 
userRequest.getTargetRootDir(),
-userRequest.getWorkers(), userRequest.getBandwidth());
+userRequest.getWorkers(), userRequest.getBandwidth(), 
ng.getNonceGroup(),ng.newNonce());
   return master.backupTables(null, request);
 }
   }, (int) backupWaitTimeout);

http://git-wip-us.apache.org/repos/asf/hbase/blob/8f6e3a63/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
index eba01ce..031ad54 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
@@ -1273,7 +1273,7 @@ public final class RequestConverter {
 
   public static BackupTablesRequest buildBackupTablesRequest(
   final BackupType type, List tableList, String targetRootDir, 
final int workers,
-  final long bandwidth) {
+  final long bandwidth, final long nonceGroup, final long nonce) {
 BackupTablesRequest.Builder builder = BackupTablesRequest.newBuilder();
 builder.setType(ProtobufUtil.toProtoBackupType(type));
 builder.setTargetRootDir(targetRootDir);
@@ -1284,6 +1284,7 @@ public final class RequestConverter {
 builder.addTables(ProtobufUtil.toProtoTableName(table));
   }
 }
+builder.setNonceGroup(nonceGroup).setNonce(nonce);
 return builder.build();
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/8f6e3a63/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
--
diff --git 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
index 8b383c7..f276b7a 100644
--- 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
+++ 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
@@ -59369,6 +59369,26 @@ public final class MasterProtos {
  * optional int64 bandwidth = 5;
  */
 long getBandwidth();
+
+// optional uint64 nonce_group = 6 [default = 0];
+/**
+ * optional uint64 nonce_group = 6 [default = 0];
+ */
+boolean hasNonceGroup();
+/**
+ * optional uint64 nonce_group = 6 [default = 0];
+ */
+long getNonceGroup();
+
+// optional uint64 nonce = 7 [default = 0];
+/**
+ * optional uint64 nonce = 7 [default = 0];
+ */
+

hbase git commit: HBASE-16572 Sync method in RecoverableZooKeeper failed to pass callback function in (Allan Yang)

2016-09-07 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1 6f73ef2df -> 354706a7d


HBASE-16572 Sync method in RecoverableZooKeeper failed to pass callback 
function in (Allan Yang)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/354706a7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/354706a7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/354706a7

Branch: refs/heads/branch-1
Commit: 354706a7d818e202f8db5e9f4c6c1fa8dba6fd93
Parents: 6f73ef2
Author: tedyu 
Authored: Wed Sep 7 11:36:18 2016 -0700
Committer: tedyu 
Committed: Wed Sep 7 11:36:18 2016 -0700

--
 .../org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java| 2 +-
 .../src/main/java/org/apache/hadoop/hbase/master/HMaster.java  | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/354706a7/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
index e74aeb4..1a7d6a0 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
@@ -763,7 +763,7 @@ public class RecoverableZooKeeper {
   }
 
   public void sync(String path, AsyncCallback.VoidCallback cb, Object ctx) 
throws KeeperException {
-checkZk().sync(path, null, null);
+checkZk().sync(path, cb, null);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/354706a7/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 8952a66..b1051f5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -199,6 +199,7 @@ import com.google.protobuf.Service;
 public class HMaster extends HRegionServer implements MasterServices, Server {
   private static final Log LOG = LogFactory.getLog(HMaster.class.getName());
 
+
   /**
* Protection against zombie master. Started once Master accepts active 
responsibility and
* starts taking over responsibilities. Allows a finite time window before 
giving up ownership.



hbase git commit: HBASE-16572 Sync method in RecoverableZooKeeper failed to pass callback function in (Allan Yang)

2016-09-07 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master 7363a7666 -> e9cfbfd10


HBASE-16572 Sync method in RecoverableZooKeeper failed to pass callback 
function in (Allan Yang)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e9cfbfd1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e9cfbfd1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e9cfbfd1

Branch: refs/heads/master
Commit: e9cfbfd107ab1e368b48b9c44b71e02a81a6ca19
Parents: 7363a76
Author: tedyu 
Authored: Wed Sep 7 11:35:35 2016 -0700
Committer: tedyu 
Committed: Wed Sep 7 11:35:35 2016 -0700

--
 .../org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java| 2 +-
 .../src/main/java/org/apache/hadoop/hbase/master/HMaster.java  | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e9cfbfd1/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
index e8db4da..371279e 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
@@ -763,7 +763,7 @@ public class RecoverableZooKeeper {
   }
 
   public void sync(String path, AsyncCallback.VoidCallback cb, Object ctx) 
throws KeeperException {
-checkZk().sync(path, null, null);
+checkZk().sync(path, cb, null);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/e9cfbfd1/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index c643fa8..0e07ae0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -196,6 +196,7 @@ import org.mortbay.jetty.servlet.Context;
 public class HMaster extends HRegionServer implements MasterServices {
   private static final Log LOG = LogFactory.getLog(HMaster.class.getName());
 
+
   /**
* Protection against zombie master. Started once Master accepts active 
responsibility and
* starts taking over responsibilities. Allows a finite time window before 
giving up ownership.



[1/3] hbase git commit: HBASE-15565 Rewrite restore with Procedure V2

2016-09-07 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/HBASE-7912 28737d05f -> e01ac71fb


http://git-wip-us.apache.org/repos/asf/hbase/blob/e01ac71f/hbase-protocol/src/main/protobuf/Master.proto
--
diff --git a/hbase-protocol/src/main/protobuf/Master.proto 
b/hbase-protocol/src/main/protobuf/Master.proto
index 6431c73..66a11b4 100644
--- a/hbase-protocol/src/main/protobuf/Master.proto
+++ b/hbase-protocol/src/main/protobuf/Master.proto
@@ -554,6 +554,26 @@ message BackupTablesResponse {
   optional string backup_id = 2;
 }
 
+enum RestoreTablesState {
+  VALIDATION = 1;
+  RESTORE_IMAGES = 2;
+}
+
+message RestoreTablesRequest {
+  required string backup_id = 1;
+  repeated TableName tables = 2;
+  repeated TableName target_tables = 3;
+  required string backup_root_dir = 4;
+  optional bool dependency_check_only = 5;
+  optional bool overwrite = 6;
+  optional uint64 nonce_group = 7 [default = 0];
+  optional uint64 nonce = 8 [default = 0];
+}
+
+message RestoreTablesResponse {
+  optional uint64 proc_id = 1;
+}
+
 service MasterService {
   /** Used by the client to get the number of regions that have received the 
updated schema */
   rpc GetSchemaAlterStatus(GetSchemaAlterStatusRequest)
@@ -832,4 +852,8 @@ service MasterService {
   /** backup table set */
   rpc backupTables(BackupTablesRequest)
 returns(BackupTablesResponse);
+
+  /** restore table set */
+  rpc restoreTables(RestoreTablesRequest)
+returns(RestoreTablesResponse);
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/e01ac71f/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java
index c66ac6e..3fd0c33 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java
@@ -30,6 +30,8 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.impl.BackupRestoreConstants;
 import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
 import org.apache.hadoop.hbase.backup.util.BackupServerUtil;
+import org.apache.hadoop.hbase.backup.util.RestoreServerUtil;
+import org.apache.hadoop.hbase.client.BackupAdmin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
@@ -152,11 +154,10 @@ public class RestoreDriver extends AbstractHBaseTool {
   return -4;
 }
 
-
-RestoreClient client = 
BackupRestoreClientFactory.getRestoreClient(getConf());
-try{
-  client.restore(backupRootDir, backupId, check, sTableArray,
-tTableArray, isOverwrite);
+try (final Connection conn = ConnectionFactory.createConnection(conf);
+BackupAdmin client = conn.getAdmin().getBackupAdmin();) {
+  client.restore(RestoreServerUtil.createRestoreRequest(backupRootDir, 
backupId, check,
+  sTableArray, tTableArray, isOverwrite));
 } catch (Exception e){
   e.printStackTrace();
   return -5;

http://git-wip-us.apache.org/repos/asf/hbase/blob/e01ac71f/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/RestoreClientImpl.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/RestoreClientImpl.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/RestoreClientImpl.java
deleted file mode 100644
index 7f23ce0..000
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/RestoreClientImpl.java
+++ /dev/null
@@ -1,300 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.backup.impl;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map.Entry;
-import java.util.TreeSet;
-
-import 

[2/3] hbase git commit: HBASE-15565 Rewrite restore with Procedure V2

2016-09-07 Thread tedyu
http://git-wip-us.apache.org/repos/asf/hbase/blob/e01ac71f/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
--
diff --git 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
index b5b6b4c..8b383c7 100644
--- 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
+++ 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
@@ -90,6 +90,88 @@ public final class MasterProtos {
 // @@protoc_insertion_point(enum_scope:hbase.pb.MasterSwitchType)
   }
 
+  /**
+   * Protobuf enum {@code hbase.pb.RestoreTablesState}
+   */
+  public enum RestoreTablesState
+  implements com.google.protobuf.ProtocolMessageEnum {
+/**
+ * VALIDATION = 1;
+ */
+VALIDATION(0, 1),
+/**
+ * RESTORE_IMAGES = 2;
+ */
+RESTORE_IMAGES(1, 2),
+;
+
+/**
+ * VALIDATION = 1;
+ */
+public static final int VALIDATION_VALUE = 1;
+/**
+ * RESTORE_IMAGES = 2;
+ */
+public static final int RESTORE_IMAGES_VALUE = 2;
+
+
+public final int getNumber() { return value; }
+
+public static RestoreTablesState valueOf(int value) {
+  switch (value) {
+case 1: return VALIDATION;
+case 2: return RESTORE_IMAGES;
+default: return null;
+  }
+}
+
+public static com.google.protobuf.Internal.EnumLiteMap
+internalGetValueMap() {
+  return internalValueMap;
+}
+private static com.google.protobuf.Internal.EnumLiteMap
+internalValueMap =
+  new com.google.protobuf.Internal.EnumLiteMap() {
+public RestoreTablesState findValueByNumber(int number) {
+  return RestoreTablesState.valueOf(number);
+}
+  };
+
+public final com.google.protobuf.Descriptors.EnumValueDescriptor
+getValueDescriptor() {
+  return getDescriptor().getValues().get(index);
+}
+public final com.google.protobuf.Descriptors.EnumDescriptor
+getDescriptorForType() {
+  return getDescriptor();
+}
+public static final com.google.protobuf.Descriptors.EnumDescriptor
+getDescriptor() {
+  return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.getDescriptor().getEnumTypes().get(1);
+}
+
+private static final RestoreTablesState[] VALUES = values();
+
+public static RestoreTablesState valueOf(
+com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+  if (desc.getType() != getDescriptor()) {
+throw new java.lang.IllegalArgumentException(
+  "EnumValueDescriptor is not for this type.");
+  }
+  return VALUES[desc.getIndex()];
+}
+
+private final int index;
+private final int value;
+
+private RestoreTablesState(int index, int value) {
+  this.index = index;
+  this.value = value;
+}
+
+// @@protoc_insertion_point(enum_scope:hbase.pb.RestoreTablesState)
+  }
+
   public interface AddColumnRequestOrBuilder
   extends com.google.protobuf.MessageOrBuilder {
 
@@ -60998,220 +61080,2464 @@ public final class MasterProtos {
 // @@protoc_insertion_point(class_scope:hbase.pb.BackupTablesResponse)
   }
 
-  /**
-   * Protobuf service {@code hbase.pb.MasterService}
-   */
-  public static abstract class MasterService
-  implements com.google.protobuf.Service {
-protected MasterService() {}
+  public interface RestoreTablesRequestOrBuilder
+  extends com.google.protobuf.MessageOrBuilder {
 
-public interface Interface {
-  /**
-   * rpc GetSchemaAlterStatus(.hbase.pb.GetSchemaAlterStatusRequest) 
returns (.hbase.pb.GetSchemaAlterStatusResponse);
-   *
-   * 
-   ** Used by the client to get the number of regions that have received 
the updated schema 
-   * 
-   */
-  public abstract void getSchemaAlterStatus(
-  com.google.protobuf.RpcController controller,
-  
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest
 request,
-  
com.google.protobuf.RpcCallback
 done);
+// required string backup_id = 1;
+/**
+ * required string backup_id = 1;
+ */
+boolean hasBackupId();
+/**
+ * required string backup_id = 1;
+ */
+java.lang.String getBackupId();
+/**
+ * required string backup_id = 1;
+ */
+com.google.protobuf.ByteString
+getBackupIdBytes();
 
-  /**
-   * rpc GetTableDescriptors(.hbase.pb.GetTableDescriptorsRequest) 
returns (.hbase.pb.GetTableDescriptorsResponse);
-   *
-   * 
-   ** Get list of TableDescriptors for requested tables. 
-   * 
-   */
-  public abstract void getTableDescriptors(
-  com.google.protobuf.RpcController controller,
-  

[3/3] hbase git commit: HBASE-15565 Rewrite restore with Procedure V2

2016-09-07 Thread tedyu
HBASE-15565 Rewrite restore with Procedure V2


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e01ac71f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e01ac71f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e01ac71f

Branch: refs/heads/HBASE-7912
Commit: e01ac71fba752d1eab00a62a2961c515c551a6b7
Parents: 28737d0
Author: tedyu 
Authored: Wed Sep 7 10:27:19 2016 -0700
Committer: tedyu 
Committed: Wed Sep 7 10:27:19 2016 -0700

--
 .../backup/BackupRestoreClientFactory.java  |   55 -
 .../hadoop/hbase/backup/RestoreClient.java  |   48 -
 .../hadoop/hbase/backup/RestoreRequest.java |   18 +-
 .../org/apache/hadoop/hbase/client/Admin.java   |   16 +-
 .../apache/hadoop/hbase/client/BackupAdmin.java |   23 +-
 .../hbase/client/ConnectionImplementation.java  |7 +
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |   50 +
 .../hadoop/hbase/client/HBaseBackupAdmin.java   |   10 +-
 .../hadoop/hbase/protobuf/RequestConverter.java |   24 +
 .../hbase/protobuf/generated/MasterProtos.java  | 3103 --
 hbase-protocol/src/main/protobuf/Master.proto   |   24 +
 .../hadoop/hbase/backup/RestoreDriver.java  |   11 +-
 .../hbase/backup/impl/RestoreClientImpl.java|  300 --
 .../backup/impl/RestoreTablesProcedure.java |  398 +++
 .../hbase/backup/util/RestoreServerUtil.java|   47 +
 .../org/apache/hadoop/hbase/master/HMaster.java |   33 +
 .../hadoop/hbase/master/MasterRpcServices.java  |   21 +
 .../hadoop/hbase/master/MasterServices.java |7 +
 .../procedure/TableProcedureInterface.java  |2 +-
 .../hadoop/hbase/backup/TestBackupBase.java |   16 +-
 .../hbase/backup/TestBackupDeleteRestore.java   |   13 +-
 .../hadoop/hbase/backup/TestFullRestore.java|   43 +-
 .../hbase/backup/TestIncrementalBackup.java |7 +-
 .../TestIncrementalBackupDeleteTable.java   |7 +-
 .../hadoop/hbase/backup/TestRemoteBackup.java   |5 +-
 .../hadoop/hbase/backup/TestRemoteRestore.java  |5 +-
 .../hbase/backup/TestRestoreBoundaryTests.java  |   11 +-
 .../hadoop/hbase/master/TestCatalogJanitor.java |6 +
 28 files changed, 3499 insertions(+), 811 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e01ac71f/hbase-client/src/main/java/org/apache/hadoop/hbase/backup/BackupRestoreClientFactory.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/backup/BackupRestoreClientFactory.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/backup/BackupRestoreClientFactory.java
deleted file mode 100644
index b60ab21..000
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/backup/BackupRestoreClientFactory.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.backup;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.util.ReflectionUtils;
-
-@InterfaceAudience.Private
-@InterfaceStability.Evolving
-public final class BackupRestoreClientFactory {
-  private static final Log LOG = 
LogFactory.getLog(BackupRestoreClientFactory.class);
-
-  private BackupRestoreClientFactory(){
-throw new AssertionError("Instantiating utility class...");
-  }
-  
-
-  /**
-   * Gets restore client implementation
-   * @param conf - configuration
-   * @return backup client
-   */
-  public static RestoreClient getRestoreClient(Configuration conf) {
-try{
-  Class cls =
-
conf.getClassByName("org.apache.hadoop.hbase.backup.impl.RestoreClientImpl");
- 
-  RestoreClient client = (RestoreClient) ReflectionUtils.newInstance(cls, 
conf);
-  client.setConf(conf);
-  return client;
-} catch(Exception e){
- 

hbase git commit: HBASE-16562 ITBLL should fail to start if misconfigured

2016-09-07 Thread chenheng
Repository: hbase
Updated Branches:
  refs/heads/0.98 daf57832d -> 66a82af9f


HBASE-16562 ITBLL should fail to start if misconfigured


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/66a82af9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/66a82af9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/66a82af9

Branch: refs/heads/0.98
Commit: 66a82af9f47230b1972a32bea50beb785fb7997a
Parents: daf5783
Author: chenheng 
Authored: Tue Sep 6 11:02:18 2016 +0800
Committer: chenheng 
Committed: Wed Sep 7 16:33:32 2016 +0800

--
 .../test/IntegrationTestBigLinkedList.java  | 34 ++--
 1 file changed, 24 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/66a82af9/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
index d0df043..8f7539b 100644
--- 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
+++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
@@ -218,6 +218,11 @@ public class IntegrationTestBigLinkedList extends 
IntegrationTestBase {
 
 private static final Log LOG = LogFactory.getLog(Generator.class);
 
+public static final String USAGE =  "Usage : " + 
Generator.class.getSimpleName() +
+"[ 
 \n" +
+"where  should be a multiple of width*wrap 
multiplier, " +
+"25M by default \n";
+
 static class GeneratorInputFormat extends 
InputFormat {
   static class GeneratorInputSplit extends InputSplit implements Writable {
 @Override
@@ -434,21 +439,20 @@ public class IntegrationTestBigLinkedList extends 
IntegrationTestBase {
 @Override
 public int run(String[] args) throws Exception {
   if (args.length < 3) {
-System.out.println("Usage : " + Generator.class.getSimpleName() +
-"[ 
]");
-System.out.println("   where  should be a multiple 
of " +
-" width*wrap multiplier, 25M by default");
-return 0;
+System.err.println(USAGE);
+return 1;
   }
 
   int numMappers = Integer.parseInt(args[0]);
   long numNodes = Long.parseLong(args[1]);
   Path tmpOutput = new Path(args[2]);
   Integer width = (args.length < 4) ? null : Integer.parseInt(args[3]);
-  Integer wrapMuplitplier = (args.length < 5) ? null : 
Integer.parseInt(args[4]);
-  return run(numMappers, numNodes, tmpOutput, width, wrapMuplitplier);
+  Integer wrapMultiplier = (args.length < 5) ? null : 
Integer.parseInt(args[4]);
+  return run(numMappers, numNodes, tmpOutput, width, wrapMultiplier);
 }
 
+
+
 protected void createSchema() throws IOException {
   Configuration conf = getConf();
   HBaseAdmin admin = new HBaseAdmin(conf);
@@ -549,12 +553,22 @@ public class IntegrationTestBigLinkedList extends 
IntegrationTestBase {
 }
 
 public int run(int numMappers, long numNodes, Path tmpOutput,
-Integer width, Integer wrapMuplitplier) throws Exception {
-  int ret = runRandomInputGenerator(numMappers, numNodes, tmpOutput, 
width, wrapMuplitplier);
+Integer width, Integer wrapMultiplier) throws Exception {
+  long wrap = (long)width*wrapMultiplier;
+  if (wrap < numNodes && numNodes % wrap != 0) {
+/**
+ *  numNodes should be a multiple of width*wrapMultiplier.
+ *  If numNodes is less than wrap, wrap will be set equal to numNodes,
+ *  See {@link GeneratorMapper#setup(Mapper.Context)}
+ * */
+System.err.println(USAGE);
+return 1;
+  }
+  int ret = runRandomInputGenerator(numMappers, numNodes, tmpOutput, 
width, wrapMultiplier);
   if (ret > 0) {
 return ret;
   }
-  return runGenerator(numMappers, numNodes, tmpOutput, width, 
wrapMuplitplier);
+  return runGenerator(numMappers, numNodes, tmpOutput, width, 
wrapMultiplier);
 }
   }
 



hbase git commit: HBASE-16562 ITBLL should fail to start if misconfigured

2016-09-07 Thread chenheng
Repository: hbase
Updated Branches:
  refs/heads/branch-1.0 fba13a6ef -> a55842a0a


HBASE-16562 ITBLL should fail to start if misconfigured


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a55842a0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a55842a0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a55842a0

Branch: refs/heads/branch-1.0
Commit: a55842a0a86040545eff6692317191acb84032ae
Parents: fba13a6
Author: chenheng 
Authored: Tue Sep 6 11:02:18 2016 +0800
Committer: chenheng 
Committed: Wed Sep 7 16:04:18 2016 +0800

--
 .../test/IntegrationTestBigLinkedList.java  | 34 ++--
 1 file changed, 24 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a55842a0/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
index 99be272..b0c5371 100644
--- 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
+++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
@@ -239,6 +239,11 @@ public class IntegrationTestBigLinkedList extends 
IntegrationTestBase {
 
 private static final Log LOG = LogFactory.getLog(Generator.class);
 
+public static final String USAGE =  "Usage : " + 
Generator.class.getSimpleName() +
+"[ 
 \n" +
+"where  should be a multiple of width*wrap 
multiplier, " +
+"25M by default \n";
+
 static class GeneratorInputFormat extends 
InputFormat {
   static class GeneratorInputSplit extends InputSplit implements Writable {
 @Override
@@ -461,21 +466,20 @@ public class IntegrationTestBigLinkedList extends 
IntegrationTestBase {
 @Override
 public int run(String[] args) throws Exception {
   if (args.length < 3) {
-System.out.println("Usage : " + Generator.class.getSimpleName() +
-"[ 
]");
-System.out.println("   where  should be a multiple 
of " +
-" width*wrap multiplier, 25M by default");
-return 0;
+System.err.println(USAGE);
+return 1;
   }
 
   int numMappers = Integer.parseInt(args[0]);
   long numNodes = Long.parseLong(args[1]);
   Path tmpOutput = new Path(args[2]);
   Integer width = (args.length < 4) ? null : Integer.parseInt(args[3]);
-  Integer wrapMuplitplier = (args.length < 5) ? null : 
Integer.parseInt(args[4]);
-  return run(numMappers, numNodes, tmpOutput, width, wrapMuplitplier);
+  Integer wrapMultiplier = (args.length < 5) ? null : 
Integer.parseInt(args[4]);
+  return run(numMappers, numNodes, tmpOutput, width, wrapMultiplier);
 }
 
+
+
 protected void createSchema() throws IOException {
   Configuration conf = getConf();
   Admin admin = new HBaseAdmin(conf);
@@ -575,12 +579,22 @@ public class IntegrationTestBigLinkedList extends 
IntegrationTestBase {
 }
 
 public int run(int numMappers, long numNodes, Path tmpOutput,
-Integer width, Integer wrapMuplitplier) throws Exception {
-  int ret = runRandomInputGenerator(numMappers, numNodes, tmpOutput, 
width, wrapMuplitplier);
+Integer width, Integer wrapMultiplier) throws Exception {
+  long wrap = (long)width*wrapMultiplier;
+  if (wrap < numNodes && numNodes % wrap != 0) {
+/**
+ *  numNodes should be a multiple of width*wrapMultiplier.
+ *  If numNodes is less than wrap, wrap will be set equal to numNodes,
+ *  See {@link GeneratorMapper#setup(Mapper.Context)}
+ * */
+System.err.println(USAGE);
+return 1;
+  }
+  int ret = runRandomInputGenerator(numMappers, numNodes, tmpOutput, 
width, wrapMultiplier);
   if (ret > 0) {
 return ret;
   }
-  return runGenerator(numMappers, numNodes, tmpOutput, width, 
wrapMuplitplier);
+  return runGenerator(numMappers, numNodes, tmpOutput, width, 
wrapMultiplier);
 }
   }
 



hbase git commit: HBASE-16562 ITBLL should fail to start if misconfigured

2016-09-07 Thread chenheng
Repository: hbase
Updated Branches:
  refs/heads/branch-1.1 6f747178f -> 6e076d940


HBASE-16562 ITBLL should fail to start if misconfigured


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6e076d94
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6e076d94
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6e076d94

Branch: refs/heads/branch-1.1
Commit: 6e076d940f8882d6ac096f6bb2ebdf05feeb4218
Parents: 6f74717
Author: chenheng 
Authored: Tue Sep 6 11:02:18 2016 +0800
Committer: chenheng 
Committed: Wed Sep 7 15:56:30 2016 +0800

--
 .../test/IntegrationTestBigLinkedList.java  | 35 ++--
 1 file changed, 25 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6e076d94/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
index f3d8360..a06e91d 100644
--- 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
+++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
@@ -116,6 +116,7 @@ import 
org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.jruby.RubyProcess;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
@@ -257,6 +258,11 @@ public class IntegrationTestBigLinkedList extends 
IntegrationTestBase {
 public static final String MULTIPLE_UNEVEN_COLUMNFAMILIES_KEY =
 "generator.multiple.columnfamilies";
 
+public static final String USAGE =  "Usage : " + 
Generator.class.getSimpleName() +
+"[ 
 \n" +
+"where  should be a multiple of width*wrap 
multiplier, " +
+"25M by default \n";
+
 static class GeneratorInputFormat extends 
InputFormat {
   static class GeneratorInputSplit extends InputSplit implements Writable {
 @Override
@@ -499,21 +505,20 @@ public class IntegrationTestBigLinkedList extends 
IntegrationTestBase {
 @Override
 public int run(String[] args) throws Exception {
   if (args.length < 3) {
-System.out.println("Usage : " + Generator.class.getSimpleName() +
-"[ 
]");
-System.out.println("   where  should be a multiple 
of " +
-" width*wrap multiplier, 25M by default");
-return 0;
+System.err.println(USAGE);
+return 1;
   }
 
   int numMappers = Integer.parseInt(args[0]);
   long numNodes = Long.parseLong(args[1]);
   Path tmpOutput = new Path(args[2]);
   Integer width = (args.length < 4) ? null : Integer.parseInt(args[3]);
-  Integer wrapMuplitplier = (args.length < 5) ? null : 
Integer.parseInt(args[4]);
-  return run(numMappers, numNodes, tmpOutput, width, wrapMuplitplier);
+  Integer wrapMultiplier = (args.length < 5) ? null : 
Integer.parseInt(args[4]);
+  return run(numMappers, numNodes, tmpOutput, width, wrapMultiplier);
 }
 
+
+
 protected void createSchema() throws IOException {
   Configuration conf = getConf();
   TableName tableName = getTableName(conf);
@@ -619,12 +624,22 @@ public class IntegrationTestBigLinkedList extends 
IntegrationTestBase {
 }
 
 public int run(int numMappers, long numNodes, Path tmpOutput,
-Integer width, Integer wrapMuplitplier) throws Exception {
-  int ret = runRandomInputGenerator(numMappers, numNodes, tmpOutput, 
width, wrapMuplitplier);
+Integer width, Integer wrapMultiplier) throws Exception {
+  long wrap = (long)width*wrapMultiplier;
+  if (wrap < numNodes && numNodes % wrap != 0) {
+/**
+ *  numNodes should be a multiple of width*wrapMultiplier.
+ *  If numNodes is less than wrap, wrap will be set equal to numNodes,
+ *  See {@link GeneratorMapper#setup(Mapper.Context)}
+ * */
+System.err.println(USAGE);
+return 1;
+  }
+  int ret = runRandomInputGenerator(numMappers, numNodes, tmpOutput, 
width, wrapMultiplier);
   if (ret > 0) {
 return ret;
   }
-  return runGenerator(numMappers, numNodes, tmpOutput, width, 
wrapMuplitplier);
+  return runGenerator(numMappers, numNodes, tmpOutput, width, 
wrapMultiplier);
 }
   }
 



hbase git commit: HBASE-16562 ITBLL should fail to start if misconfigured

2016-09-07 Thread chenheng
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 0bdcfd90e -> a13f31aa7


HBASE-16562 ITBLL should fail to start if misconfigured


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a13f31aa
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a13f31aa
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a13f31aa

Branch: refs/heads/branch-1.2
Commit: a13f31aa7e83fd7b5ae6186f8ca9899427d0ef0d
Parents: 0bdcfd9
Author: chenheng 
Authored: Tue Sep 6 11:02:18 2016 +0800
Committer: chenheng 
Committed: Wed Sep 7 15:03:54 2016 +0800

--
 .../test/IntegrationTestBigLinkedList.java  | 35 ++--
 1 file changed, 25 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a13f31aa/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
index 7f0f732..880b13f 100644
--- 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
+++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
@@ -116,6 +116,7 @@ import 
org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.jruby.RubyProcess;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
@@ -257,6 +258,11 @@ public class IntegrationTestBigLinkedList extends 
IntegrationTestBase {
 public static final String MULTIPLE_UNEVEN_COLUMNFAMILIES_KEY =
 "generator.multiple.columnfamilies";
 
+public static final String USAGE =  "Usage : " + 
Generator.class.getSimpleName() +
+"[ 
 \n" +
+"where  should be a multiple of width*wrap 
multiplier, " +
+"25M by default \n";
+
 static class GeneratorInputFormat extends 
InputFormat {
   static class GeneratorInputSplit extends InputSplit implements Writable {
 @Override
@@ -498,21 +504,20 @@ public class IntegrationTestBigLinkedList extends 
IntegrationTestBase {
 @Override
 public int run(String[] args) throws Exception {
   if (args.length < 3) {
-System.out.println("Usage : " + Generator.class.getSimpleName() +
-"[ 
]");
-System.out.println("   where  should be a multiple 
of " +
-" width*wrap multiplier, 25M by default");
-return 0;
+System.err.println(USAGE);
+return 1;
   }
 
   int numMappers = Integer.parseInt(args[0]);
   long numNodes = Long.parseLong(args[1]);
   Path tmpOutput = new Path(args[2]);
   Integer width = (args.length < 4) ? null : Integer.parseInt(args[3]);
-  Integer wrapMuplitplier = (args.length < 5) ? null : 
Integer.parseInt(args[4]);
-  return run(numMappers, numNodes, tmpOutput, width, wrapMuplitplier);
+  Integer wrapMultiplier = (args.length < 5) ? null : 
Integer.parseInt(args[4]);
+  return run(numMappers, numNodes, tmpOutput, width, wrapMultiplier);
 }
 
+
+
 protected void createSchema() throws IOException {
   Configuration conf = getConf();
   TableName tableName = getTableName(conf);
@@ -624,12 +629,22 @@ public class IntegrationTestBigLinkedList extends 
IntegrationTestBase {
 }
 
 public int run(int numMappers, long numNodes, Path tmpOutput,
-Integer width, Integer wrapMuplitplier) throws Exception {
-  int ret = runRandomInputGenerator(numMappers, numNodes, tmpOutput, 
width, wrapMuplitplier);
+Integer width, Integer wrapMultiplier) throws Exception {
+  long wrap = (long)width*wrapMultiplier;
+  if (wrap < numNodes && numNodes % wrap != 0) {
+/**
+ *  numNodes should be a multiple of width*wrapMultiplier.
+ *  If numNodes is less than wrap, wrap will be set equal to numNodes,
+ *  See {@link GeneratorMapper#setup(Mapper.Context)}
+ * */
+System.err.println(USAGE);
+return 1;
+  }
+  int ret = runRandomInputGenerator(numMappers, numNodes, tmpOutput, 
width, wrapMultiplier);
   if (ret > 0) {
 return ret;
   }
-  return runGenerator(numMappers, numNodes, tmpOutput, width, 
wrapMuplitplier);
+  return runGenerator(numMappers, numNodes, tmpOutput, width, 
wrapMultiplier);
 }
   }
 



hbase git commit: HBASE-16562 ITBLL should fail to start if misconfigured, addendum

2016-09-07 Thread chenheng
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 046123ff1 -> f7b4ecb46


HBASE-16562 ITBLL should fail to start if misconfigured, addendum


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f7b4ecb4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f7b4ecb4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f7b4ecb4

Branch: refs/heads/branch-1.3
Commit: f7b4ecb4685937e9a12b07e2677ed4f59f4dfd7d
Parents: 046123f
Author: chenheng 
Authored: Wed Sep 7 15:29:55 2016 +0800
Committer: chenheng 
Committed: Wed Sep 7 15:49:02 2016 +0800

--
 .../test/IntegrationTestBigLinkedList.java  | 21 ++--
 1 file changed, 10 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f7b4ecb4/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
index 366fc02..87ac1f7 100644
--- 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
+++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
@@ -678,17 +678,6 @@ public class IntegrationTestBigLinkedList extends 
IntegrationTestBase {
 Integer width = (args.length < 4) ? null : Integer.parseInt(args[3]);
 Integer wrapMultiplier = (args.length < 5) ? null : 
Integer.parseInt(args[4]);
 Integer numWalkers = (args.length < 6) ? null : 
Integer.parseInt(args[5]);
-
-long wrap = (long)width*wrapMultiplier;
-if (wrap < numNodes && numNodes % wrap != 0) {
-  /**
-   *  numNodes should be a multiple of width*wrapMultiplier.
-   *  If numNodes less than wrap, wrap will be set to be equal with 
numNodes,
-   *  See {@link GeneratorMapper#setup(Mapper.Context)}
-   * */
-  System.err.println(USAGE);
-  return 1;
-}
 return run(numMappers, numNodes, tmpOutput, width, wrapMultiplier, 
numWalkers);
   } catch (NumberFormatException e) {
 System.err.println("Parsing generator arguments failed: " + 
e.getMessage());
@@ -813,6 +802,16 @@ public class IntegrationTestBigLinkedList extends 
IntegrationTestBase {
 public int run(int numMappers, long numNodes, Path tmpOutput,
 Integer width, Integer wrapMultiplier, Integer numWalkers)
 throws Exception {
+  long wrap = (long)width*wrapMultiplier;
+  if (wrap < numNodes && numNodes % wrap != 0) {
+/**
+ *  numNodes should be a multiple of width*wrapMultiplier.
+ *  If numNodes is less than wrap, wrap will be set equal to numNodes,
+ *  See {@link GeneratorMapper#setup(Mapper.Context)}
+ * */
+System.err.println(USAGE);
+return 1;
+  }
   int ret = runRandomInputGenerator(numMappers, numNodes, tmpOutput, 
width, wrapMultiplier,
   numWalkers);
   if (ret > 0) {



hbase git commit: HBASE-16562 ITBLL should fail to start if misconfigured, addendum

2016-09-07 Thread chenheng
Repository: hbase
Updated Branches:
  refs/heads/branch-1 38b946c27 -> 6f73ef2df


HBASE-16562 ITBLL should fail to start if misconfigured, addendum


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6f73ef2d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6f73ef2d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6f73ef2d

Branch: refs/heads/branch-1
Commit: 6f73ef2dff46692ede976621f3e4b9e5cfae01fa
Parents: 38b946c
Author: chenheng 
Authored: Wed Sep 7 15:29:55 2016 +0800
Committer: chenheng 
Committed: Wed Sep 7 15:45:09 2016 +0800

--
 .../test/IntegrationTestBigLinkedList.java  | 21 ++--
 1 file changed, 10 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6f73ef2d/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
index 366fc02..87ac1f7 100644
--- 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
+++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
@@ -678,17 +678,6 @@ public class IntegrationTestBigLinkedList extends 
IntegrationTestBase {
 Integer width = (args.length < 4) ? null : Integer.parseInt(args[3]);
 Integer wrapMultiplier = (args.length < 5) ? null : 
Integer.parseInt(args[4]);
 Integer numWalkers = (args.length < 6) ? null : 
Integer.parseInt(args[5]);
-
-long wrap = (long)width*wrapMultiplier;
-if (wrap < numNodes && numNodes % wrap != 0) {
-  /**
-   *  numNodes should be a multiple of width*wrapMultiplier.
-   *  If numNodes less than wrap, wrap will be set to be equal with 
numNodes,
-   *  See {@link GeneratorMapper#setup(Mapper.Context)}
-   * */
-  System.err.println(USAGE);
-  return 1;
-}
 return run(numMappers, numNodes, tmpOutput, width, wrapMultiplier, 
numWalkers);
   } catch (NumberFormatException e) {
 System.err.println("Parsing generator arguments failed: " + 
e.getMessage());
@@ -813,6 +802,16 @@ public class IntegrationTestBigLinkedList extends 
IntegrationTestBase {
 public int run(int numMappers, long numNodes, Path tmpOutput,
 Integer width, Integer wrapMultiplier, Integer numWalkers)
 throws Exception {
+  long wrap = (long)width*wrapMultiplier;
+  if (wrap < numNodes && numNodes % wrap != 0) {
+/**
+ *  numNodes should be a multiple of width*wrapMultiplier.
+ *  If numNodes is less than wrap, wrap will be set equal to numNodes,
+ *  See {@link GeneratorMapper#setup(Mapper.Context)}
+ * */
+System.err.println(USAGE);
+return 1;
+  }
   int ret = runRandomInputGenerator(numMappers, numNodes, tmpOutput, 
width, wrapMultiplier,
   numWalkers);
   if (ret > 0) {



hbase git commit: HBASE-16562 ITBLL should fail to start if misconfigured, addendum

2016-09-07 Thread chenheng
Repository: hbase
Updated Branches:
  refs/heads/master 78af20944 -> 7363a7666


HBASE-16562 ITBLL should fail to start if misconfigured, addendum


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7363a766
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7363a766
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7363a766

Branch: refs/heads/master
Commit: 7363a76660583492c5c037113ac06cfef92e034e
Parents: 78af209
Author: chenheng 
Authored: Wed Sep 7 15:29:55 2016 +0800
Committer: chenheng 
Committed: Wed Sep 7 15:29:55 2016 +0800

--
 .../test/IntegrationTestBigLinkedList.java  | 21 ++--
 1 file changed, 10 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7363a766/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
index 41aa128..07c5cf2 100644
--- 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
+++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
@@ -676,17 +676,6 @@ public class IntegrationTestBigLinkedList extends 
IntegrationTestBase {
 Integer width = (args.length < 4) ? null : Integer.parseInt(args[3]);
 Integer wrapMultiplier = (args.length < 5) ? null : 
Integer.parseInt(args[4]);
 Integer numWalkers = (args.length < 6) ? null : 
Integer.parseInt(args[5]);
-
-long wrap = (long)width*wrapMultiplier;
-if (wrap < numNodes && numNodes % wrap != 0) {
-  /**
-   *  numNodes should be a multiple of width*wrapMultiplier.
-   *  If numNodes less than wrap, wrap will be set to be equal with 
numNodes,
-   *  See {@link GeneratorMapper#setup(Mapper.Context)}
-   * */
-  System.err.println(USAGE);
-  return 1;
-}
 return run(numMappers, numNodes, tmpOutput, width, wrapMultiplier, 
numWalkers);
   } catch (NumberFormatException e) {
 System.err.println("Parsing generator arguments failed: " + 
e.getMessage());
@@ -818,6 +807,16 @@ public class IntegrationTestBigLinkedList extends 
IntegrationTestBase {
 public int run(int numMappers, long numNodes, Path tmpOutput,
 Integer width, Integer wrapMultiplier, Integer numWalkers)
 throws Exception {
+  long wrap = (long)width*wrapMultiplier;
+  if (wrap < numNodes && numNodes % wrap != 0) {
+/**
+ *  numNodes should be a multiple of width*wrapMultiplier.
+ *  If numNodes is less than wrap, wrap will be set equal to numNodes,
+ *  See {@link GeneratorMapper#setup(Mapper.Context)}
+ * */
+System.err.println(USAGE);
+return 1;
+  }
   int ret = runRandomInputGenerator(numMappers, numNodes, tmpOutput, 
width, wrapMultiplier,
   numWalkers);
   if (ret > 0) {



hbase git commit: HBASE-16562 ITBLL should fail to start if misconfigured

2016-09-07 Thread chenheng
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 2530258c1 -> 046123ff1


HBASE-16562 ITBLL should fail to start if misconfigured


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/046123ff
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/046123ff
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/046123ff

Branch: refs/heads/branch-1.3
Commit: 046123ff14586e6d6d1ec562cd6064ee9ab10edb
Parents: 2530258
Author: chenheng 
Authored: Tue Sep 6 11:02:18 2016 +0800
Committer: chenheng 
Committed: Wed Sep 7 13:58:36 2016 +0800

--
 .../hadoop/hbase/test/IntegrationTestBigLinkedList.java  | 11 +++
 1 file changed, 11 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/046123ff/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
--
diff --git 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
index 2d828f4..366fc02 100644
--- 
a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
+++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
@@ -678,6 +678,17 @@ public class IntegrationTestBigLinkedList extends 
IntegrationTestBase {
 Integer width = (args.length < 4) ? null : Integer.parseInt(args[3]);
 Integer wrapMultiplier = (args.length < 5) ? null : 
Integer.parseInt(args[4]);
 Integer numWalkers = (args.length < 6) ? null : 
Integer.parseInt(args[5]);
+
+long wrap = (long)width*wrapMultiplier;
+if (wrap < numNodes && numNodes % wrap != 0) {
+  /**
+   *  numNodes should be a multiple of width*wrapMultiplier.
+ *  If numNodes is less than wrap, wrap will be set equal to numNodes,
+   *  See {@link GeneratorMapper#setup(Mapper.Context)}
+   * */
+  System.err.println(USAGE);
+  return 1;
+}
 return run(numMappers, numNodes, tmpOutput, width, wrapMultiplier, 
numWalkers);
   } catch (NumberFormatException e) {
 System.err.println("Parsing generator arguments failed: " + 
e.getMessage());