hbase git commit: HBASE-18056 Make the default behavior of CompactionPipeline to merge its segments into one, due to better read performance in this case
Repository: hbase Updated Branches: refs/heads/master 8b70d043e -> 1520c8fd4 HBASE-18056 Make the default behavior of CompactionPipeline to merge it segments into one, due to better read performance in this case Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1520c8fd Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1520c8fd Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1520c8fd Branch: refs/heads/master Commit: 1520c8fd4dd70206ab1abdf3eed81d6dc302990b Parents: 8b70d04 Author: anastasAuthored: Sun May 21 12:27:57 2017 +0300 Committer: anastas Committed: Sun May 21 12:27:57 2017 +0300 -- .../org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java| 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/1520c8fd/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java index 0d3f47e..08af7fe 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java @@ -50,7 +50,7 @@ public class MemStoreCompactor { public static final String COMPACTING_MEMSTORE_THRESHOLD_KEY = "hbase.hregion.compacting.pipeline.segments.limit"; // remaining with the same ("infinity") but configurable default for now - public static final int COMPACTING_MEMSTORE_THRESHOLD_DEFAULT = 30; + public static final int COMPACTING_MEMSTORE_THRESHOLD_DEFAULT = 1; public static final long DEEP_OVERHEAD = ClassSize .align(ClassSize.OBJECT
hbase git commit: HBASE-18035 Meta replica does not give any primaryOperationTimeout to primary meta region (huaxiang sun)
Repository: hbase Updated Branches: refs/heads/branch-1 300c5388f -> c03f003f4 HBASE-18035 Meta replica does not give any primaryOperationTimeout to primary meta region (huaxiang sun) Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c03f003f Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c03f003f Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c03f003f Branch: refs/heads/branch-1 Commit: c03f003f4f0301091fdcb1753691bcf7d46bd38c Parents: 300c538 Author: tedyuAuthored: Sun May 21 06:39:33 2017 -0700 Committer: tedyu Committed: Sun May 21 06:39:33 2017 -0700 -- .../hbase/client/ConnectionConfiguration.java | 18 +++-- .../hadoop/hbase/client/ConnectionManager.java | 17 - .../org/apache/hadoop/hbase/HConstants.java | 7 ++ .../hbase/client/TestReplicaWithCluster.java| 76 4 files changed, 112 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/c03f003f/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionConfiguration.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionConfiguration.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionConfiguration.java index de760d4..76dcdfa 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionConfiguration.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionConfiguration.java @@ -40,6 +40,7 @@ public class ConnectionConfiguration { private final long scannerMaxResultSize; private final int primaryCallTimeoutMicroSecond; private final int replicaCallTimeoutMicroSecondScan; + private final int metaReplicaCallTimeoutMicroSecondScan; private final int retries; private final int maxKeyValueSize; @@ -50,9 +51,8 @@ public class ConnectionConfiguration { ConnectionConfiguration(Configuration conf) { this.writeBufferSize = conf.getLong(WRITE_BUFFER_SIZE_KEY, WRITE_BUFFER_SIZE_DEFAULT); 
-this.metaOperationTimeout = conf.getInt( - HConstants.HBASE_CLIENT_META_OPERATION_TIMEOUT, - HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT); +this.metaOperationTimeout = conf.getInt(HConstants.HBASE_CLIENT_META_OPERATION_TIMEOUT, +HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT); this.operationTimeout = conf.getInt( HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT); @@ -62,7 +62,7 @@ public class ConnectionConfiguration { this.scannerMaxResultSize = conf.getLong(HConstants.HBASE_CLIENT_SCANNER_MAX_RESULT_SIZE_KEY, - HConstants.DEFAULT_HBASE_CLIENT_SCANNER_MAX_RESULT_SIZE); +HConstants.DEFAULT_HBASE_CLIENT_SCANNER_MAX_RESULT_SIZE); this.primaryCallTimeoutMicroSecond = conf.getInt("hbase.client.primaryCallTimeout.get", 1); // 10ms @@ -70,6 +70,10 @@ public class ConnectionConfiguration { this.replicaCallTimeoutMicroSecondScan = conf.getInt("hbase.client.replicaCallTimeout.scan", 100); // 1000 ms +this.metaReplicaCallTimeoutMicroSecondScan = +conf.getInt(HConstants.HBASE_CLIENT_MEAT_REPLICA_SCAN_TIMEOUT, +HConstants.HBASE_CLIENT_MEAT_REPLICA_SCAN_TIMEOUT_DEFAULT); + this.retries = conf.getInt( HConstants.HBASE_CLIENT_RETRIES_NUMBER, HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER); @@ -90,6 +94,8 @@ public class ConnectionConfiguration { this.scannerMaxResultSize = HConstants.DEFAULT_HBASE_CLIENT_SCANNER_MAX_RESULT_SIZE; this.primaryCallTimeoutMicroSecond = 1; this.replicaCallTimeoutMicroSecondScan = 100; +this.metaReplicaCallTimeoutMicroSecondScan = +HConstants.HBASE_CLIENT_MEAT_REPLICA_SCAN_TIMEOUT_DEFAULT; this.retries = HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER; this.maxKeyValueSize = MAX_KEYVALUE_SIZE_DEFAULT; } @@ -118,6 +124,10 @@ public class ConnectionConfiguration { return replicaCallTimeoutMicroSecondScan; } + public int getMetaReplicaCallTimeoutMicroSecondScan() { +return metaReplicaCallTimeoutMicroSecondScan; + } + public int getRetriesNumber() { return retries; } 
http://git-wip-us.apache.org/repos/asf/hbase/blob/c03f003f/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java index 60d4217..aa44070 100644 ---
hbase git commit: HBASE-18081 The way we process connection preamble in SimpleRpcServer is broken
Repository: hbase Updated Branches: refs/heads/branch-1.2 5c82c8236 -> 71bf5afa3 HBASE-18081 The way we process connection preamble in SimpleRpcServer is broken Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/71bf5afa Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/71bf5afa Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/71bf5afa Branch: refs/heads/branch-1.2 Commit: 71bf5afa37343245017ab755a59d9fc3fe3aa6ce Parents: 5c82c82 Author: zhangduoAuthored: Sat May 20 21:58:45 2017 +0800 Committer: zhangduo Committed: Sun May 21 22:03:58 2017 +0800 -- .../org/apache/hadoop/hbase/ipc/RpcServer.java | 48 .../hadoop/hbase/ipc/AbstractTestIPC.java | 4 +- .../ipc/TestRpcServerSlowConnectionSetup.java | 116 +++ 3 files changed, 146 insertions(+), 22 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/71bf5afa/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java index 2fde8a8..45b0c18 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java @@ -1192,6 +1192,7 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { protected SocketChannel channel; private ByteBuffer data; private ByteBuffer dataLengthBuffer; +private ByteBuffer preambleBuffer; protected final ConcurrentLinkedDeque responseQueue = new ConcurrentLinkedDeque(); private final Lock responseWriteLock = new ReentrantLock(); private Counter rpcCount = new Counter(); // number of outstanding rpcs @@ -1480,23 +1481,25 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { } private int readPreamble() throws IOException { - int count; - // Check for 'HBas' magic. 
- this.dataLengthBuffer.flip(); - if (!Arrays.equals(HConstants.RPC_HEADER, dataLengthBuffer.array())) { -return doBadPreambleHandling("Expected HEADER=" + -Bytes.toStringBinary(HConstants.RPC_HEADER) + -" but received HEADER=" + Bytes.toStringBinary(dataLengthBuffer.array()) + -" from " + toString()); - } - // Now read the next two bytes, the version and the auth to use. - ByteBuffer versionAndAuthBytes = ByteBuffer.allocate(2); - count = channelRead(channel, versionAndAuthBytes); - if (count < 0 || versionAndAuthBytes.remaining() > 0) { + if (preambleBuffer == null) { +preambleBuffer = ByteBuffer.allocate(6); + } + int count = channelRead(channel, preambleBuffer); + if (count < 0 || preambleBuffer.remaining() > 0) { return count; } - int version = versionAndAuthBytes.get(0); - byte authbyte = versionAndAuthBytes.get(1); + // Check for 'HBas' magic. + preambleBuffer.flip(); + for (int i = 0; i < HConstants.RPC_HEADER.length; i++) { +if (HConstants.RPC_HEADER[i] != preambleBuffer.get(i)) { + return doBadPreambleHandling("Expected HEADER=" + + Bytes.toStringBinary(HConstants.RPC_HEADER) + " but received HEADER=" + + Bytes.toStringBinary(preambleBuffer.array(), 0, HConstants.RPC_HEADER.length) + + " from " + toString()); +} + } + int version = preambleBuffer.get(HConstants.RPC_HEADER.length); + byte authbyte = preambleBuffer.get(HConstants.RPC_HEADER.length + 1); this.authMethod = AuthMethod.valueOf(authbyte); if (version != CURRENT_VERSION) { String msg = getFatalConnectionString(version, authbyte); @@ -1530,7 +1533,7 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { useSasl = true; } - dataLengthBuffer.clear(); + preambleBuffer = null; // do not need it anymore connectionPreambleRead = true; return count; } @@ -1552,10 +1555,15 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { * @throws InterruptedException */ public int readAndProcess() throws IOException, InterruptedException { - // Try and read in an 
int. If new connection, the int will hold the 'HBas' HEADER. If it - // does, read in the rest of the connection preamble, the version and the auth method. - // Else it will be length of the data to read (or -1 if a ping). We catch the integer - // length into the 4-byte this.dataLengthBuffer. + // If we have not read the
hbase git commit: HBASE-18081 The way we process connection preamble in SimpleRpcServer is broken
Repository: hbase Updated Branches: refs/heads/branch-1.1 10bb19bad -> 2cbdad412 HBASE-18081 The way we process connection preamble in SimpleRpcServer is broken Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2cbdad41 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2cbdad41 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2cbdad41 Branch: refs/heads/branch-1.1 Commit: 2cbdad412db8c4a9405016f3cc3e59d3af0e29fa Parents: 10bb19b Author: zhangduoAuthored: Sat May 20 21:58:45 2017 +0800 Committer: zhangduo Committed: Sun May 21 22:05:18 2017 +0800 -- .../org/apache/hadoop/hbase/ipc/RpcServer.java | 48 .../hadoop/hbase/ipc/AbstractTestIPC.java | 4 +- .../ipc/TestRpcServerSlowConnectionSetup.java | 116 +++ 3 files changed, 146 insertions(+), 22 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/2cbdad41/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java index 9fa8395..f634f00 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java @@ -1212,6 +1212,7 @@ public class RpcServer implements RpcServerInterface { protected SocketChannel channel; private ByteBuffer data; private ByteBuffer dataLengthBuffer; +private ByteBuffer preambleBuffer; protected final ConcurrentLinkedDeque responseQueue = new ConcurrentLinkedDeque(); private final Lock responseWriteLock = new ReentrantLock(); private Counter rpcCount = new Counter(); // number of outstanding rpcs @@ -1492,23 +1493,25 @@ public class RpcServer implements RpcServerInterface { } private int readPreamble() throws IOException { - int count; - // Check for 'HBas' magic. 
- this.dataLengthBuffer.flip(); - if (!Arrays.equals(HConstants.RPC_HEADER, dataLengthBuffer.array())) { -return doBadPreambleHandling("Expected HEADER=" + -Bytes.toStringBinary(HConstants.RPC_HEADER) + -" but received HEADER=" + Bytes.toStringBinary(dataLengthBuffer.array()) + -" from " + toString()); - } - // Now read the next two bytes, the version and the auth to use. - ByteBuffer versionAndAuthBytes = ByteBuffer.allocate(2); - count = channelRead(channel, versionAndAuthBytes); - if (count < 0 || versionAndAuthBytes.remaining() > 0) { + if (preambleBuffer == null) { +preambleBuffer = ByteBuffer.allocate(6); + } + int count = channelRead(channel, preambleBuffer); + if (count < 0 || preambleBuffer.remaining() > 0) { return count; } - int version = versionAndAuthBytes.get(0); - byte authbyte = versionAndAuthBytes.get(1); + // Check for 'HBas' magic. + preambleBuffer.flip(); + for (int i = 0; i < HConstants.RPC_HEADER.length; i++) { +if (HConstants.RPC_HEADER[i] != preambleBuffer.get(i)) { + return doBadPreambleHandling("Expected HEADER=" + + Bytes.toStringBinary(HConstants.RPC_HEADER) + " but received HEADER=" + + Bytes.toStringBinary(preambleBuffer.array(), 0, HConstants.RPC_HEADER.length) + + " from " + toString()); +} + } + int version = preambleBuffer.get(HConstants.RPC_HEADER.length); + byte authbyte = preambleBuffer.get(HConstants.RPC_HEADER.length + 1); this.authMethod = AuthMethod.valueOf(authbyte); if (version != CURRENT_VERSION) { String msg = getFatalConnectionString(version, authbyte); @@ -1537,7 +1540,7 @@ public class RpcServer implements RpcServerInterface { useSasl = true; } - dataLengthBuffer.clear(); + preambleBuffer = null; // do not need it anymore connectionPreambleRead = true; return count; } @@ -1559,10 +1562,15 @@ public class RpcServer implements RpcServerInterface { * @throws InterruptedException */ public int readAndProcess() throws IOException, InterruptedException { - // Try and read in an int. 
If new connection, the int will hold the 'HBas' HEADER. If it - // does, read in the rest of the connection preamble, the version and the auth method. - // Else it will be length of the data to read (or -1 if a ping). We catch the integer - // length into the 4-byte this.dataLengthBuffer. + // If we have not read the connection setup preamble, look to see if that is on the wire. + if (!connectionPreambleRead) {
hbase git commit: HBASE-18081 The way we process connection preamble in SimpleRpcServer is broken
Repository: hbase Updated Branches: refs/heads/master 1520c8fd4 -> 1ceb25cf0 HBASE-18081 The way we process connection preamble in SimpleRpcServer is broken Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1ceb25cf Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1ceb25cf Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1ceb25cf Branch: refs/heads/master Commit: 1ceb25cf09f5057bb9ec23eef90373c8febbc6e2 Parents: 1520c8f Author: zhangduoAuthored: Fri May 19 22:12:00 2017 +0800 Committer: zhangduo Committed: Sun May 21 20:36:33 2017 +0800 -- .../hbase/ipc/SimpleServerRpcConnection.java| 54 .../ipc/TestRpcServerSlowConnectionSetup.java | 136 +++ 2 files changed, 161 insertions(+), 29 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/1ceb25cf/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.java index 50a1a6b..b2507d8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.java @@ -63,6 +63,7 @@ class SimpleServerRpcConnection extends ServerRpcConnection { final SocketChannel channel; private ByteBuff data; private ByteBuffer dataLengthBuffer; + private ByteBuffer preambleBuffer; protected final ConcurrentLinkedDeque responseQueue = new ConcurrentLinkedDeque<>(); final Lock responseWriteLock = new ReentrantLock(); @@ -130,22 +131,25 @@ class SimpleServerRpcConnection extends ServerRpcConnection { } private int readPreamble() throws IOException { -int count; -// Check for 'HBas' magic. 
-this.dataLengthBuffer.flip(); -if (!Arrays.equals(HConstants.RPC_HEADER, dataLengthBuffer.array())) { - return doBadPreambleHandling( -"Expected HEADER=" + Bytes.toStringBinary(HConstants.RPC_HEADER) + " but received HEADER=" + -Bytes.toStringBinary(dataLengthBuffer.array()) + " from " + toString()); +if (preambleBuffer == null) { + preambleBuffer = ByteBuffer.allocate(6); } -// Now read the next two bytes, the version and the auth to use. -ByteBuffer versionAndAuthBytes = ByteBuffer.allocate(2); -count = this.rpcServer.channelRead(channel, versionAndAuthBytes); -if (count < 0 || versionAndAuthBytes.remaining() > 0) { +int count = this.rpcServer.channelRead(channel, preambleBuffer); +if (count < 0 || preambleBuffer.remaining() > 0) { return count; } -int version = versionAndAuthBytes.get(0); -byte authbyte = versionAndAuthBytes.get(1); +// Check for 'HBas' magic. +preambleBuffer.flip(); +for (int i = 0; i < HConstants.RPC_HEADER.length; i++) { + if (HConstants.RPC_HEADER[i] != preambleBuffer.get(i)) { +return doBadPreambleHandling("Expected HEADER=" + +Bytes.toStringBinary(HConstants.RPC_HEADER) + " but received HEADER=" + +Bytes.toStringBinary(preambleBuffer.array(), 0, HConstants.RPC_HEADER.length) + +" from " + toString()); + } +} +int version = preambleBuffer.get(HConstants.RPC_HEADER.length); +byte authbyte = preambleBuffer.get(HConstants.RPC_HEADER.length + 1); this.authMethod = AuthMethod.valueOf(authbyte); if (version != SimpleRpcServer.CURRENT_VERSION) { String msg = getFatalConnectionString(version, authbyte); @@ -178,8 +182,7 @@ class SimpleServerRpcConnection extends ServerRpcConnection { if (authMethod != AuthMethod.SIMPLE) { useSasl = true; } - -dataLengthBuffer.clear(); +preambleBuffer = null; // do not need it anymore connectionPreambleRead = true; return count; } @@ -200,26 +203,19 @@ class SimpleServerRpcConnection extends ServerRpcConnection { * @throws InterruptedException */ public int readAndProcess() throws IOException, InterruptedException 
{ -// Try and read in an int. If new connection, the int will hold the 'HBas' HEADER. If it -// does, read in the rest of the connection preamble, the version and the auth method. -// Else it will be length of the data to read (or -1 if a ping). We catch the integer -// length into the 4-byte this.dataLengthBuffer. -int count = read4Bytes(); -if (count < 0 || dataLengthBuffer.remaining() > 0) { - return count; -} - // If we have not read the connection setup preamble, look to see if that is on the wire. if
hbase git commit: HBASE-18081 The way we process connection preamble in SimpleRpcServer is broken
Repository: hbase Updated Branches: refs/heads/branch-1 c03f003f4 -> affd6ddf3 HBASE-18081 The way we process connection preamble in SimpleRpcServer is broken Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/affd6ddf Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/affd6ddf Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/affd6ddf Branch: refs/heads/branch-1 Commit: affd6ddf32a50ea63d6d47fca09e626d40646cf4 Parents: c03f003 Author: zhangduoAuthored: Sat May 20 21:21:59 2017 +0800 Committer: zhangduo Committed: Sun May 21 22:00:27 2017 +0800 -- .../org/apache/hadoop/hbase/ipc/RpcServer.java | 48 .../ipc/TestRpcServerSlowConnectionSetup.java | 119 +++ 2 files changed, 147 insertions(+), 20 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/affd6ddf/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java index 06ae51d..232b0e8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java @@ -1269,6 +1269,7 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { protected SocketChannel channel; private ByteBuffer data; private ByteBuffer dataLengthBuffer; +private ByteBuffer preambleBuffer; protected final ConcurrentLinkedDeque responseQueue = new ConcurrentLinkedDeque(); private final Lock responseWriteLock = new ReentrantLock(); private Counter rpcCount = new Counter(); // number of outstanding rpcs @@ -1559,23 +1560,25 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { } private int readPreamble() throws IOException { - int count; - // Check for 'HBas' magic. 
- this.dataLengthBuffer.flip(); - if (!Arrays.equals(HConstants.RPC_HEADER, dataLengthBuffer.array())) { -return doBadPreambleHandling("Expected HEADER=" + -Bytes.toStringBinary(HConstants.RPC_HEADER) + -" but received HEADER=" + Bytes.toStringBinary(dataLengthBuffer.array()) + -" from " + toString()); - } - // Now read the next two bytes, the version and the auth to use. - ByteBuffer versionAndAuthBytes = ByteBuffer.allocate(2); - count = channelRead(channel, versionAndAuthBytes); - if (count < 0 || versionAndAuthBytes.remaining() > 0) { + if (preambleBuffer == null) { +preambleBuffer = ByteBuffer.allocate(6); + } + int count = channelRead(channel, preambleBuffer); + if (count < 0 || preambleBuffer.remaining() > 0) { return count; } - int version = versionAndAuthBytes.get(0); - byte authbyte = versionAndAuthBytes.get(1); + // Check for 'HBas' magic. + preambleBuffer.flip(); + for (int i = 0; i < HConstants.RPC_HEADER.length; i++) { +if (HConstants.RPC_HEADER[i] != preambleBuffer.get(i)) { + return doBadPreambleHandling("Expected HEADER=" + + Bytes.toStringBinary(HConstants.RPC_HEADER) + " but received HEADER=" + + Bytes.toStringBinary(preambleBuffer.array(), 0, HConstants.RPC_HEADER.length) + + " from " + toString()); +} + } + int version = preambleBuffer.get(HConstants.RPC_HEADER.length); + byte authbyte = preambleBuffer.get(HConstants.RPC_HEADER.length + 1); this.authMethod = AuthMethod.valueOf(authbyte); if (version != CURRENT_VERSION) { String msg = getFatalConnectionString(version, authbyte); @@ -1609,7 +1612,7 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { useSasl = true; } - dataLengthBuffer.clear(); + preambleBuffer = null; // do not need it anymore connectionPreambleRead = true; return count; } @@ -1631,10 +1634,15 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { * @throws InterruptedException */ public int readAndProcess() throws IOException, InterruptedException { - // Try and read in an 
int. If new connection, the int will hold the 'HBas' HEADER. If it - // does, read in the rest of the connection preamble, the version and the auth method. - // Else it will be length of the data to read (or -1 if a ping). We catch the integer - // length into the 4-byte this.dataLengthBuffer. + // If we have not read the connection setup preamble, look to see if that is on the wire. +
hbase git commit: HBASE-18081 The way we process connection preamble in SimpleRpcServer is broken
Repository: hbase Updated Branches: refs/heads/branch-1.3 e9aa49f65 -> 9d21e89b0 HBASE-18081 The way we process connection preamble in SimpleRpcServer is broken Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9d21e89b Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9d21e89b Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9d21e89b Branch: refs/heads/branch-1.3 Commit: 9d21e89b003243d6dca935659ab991d29486e423 Parents: e9aa49f Author: zhangduoAuthored: Sat May 20 21:50:21 2017 +0800 Committer: zhangduo Committed: Sun May 21 21:15:26 2017 +0800 -- .../org/apache/hadoop/hbase/ipc/RpcServer.java | 48 .../hadoop/hbase/ipc/AbstractTestIPC.java | 4 +- .../ipc/TestRpcServerSlowConnectionSetup.java | 116 +++ 3 files changed, 146 insertions(+), 22 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/9d21e89b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java -- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java index c29c395..6b7ccff 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java @@ -1270,6 +1270,7 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { protected SocketChannel channel; private ByteBuffer data; private ByteBuffer dataLengthBuffer; +private ByteBuffer preambleBuffer; protected final ConcurrentLinkedDeque responseQueue = new ConcurrentLinkedDeque(); private final Lock responseWriteLock = new ReentrantLock(); private Counter rpcCount = new Counter(); // number of outstanding rpcs @@ -1560,23 +1561,25 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { } private int readPreamble() throws IOException { - int count; - // Check for 'HBas' magic. 
- this.dataLengthBuffer.flip(); - if (!Arrays.equals(HConstants.RPC_HEADER, dataLengthBuffer.array())) { -return doBadPreambleHandling("Expected HEADER=" + -Bytes.toStringBinary(HConstants.RPC_HEADER) + -" but received HEADER=" + Bytes.toStringBinary(dataLengthBuffer.array()) + -" from " + toString()); - } - // Now read the next two bytes, the version and the auth to use. - ByteBuffer versionAndAuthBytes = ByteBuffer.allocate(2); - count = channelRead(channel, versionAndAuthBytes); - if (count < 0 || versionAndAuthBytes.remaining() > 0) { + if (preambleBuffer == null) { +preambleBuffer = ByteBuffer.allocate(6); + } + int count = channelRead(channel, preambleBuffer); + if (count < 0 || preambleBuffer.remaining() > 0) { return count; } - int version = versionAndAuthBytes.get(0); - byte authbyte = versionAndAuthBytes.get(1); + // Check for 'HBas' magic. + preambleBuffer.flip(); + for (int i = 0; i < HConstants.RPC_HEADER.length; i++) { +if (HConstants.RPC_HEADER[i] != preambleBuffer.get(i)) { + return doBadPreambleHandling("Expected HEADER=" + + Bytes.toStringBinary(HConstants.RPC_HEADER) + " but received HEADER=" + + Bytes.toStringBinary(preambleBuffer.array(), 0, HConstants.RPC_HEADER.length) + + " from " + toString()); +} + } + int version = preambleBuffer.get(HConstants.RPC_HEADER.length); + byte authbyte = preambleBuffer.get(HConstants.RPC_HEADER.length + 1); this.authMethod = AuthMethod.valueOf(authbyte); if (version != CURRENT_VERSION) { String msg = getFatalConnectionString(version, authbyte); @@ -1610,7 +1613,7 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { useSasl = true; } - dataLengthBuffer.clear(); + preambleBuffer = null; // do not need it anymore connectionPreambleRead = true; return count; } @@ -1632,10 +1635,15 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { * @throws InterruptedException */ public int readAndProcess() throws IOException, InterruptedException { - // Try and read in an 
int. If new connection, the int will hold the 'HBas' HEADER. If it - // does, read in the rest of the connection preamble, the version and the auth method. - // Else it will be length of the data to read (or -1 if a ping). We catch the integer - // length into the 4-byte this.dataLengthBuffer. + // If we have not read the
[12/12] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.
Published site at 82d554e3783372cc6b05489452c815b57c06f6cd. Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/1a880974 Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/1a880974 Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/1a880974 Branch: refs/heads/asf-site Commit: 1a8809746467d0989354ae19db540afd9201e2b4 Parents: b8f7528 Author: jenkinsAuthored: Sun May 21 14:59:04 2017 + Committer: jenkins Committed: Sun May 21 14:59:04 2017 + -- acid-semantics.html | 4 +- apache_hbase_reference_guide.pdf| 4 +- apache_hbase_reference_guide.pdfmarks | 4 +- bulk-loads.html | 4 +- checkstyle-aggregate.html | 23060 + checkstyle.rss | 4 +- coc.html| 4 +- cygwin.html | 4 +- dependencies.html | 4 +- dependency-convergence.html | 4 +- dependency-info.html| 4 +- dependency-management.html | 4 +- devapidocs/constant-values.html | 8 +- devapidocs/index-all.html | 2 + .../hadoop/hbase/backup/package-tree.html | 4 +- .../hbase/classification/package-tree.html | 6 +- .../hadoop/hbase/client/package-tree.html |24 +- .../hadoop/hbase/executor/package-tree.html | 2 +- .../hadoop/hbase/filter/package-tree.html |12 +- .../hadoop/hbase/io/hfile/package-tree.html | 6 +- .../hbase/ipc/SimpleServerRpcConnection.html|67 +- .../apache/hadoop/hbase/ipc/package-tree.html | 2 +- .../hadoop/hbase/mapreduce/package-tree.html| 4 +- .../hadoop/hbase/master/package-tree.html | 6 +- .../org/apache/hadoop/hbase/package-tree.html |10 +- .../hadoop/hbase/procedure2/package-tree.html | 6 +- .../hadoop/hbase/quotas/package-tree.html | 6 +- .../hadoop/hbase/regionserver/package-tree.html |18 +- .../regionserver/querymatcher/package-tree.html | 2 +- .../hbase/regionserver/wal/package-tree.html| 2 +- .../hadoop/hbase/rest/model/package-tree.html | 2 +- .../hbase/security/access/package-tree.html | 2 +- .../hadoop/hbase/security/package-tree.html | 2 +- .../tmpl/master/MasterStatusTmpl.ImplData.html | 270 +- 
.../hbase/tmpl/master/MasterStatusTmpl.html | 108 +- .../hbase/tmpl/master/MasterStatusTmplImpl.html |54 +- .../regionserver/RSStatusTmpl.ImplData.html | 120 +- .../hbase/tmpl/regionserver/RSStatusTmpl.html |48 +- .../tmpl/regionserver/RSStatusTmplImpl.html |24 +- .../apache/hadoop/hbase/util/package-tree.html |10 +- .../org/apache/hadoop/hbase/Version.html| 6 +- .../hbase/ipc/SimpleServerRpcConnection.html| 708 +- .../regionserver/MemStoreCompactor.Action.html | 2 +- .../hbase/regionserver/MemStoreCompactor.html | 2 +- .../tmpl/master/MasterStatusTmpl.ImplData.html | 270 +- .../tmpl/master/MasterStatusTmpl.Intf.html | 270 +- .../hbase/tmpl/master/MasterStatusTmpl.html | 270 +- .../hbase/tmpl/master/MasterStatusTmplImpl.html |76 +- .../regionserver/RSStatusTmpl.ImplData.html | 120 +- .../tmpl/regionserver/RSStatusTmpl.Intf.html| 120 +- .../hbase/tmpl/regionserver/RSStatusTmpl.html | 120 +- .../tmpl/regionserver/RSStatusTmplImpl.html |36 +- export_control.html | 4 +- hbase-annotations/checkstyle.html | 6 +- hbase-annotations/dependencies.html | 6 +- hbase-annotations/dependency-convergence.html | 6 +- hbase-annotations/dependency-info.html | 6 +- hbase-annotations/dependency-management.html| 6 +- hbase-annotations/index.html| 6 +- hbase-annotations/integration.html | 6 +- hbase-annotations/issue-tracking.html | 6 +- hbase-annotations/license.html | 6 +- hbase-annotations/mail-lists.html | 6 +- hbase-annotations/plugin-management.html| 6 +- hbase-annotations/plugins.html | 6 +- hbase-annotations/project-info.html | 6 +- hbase-annotations/project-reports.html | 6 +- hbase-annotations/project-summary.html | 6 +- hbase-annotations/source-repository.html| 6 +- hbase-annotations/team-list.html| 6 +- hbase-archetypes/dependencies.html | 6 +- hbase-archetypes/dependency-convergence.html| 6 +-
[01/12] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.
Repository: hbase-site Updated Branches: refs/heads/asf-site b8f752871 -> 1a8809746 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/testdevapidocs/org/apache/hadoop/hbase/package-tree.html -- diff --git a/testdevapidocs/org/apache/hadoop/hbase/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/package-tree.html index ddcfe17..5bfb09a 100644 --- a/testdevapidocs/org/apache/hadoop/hbase/package-tree.html +++ b/testdevapidocs/org/apache/hadoop/hbase/package-tree.html @@ -541,15 +541,15 @@ java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true; title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true; title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true; title="class or interface in java.io">Serializable) -org.apache.hadoop.hbase.IntegrationTestRegionReplicaPerf.Stat -org.apache.hadoop.hbase.IntegrationTestDDLMasterFailover.ACTION -org.apache.hadoop.hbase.RESTApiClusterManager.RoleCommand -org.apache.hadoop.hbase.HBaseClusterManager.CommandProvider.Operation org.apache.hadoop.hbase.ScanPerformanceEvaluation.ScanCounter +org.apache.hadoop.hbase.HBaseClusterManager.CommandProvider.Operation +org.apache.hadoop.hbase.IntegrationTestRegionReplicaPerf.Stat org.apache.hadoop.hbase.ClusterManager.ServiceType -org.apache.hadoop.hbase.PerformanceEvaluation.Counter +org.apache.hadoop.hbase.RESTApiClusterManager.RoleCommand org.apache.hadoop.hbase.RESTApiClusterManager.Service org.apache.hadoop.hbase.ResourceChecker.Phase +org.apache.hadoop.hbase.PerformanceEvaluation.Counter +org.apache.hadoop.hbase.IntegrationTestDDLMasterFailover.ACTION http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/testdevapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html -- diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html index 0de6b32..7540e04 100644 --- a/testdevapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html +++ b/testdevapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html @@ -205,9 +205,9 @@ java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true; title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true; title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true; title="class or interface in java.io">Serializable) -org.apache.hadoop.hbase.procedure2.TestProcedureRecovery.TestStateMachineProcedure.State org.apache.hadoop.hbase.procedure2.TestStateMachineProcedure.TestSMProcedureState org.apache.hadoop.hbase.procedure2.TestYieldProcedures.TestStateMachineProcedure.State +org.apache.hadoop.hbase.procedure2.TestProcedureRecovery.TestStateMachineProcedure.State http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html -- diff --git a/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html index 90f06c0..a302db9 100644 --- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html +++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html @@ -609,10 +609,10 @@ java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true; title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true; title="class or interface in java.lang">ComparableT, 
java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true; title="class or interface in java.io">Serializable) -org.apache.hadoop.hbase.regionserver.TestAtomicOperation.TestStep -org.apache.hadoop.hbase.regionserver.TestCacheOnWriteInSchema.CacheOnWriteType org.apache.hadoop.hbase.regionserver.TestRegionServerReadRequestMetrics.Metric org.apache.hadoop.hbase.regionserver.DataBlockEncodingTool.Manipulation +org.apache.hadoop.hbase.regionserver.TestCacheOnWriteInSchema.CacheOnWriteType +org.apache.hadoop.hbase.regionserver.TestAtomicOperation.TestStep http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/testdevapidocs/org/apache/hadoop/hbase/test/package-tree.html -- diff --git a/testdevapidocs/org/apache/hadoop/hbase/test/package-tree.html
[03/12] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/hbase-archetypes/hbase-client-project/dependency-convergence.html -- diff --git a/hbase-archetypes/hbase-client-project/dependency-convergence.html b/hbase-archetypes/hbase-client-project/dependency-convergence.html index 75fe8f3..f375897 100644 --- a/hbase-archetypes/hbase-client-project/dependency-convergence.html +++ b/hbase-archetypes/hbase-client-project/dependency-convergence.html @@ -1,5 +1,5 @@ http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;> - + http://www.w3.org/1999/xhtml; xml:lang="en" lang="en"> @@ -10,7 +10,7 @@ @import url("./css/site.css"); - + @@ -27,7 +27,7 @@ -Last Published: 2017-05-20 +Last Published: 2017-05-21 | Version: 2.0.0-SNAPSHOT Apache HBase - Exemplar for hbase-client archetype http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/hbase-archetypes/hbase-client-project/dependency-info.html -- diff --git a/hbase-archetypes/hbase-client-project/dependency-info.html b/hbase-archetypes/hbase-client-project/dependency-info.html index e6d0c32..ecdd452 100644 --- a/hbase-archetypes/hbase-client-project/dependency-info.html +++ b/hbase-archetypes/hbase-client-project/dependency-info.html @@ -1,5 +1,5 @@ http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;> - + http://www.w3.org/1999/xhtml; xml:lang="en" lang="en"> @@ -10,7 +10,7 @@ @import url("./css/site.css"); - + @@ -27,7 +27,7 @@ -Last Published: 2017-05-20 +Last Published: 2017-05-21 | Version: 2.0.0-SNAPSHOT Apache HBase - Exemplar for hbase-client archetype http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/hbase-archetypes/hbase-client-project/dependency-management.html -- diff --git a/hbase-archetypes/hbase-client-project/dependency-management.html b/hbase-archetypes/hbase-client-project/dependency-management.html index fa40ca3..f3d7a46 100644 --- a/hbase-archetypes/hbase-client-project/dependency-management.html +++ b/hbase-archetypes/hbase-client-project/dependency-management.html @@ 
-1,5 +1,5 @@ http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;> - + http://www.w3.org/1999/xhtml; xml:lang="en" lang="en"> @@ -10,7 +10,7 @@ @import url("./css/site.css"); - + @@ -27,7 +27,7 @@ -Last Published: 2017-05-20 +Last Published: 2017-05-21 | Version: 2.0.0-SNAPSHOT Apache HBase - Exemplar for hbase-client archetype http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/hbase-archetypes/hbase-client-project/index.html -- diff --git a/hbase-archetypes/hbase-client-project/index.html b/hbase-archetypes/hbase-client-project/index.html index c0b48cb..0f85d1f 100644 --- a/hbase-archetypes/hbase-client-project/index.html +++ b/hbase-archetypes/hbase-client-project/index.html @@ -1,5 +1,5 @@ http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;> - + http://www.w3.org/1999/xhtml; xml:lang="en" lang="en"> @@ -10,7 +10,7 @@ @import url("./css/site.css"); - + @@ -27,7 +27,7 @@ -Last Published: 2017-05-20 +Last Published: 2017-05-21 | Version: 2.0.0-SNAPSHOT Apache HBase - Exemplar for hbase-client archetype http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/hbase-archetypes/hbase-client-project/integration.html -- diff --git a/hbase-archetypes/hbase-client-project/integration.html b/hbase-archetypes/hbase-client-project/integration.html index 333a474..023f407 100644 --- a/hbase-archetypes/hbase-client-project/integration.html +++ b/hbase-archetypes/hbase-client-project/integration.html @@ -1,5 +1,5 @@ http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;> - + http://www.w3.org/1999/xhtml; xml:lang="en" lang="en"> @@ -10,7 +10,7 @@ @import url("./css/site.css"); - + @@ -27,7 +27,7 @@ -Last Published: 2017-05-20
[07/12] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.html -- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.html b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.html index ff541c9..dedc08e 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.html @@ -71,369 +71,365 @@ 063 final SocketChannel channel; 064 private ByteBuff data; 065 private ByteBuffer dataLengthBuffer; -066 protected final ConcurrentLinkedDequeSimpleServerCall responseQueue = -067 new ConcurrentLinkedDeque(); -068 final Lock responseWriteLock = new ReentrantLock(); -069 private final LongAdder rpcCount = new LongAdder(); // number of outstanding rpcs -070 private long lastContact; -071 private final Socket socket; -072 private final SimpleRpcServerResponder responder; -073 -074 public SimpleServerRpcConnection(SimpleRpcServer rpcServer, SocketChannel channel, -075 long lastContact) { -076super(rpcServer); -077this.channel = channel; -078this.lastContact = lastContact; -079this.data = null; -080this.dataLengthBuffer = ByteBuffer.allocate(4); -081this.socket = channel.socket(); -082this.addr = socket.getInetAddress(); -083if (addr == null) { -084 this.hostAddress = "*Unknown*"; -085} else { -086 this.hostAddress = addr.getHostAddress(); -087} -088this.remotePort = socket.getPort(); -089if (rpcServer.socketSendBufferSize != 0) { -090 try { -091 socket.setSendBufferSize(rpcServer.socketSendBufferSize); -092 } catch (IOException e) { -093SimpleRpcServer.LOG.warn( -094 "Connection: unable to set socket send buffer size to " + rpcServer.socketSendBufferSize); -095 } -096} -097this.saslCall = new SimpleServerCall(SASL_CALLID, null, null, null, null, null, this, 0, null, -098null, System.currentTimeMillis(), 0, rpcServer.reservoir, 
rpcServer.cellBlockBuilder, null, -099rpcServer.responder); -100this.setConnectionHeaderResponseCall = new SimpleServerCall(CONNECTION_HEADER_RESPONSE_CALLID, -101null, null, null, null, null, this, 0, null, null, System.currentTimeMillis(), 0, -102rpcServer.reservoir, rpcServer.cellBlockBuilder, null, rpcServer.responder); -103this.authFailedCall = new SimpleServerCall(AUTHORIZATION_FAILED_CALLID, null, null, null, null, -104null, this, 0, null, null, System.currentTimeMillis(), 0, rpcServer.reservoir, -105rpcServer.cellBlockBuilder, null, rpcServer.responder); -106this.responder = rpcServer.responder; -107 } -108 -109 public void setLastContact(long lastContact) { -110this.lastContact = lastContact; -111 } -112 -113 public long getLastContact() { -114return lastContact; -115 } -116 -117 /* Return true if the connection has no outstanding rpc */ -118 boolean isIdle() { -119return rpcCount.sum() == 0; -120 } -121 -122 /* Decrement the outstanding RPC count */ -123 protected void decRpcCount() { -124rpcCount.decrement(); -125 } -126 -127 /* Increment the outstanding RPC count */ -128 protected void incRpcCount() { -129rpcCount.increment(); -130 } -131 -132 private int readPreamble() throws IOException { -133int count; -134// Check for 'HBas' magic. 
-135this.dataLengthBuffer.flip(); -136if (!Arrays.equals(HConstants.RPC_HEADER, dataLengthBuffer.array())) { -137 return doBadPreambleHandling( -138"Expected HEADER=" + Bytes.toStringBinary(HConstants.RPC_HEADER) + " but received HEADER=" + -139 Bytes.toStringBinary(dataLengthBuffer.array()) + " from " + toString()); +066 private ByteBuffer preambleBuffer; +067 protected final ConcurrentLinkedDequeSimpleServerCall responseQueue = +068 new ConcurrentLinkedDeque(); +069 final Lock responseWriteLock = new ReentrantLock(); +070 private final LongAdder rpcCount = new LongAdder(); // number of outstanding rpcs +071 private long lastContact; +072 private final Socket socket; +073 private final SimpleRpcServerResponder responder; +074 +075 public SimpleServerRpcConnection(SimpleRpcServer rpcServer, SocketChannel channel, +076 long lastContact) { +077super(rpcServer); +078this.channel = channel; +079this.lastContact = lastContact; +080this.data = null; +081this.dataLengthBuffer = ByteBuffer.allocate(4); +082this.socket = channel.socket(); +083this.addr = socket.getInetAddress(); +084if (addr == null) { +085 this.hostAddress = "*Unknown*"; +086} else { +087 this.hostAddress = addr.getHostAddress(); +088} +089this.remotePort = socket.getPort(); +090if
[09/12] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html index 877fae3..a700851 100644 --- a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html +++ b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html @@ -125,9 +125,9 @@ java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true; title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true; title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true; title="class or interface in java.io">Serializable) -org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker.DeleteResult org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker.DeleteCompare +org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode org.apache.hadoop.hbase.regionserver.querymatcher.StripeCompactionScanQueryMatcher.DropDeletesInOutput http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html index 320500a..326107e 100644 --- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html +++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html @@ -248,8 +248,8 @@ java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true; title="class or 
interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true; title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true; title="class or interface in java.io">Serializable) -org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader.WALHdrResult org.apache.hadoop.hbase.regionserver.wal.RingBufferTruck.Type +org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader.WALHdrResult http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html b/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html index d9efa05..dbfbf42 100644 --- a/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html +++ b/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html @@ -110,8 +110,8 @@ java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true; title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true; title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true; title="class or interface in java.io">Serializable) -org.apache.hadoop.hbase.rest.model.ScannerModel.FilterModel.FilterType org.apache.hadoop.hbase.rest.model.ScannerModel.FilterModel.ByteArrayComparableModel.ComparatorType +org.apache.hadoop.hbase.rest.model.ScannerModel.FilterModel.FilterType http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html index 
56fea88..25784b8 100644 --- a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html +++ b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html @@ -137,8 +137,8 @@ java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true; title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true; title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true; title="class or interface in java.io">Serializable)
[11/12] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/checkstyle-aggregate.html -- diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html index 0cec828..a095456 100644 --- a/checkstyle-aggregate.html +++ b/checkstyle-aggregate.html @@ -7,7 +7,7 @@ - + Apache HBase Checkstyle Results @@ -289,7 +289,7 @@ 2167 0 0 -14407 +14408 Files @@ -2567,7 +2567,7 @@ org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.java 0 0 -4 +5 org/apache/hadoop/hbase/mapred/Driver.java 0 @@ -7178,7 +7178,7 @@ http://checkstyle.sourceforge.net/config_imports.html#UnusedImports;>UnusedImports processJavadoc: true -77 +78 Error indentation @@ -34546,55 +34546,61 @@ Error imports +UnusedImports +Unused import - java.util.Arrays. +27 + +Error +imports ImportOrder Wrong order for 'org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor' import. 49 - + Error javadoc NonEmptyAtclauseDescription At-clause should have a non-empty description. -199 - +202 + Error javadoc NonEmptyAtclauseDescription At-clause should have a non-empty description. -200 - +203 + Error blocks NeedBraces 'if' construct must use '{}'s. -392 +388 org/apache/hadoop/hbase/mapred/Driver.java - + Severity Category Rule Message Line - + Error imports ImportOrder Wrong order for 'com.google.common.annotations.VisibleForTesting' import. 26 - + Error design HideUtilityClassConstructor Utility classes should not have a public or default constructor. 32 - + Error javadoc NonEmptyAtclauseDescription At-clause should have a non-empty description. 44 - + Error javadoc NonEmptyAtclauseDescription @@ -34603,73 +34609,73 @@ org/apache/hadoop/hbase/mapred/GroupingTableMap.java - + Severity Category Rule Message Line - + Error imports ImportOrder Wrong order for 'org.apache.hadoop.hbase.Cell' import. 25 - + Error indentation Indentation 'extends' have incorrect indentation level 0, expected level should be 2. 
42 - + Error indentation Indentation 'implements' have incorrect indentation level 0, expected level should be 2. 43 - + Error javadoc JavadocTagContinuationIndentation Line continuation have incorrect indentation level, expected level should be 2. 61 - + Error javadoc NonEmptyAtclauseDescription At-clause should have a non-empty description. 89 - + Error javadoc NonEmptyAtclauseDescription At-clause should have a non-empty description. 90 - + Error javadoc NonEmptyAtclauseDescription At-clause should have a non-empty description. 91 - + Error javadoc NonEmptyAtclauseDescription At-clause should have a non-empty description. 92 - + Error javadoc NonEmptyAtclauseDescription At-clause should have a non-empty description. 93 - + Error javadoc NonEmptyAtclauseDescription At-clause should have a non-empty description. 112 - + Error javadoc NonEmptyAtclauseDescription @@ -34678,31 +34684,31 @@ org/apache/hadoop/hbase/mapred/HRegionPartitioner.java - + Severity Category Rule Message Line - + Error javadoc NonEmptyAtclauseDescription At-clause should have a non-empty description. 42 - + Error javadoc NonEmptyAtclauseDescription At-clause should have a non-empty description. 43 - + Error indentation Indentation 'implements' have incorrect indentation level 0, expected level should be 2. 47 - + Error whitespace ParenPad @@ -34711,55 +34717,55 @@ org/apache/hadoop/hbase/mapred/IdentityTableMap.java - + Severity Category Rule Message Line - + Error imports ImportOrder Wrong order for 'org.apache.hadoop.hbase.client.Result' import. 25 - + Error indentation Indentation 'extends' have incorrect indentation level 0, expected level should be 2. 36 - + Error indentation Indentation 'implements' have incorrect indentation level 0, expected level should be 2. 37 - + Error javadoc NonEmptyAtclauseDescription At-clause should have a non-empty description. 63 - + Error javadoc NonEmptyAtclauseDescription At-clause should have a non-empty description. 
64 - + Error javadoc NonEmptyAtclauseDescription At-clause should have a non-empty description. 65 - + Error javadoc NonEmptyAtclauseDescription At-clause should have a non-empty description. 66 - + Error javadoc NonEmptyAtclauseDescription @@ -34768,49 +34774,49 @@ org/apache/hadoop/hbase/mapred/IdentityTableReduce.java - + Severity Category Rule Message Line - + Error indentation Indentation 'extends' have incorrect indentation level 0, expected level should be 2. 38 - + Error indentation Indentation 'implements' have incorrect indentation level 0, expected level should be 2. 39 - + Error javadoc NonEmptyAtclauseDescription At-clause should have a non-empty description. 46 - + Error javadoc NonEmptyAtclauseDescription At-clause should have a non-empty
[04/12] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html -- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html index 5d61a6c..20455ea 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.html @@ -33,10 +33,10 @@ 025 requiredArguments = { 026@org.jamon.annotations.Argument(name = "regionServer", type = "HRegionServer")}, 027 optionalArguments = { -028@org.jamon.annotations.Argument(name = "format", type = "String"), -029@org.jamon.annotations.Argument(name = "bcv", type = "String"), -030@org.jamon.annotations.Argument(name = "filter", type = "String"), -031@org.jamon.annotations.Argument(name = "bcn", type = "String")}) +028@org.jamon.annotations.Argument(name = "bcn", type = "String"), +029@org.jamon.annotations.Argument(name = "filter", type = "String"), +030@org.jamon.annotations.Argument(name = "format", type = "String"), +031@org.jamon.annotations.Argument(name = "bcv", type = "String")}) 032public class RSStatusTmpl 033 extends org.jamon.AbstractTemplateProxy 034{ @@ -77,74 +77,74 @@ 069 return m_regionServer; 070} 071private HRegionServer m_regionServer; -072// 22, 1 -073public void setFormat(String format) +072// 23, 1 +073public void setBcn(String bcn) 074{ -075 // 22, 1 -076 m_format = format; -077 m_format__IsNotDefault = true; +075 // 23, 1 +076 m_bcn = bcn; +077 m_bcn__IsNotDefault = true; 078} -079public String getFormat() +079public String getBcn() 080{ -081 return m_format; +081 return m_bcn; 082} -083private String m_format; -084public boolean getFormat__IsNotDefault() +083private String m_bcn; +084public boolean getBcn__IsNotDefault() 085{ -086 return m_format__IsNotDefault; +086 return m_bcn__IsNotDefault; 087} 
-088private boolean m_format__IsNotDefault; -089// 24, 1 -090public void setBcv(String bcv) +088private boolean m_bcn__IsNotDefault; +089// 21, 1 +090public void setFilter(String filter) 091{ -092 // 24, 1 -093 m_bcv = bcv; -094 m_bcv__IsNotDefault = true; +092 // 21, 1 +093 m_filter = filter; +094 m_filter__IsNotDefault = true; 095} -096public String getBcv() +096public String getFilter() 097{ -098 return m_bcv; +098 return m_filter; 099} -100private String m_bcv; -101public boolean getBcv__IsNotDefault() +100private String m_filter; +101public boolean getFilter__IsNotDefault() 102{ -103 return m_bcv__IsNotDefault; +103 return m_filter__IsNotDefault; 104} -105private boolean m_bcv__IsNotDefault; -106// 21, 1 -107public void setFilter(String filter) +105private boolean m_filter__IsNotDefault; +106// 22, 1 +107public void setFormat(String format) 108{ -109 // 21, 1 -110 m_filter = filter; -111 m_filter__IsNotDefault = true; +109 // 22, 1 +110 m_format = format; +111 m_format__IsNotDefault = true; 112} -113public String getFilter() +113public String getFormat() 114{ -115 return m_filter; +115 return m_format; 116} -117private String m_filter; -118public boolean getFilter__IsNotDefault() +117private String m_format; +118public boolean getFormat__IsNotDefault() 119{ -120 return m_filter__IsNotDefault; +120 return m_format__IsNotDefault; 121} -122private boolean m_filter__IsNotDefault; -123// 23, 1 -124public void setBcn(String bcn) +122private boolean m_format__IsNotDefault; +123// 24, 1 +124public void setBcv(String bcv) 125{ -126 // 23, 1 -127 m_bcn = bcn; -128 m_bcn__IsNotDefault = true; +126 // 24, 1 +127 m_bcv = bcv; +128 m_bcv__IsNotDefault = true; 129} -130public String getBcn() +130public String getBcv() 131{ -132 return m_bcn; +132 return m_bcv; 133} -134private String m_bcn; -135public boolean getBcn__IsNotDefault() +134private String m_bcv; +135public boolean getBcv__IsNotDefault() 136{ -137 return m_bcn__IsNotDefault; +137 return m_bcv__IsNotDefault; 138} 
-139private boolean m_bcn__IsNotDefault; +139private boolean m_bcv__IsNotDefault; 140 } 141 @Override 142 protected org.jamon.AbstractTemplateProxy.ImplData makeImplData() @@ -156,31 +156,31 @@ 148return (ImplData) super.getImplData(); 149 } 150 -151 protected String format; -152 public final
[02/12] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/hbase-spark/issue-tracking.html -- diff --git a/hbase-spark/issue-tracking.html b/hbase-spark/issue-tracking.html index 1738863..32c924b 100644 --- a/hbase-spark/issue-tracking.html +++ b/hbase-spark/issue-tracking.html @@ -1,5 +1,5 @@ http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;> - + http://www.w3.org/1999/xhtml; xml:lang="en" lang="en"> @@ -10,7 +10,7 @@ @import url("./css/site.css"); - + @@ -27,7 +27,7 @@ -Last Published: 2017-05-20 +Last Published: 2017-05-21 | Version: 2.0.0-SNAPSHOT Apache HBase - Spark http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/hbase-spark/license.html -- diff --git a/hbase-spark/license.html b/hbase-spark/license.html index 9634842..71b0894 100644 --- a/hbase-spark/license.html +++ b/hbase-spark/license.html @@ -1,5 +1,5 @@ http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;> - + http://www.w3.org/1999/xhtml; xml:lang="en" lang="en"> @@ -10,7 +10,7 @@ @import url("./css/site.css"); - + @@ -27,7 +27,7 @@ -Last Published: 2017-05-20 +Last Published: 2017-05-21 | Version: 2.0.0-SNAPSHOT Apache HBase - Spark http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/hbase-spark/mail-lists.html -- diff --git a/hbase-spark/mail-lists.html b/hbase-spark/mail-lists.html index c67e693..d0d5872 100644 --- a/hbase-spark/mail-lists.html +++ b/hbase-spark/mail-lists.html @@ -1,5 +1,5 @@ http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;> - + http://www.w3.org/1999/xhtml; xml:lang="en" lang="en"> @@ -10,7 +10,7 @@ @import url("./css/site.css"); - + @@ -27,7 +27,7 @@ -Last Published: 2017-05-20 +Last Published: 2017-05-21 | Version: 2.0.0-SNAPSHOT Apache HBase - Spark http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/hbase-spark/plugin-management.html -- diff --git a/hbase-spark/plugin-management.html b/hbase-spark/plugin-management.html index ee2f459..2240147 100644 --- a/hbase-spark/plugin-management.html +++ 
b/hbase-spark/plugin-management.html @@ -1,5 +1,5 @@ http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;> - + http://www.w3.org/1999/xhtml; xml:lang="en" lang="en"> @@ -10,7 +10,7 @@ @import url("./css/site.css"); - + @@ -27,7 +27,7 @@ -Last Published: 2017-05-20 +Last Published: 2017-05-21 | Version: 2.0.0-SNAPSHOT Apache HBase - Spark http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/hbase-spark/plugins.html -- diff --git a/hbase-spark/plugins.html b/hbase-spark/plugins.html index 472a61b..9542844 100644 --- a/hbase-spark/plugins.html +++ b/hbase-spark/plugins.html @@ -1,5 +1,5 @@ http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;> - + http://www.w3.org/1999/xhtml; xml:lang="en" lang="en"> @@ -10,7 +10,7 @@ @import url("./css/site.css"); - + @@ -27,7 +27,7 @@ -Last Published: 2017-05-20 +Last Published: 2017-05-21 | Version: 2.0.0-SNAPSHOT Apache HBase - Spark http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/hbase-spark/project-info.html -- diff --git a/hbase-spark/project-info.html b/hbase-spark/project-info.html index 5371c41..2b505e2 100644 --- a/hbase-spark/project-info.html +++ b/hbase-spark/project-info.html @@ -1,5 +1,5 @@ http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;> - + http://www.w3.org/1999/xhtml; xml:lang="en" lang="en"> @@ -10,7 +10,7 @@ @import url("./css/site.css"); - + @@ -27,7 +27,7 @@ -Last Published: 2017-05-20 +
hbase-site git commit: INFRA-10751 Empty commit
Repository: hbase-site Updated Branches: refs/heads/asf-site 1a8809746 -> bb0e51758 INFRA-10751 Empty commit Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/bb0e5175 Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/bb0e5175 Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/bb0e5175 Branch: refs/heads/asf-site Commit: bb0e51758299d7558200d7c961385ed08bfe951c Parents: 1a88097 Author: jenkinsAuthored: Sun May 21 14:59:21 2017 + Committer: jenkins Committed: Sun May 21 14:59:21 2017 + -- --
[08/12] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html -- diff --git a/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html b/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html index 53fbd01..ccde903 100644 --- a/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html +++ b/devapidocs/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.ImplData.html @@ -293,76 +293,76 @@ extends org.jamon.AbstractTemplateProxy.ImplData privateHRegionServer m_regionServer - + -m_format -privatehttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String m_format +m_bcn +privatehttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String m_bcn - + -m_format__IsNotDefault -privateboolean m_format__IsNotDefault +m_bcn__IsNotDefault +privateboolean m_bcn__IsNotDefault - + -m_bcv -privatehttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String m_bcv +m_filter +privatehttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String m_filter - + -m_bcv__IsNotDefault -privateboolean m_bcv__IsNotDefault +m_filter__IsNotDefault +privateboolean m_filter__IsNotDefault - + -m_filter -privatehttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String m_filter +m_format +privatehttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String m_format - + -m_filter__IsNotDefault -privateboolean m_filter__IsNotDefault +m_format__IsNotDefault +privateboolean m_format__IsNotDefault - + -m_bcn 
-privatehttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String m_bcn +m_bcv +privatehttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">String m_bcv - + -m_bcn__IsNotDefault -privateboolean m_bcn__IsNotDefault +m_bcv__IsNotDefault +privateboolean m_bcv__IsNotDefault @@ -408,112 +408,112 @@ extends org.jamon.AbstractTemplateProxy.ImplData publicHRegionServergetRegionServer() - + -setFormat -publicvoidsetFormat(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringformat) +setBcn +publicvoidsetBcn(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringbcn) - + -getFormat -publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringgetFormat() +getBcn +publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringgetBcn() - + -getFormat__IsNotDefault -publicbooleangetFormat__IsNotDefault() +getBcn__IsNotDefault +publicbooleangetBcn__IsNotDefault() - + -setBcv -publicvoidsetBcv(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringbcv) +setFilter +publicvoidsetFilter(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringfilter) - + -getBcv -publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringgetBcv() +getFilter +publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringgetFilter() - + -getBcv__IsNotDefault -publicbooleangetBcv__IsNotDefault() 
+getFilter__IsNotDefault +publicbooleangetFilter__IsNotDefault() - + -setFilter -publicvoidsetFilter(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringfilter) +setFormat +publicvoidsetFormat(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">Stringformat) - + -getFilter -publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringgetFilter() +getFormat +publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true; title="class or interface in java.lang">StringgetFormat() - + -getFilter__IsNotDefault
[10/12] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/checkstyle.rss -- diff --git a/checkstyle.rss b/checkstyle.rss index 471fc41..c8ddcf2 100644 --- a/checkstyle.rss +++ b/checkstyle.rss @@ -26,7 +26,7 @@ under the License. 2007 - 2017 The Apache Software Foundation File: 2167, - Errors: 14407, + Errors: 14408, Warnings: 0, Infos: 0 @@ -9883,7 +9883,7 @@ under the License. 0 - 4 + 5 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/coc.html -- diff --git a/coc.html b/coc.html index 8e1108a..e7b1b51 100644 --- a/coc.html +++ b/coc.html @@ -7,7 +7,7 @@ - + Apache HBase Code of Conduct Policy @@ -380,7 +380,7 @@ email to mailto:priv...@hbase.apache.org;>the priv https://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2017-05-20 + Last Published: 2017-05-21 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/cygwin.html -- diff --git a/cygwin.html b/cygwin.html index eeff6ab..f951641 100644 --- a/cygwin.html +++ b/cygwin.html @@ -7,7 +7,7 @@ - + Apache HBase Installing Apache HBase (TM) on Windows using Cygwin @@ -679,7 +679,7 @@ Now your HBase server is running, start coding and build that next https://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2017-05-20 + Last Published: 2017-05-21 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/dependencies.html -- diff --git a/dependencies.html b/dependencies.html index 1115658..dfcff10 100644 --- a/dependencies.html +++ b/dependencies.html @@ -7,7 +7,7 @@ - + Apache HBase Project Dependencies @@ -524,7 +524,7 @@ https://www.apache.org/;>The Apache Software Foundation. All rights reserved. 
- Last Published: 2017-05-20 + Last Published: 2017-05-21 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/dependency-convergence.html -- diff --git a/dependency-convergence.html b/dependency-convergence.html index 1dd969b..b93bf1d 100644 --- a/dependency-convergence.html +++ b/dependency-convergence.html @@ -7,7 +7,7 @@ - + Apache HBase Reactor Dependency Convergence @@ -1849,7 +1849,7 @@ https://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2017-05-20 + Last Published: 2017-05-21 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/dependency-info.html -- diff --git a/dependency-info.html b/dependency-info.html index 5ef1d73..93bde77 100644 --- a/dependency-info.html +++ b/dependency-info.html @@ -7,7 +7,7 @@ - + Apache HBase Dependency Information @@ -318,7 +318,7 @@ https://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2017-05-20 + Last Published: 2017-05-21 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/dependency-management.html -- diff --git a/dependency-management.html b/dependency-management.html index 4521f48..e373e30 100644 --- a/dependency-management.html +++ b/dependency-management.html @@ -7,7 +7,7 @@ - + Apache HBase Project Dependency Management @@ -894,7 +894,7 @@ https://www.apache.org/;>The Apache Software Foundation. All rights reserved. - Last Published: 2017-05-20 + Last Published: 2017-05-21
[06/12] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html -- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html index 75604bc..8424538 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html @@ -69,15 +69,15 @@ 061 requiredArguments = { 062@org.jamon.annotations.Argument(name = "master", type = "HMaster")}, 063 optionalArguments = { -064@org.jamon.annotations.Argument(name = "serverManager", type = "ServerManager"), -065@org.jamon.annotations.Argument(name = "catalogJanitorEnabled", type = "boolean"), -066@org.jamon.annotations.Argument(name = "frags", type = "MapString,Integer"), -067@org.jamon.annotations.Argument(name = "filter", type = "String"), -068@org.jamon.annotations.Argument(name = "format", type = "String"), -069@org.jamon.annotations.Argument(name = "metaLocation", type = "ServerName"), -070@org.jamon.annotations.Argument(name = "deadServers", type = "SetServerName"), -071@org.jamon.annotations.Argument(name = "assignmentManager", type = "AssignmentManager"), -072@org.jamon.annotations.Argument(name = "servers", type = "ListServerName")}) +064@org.jamon.annotations.Argument(name = "deadServers", type = "SetServerName"), +065@org.jamon.annotations.Argument(name = "frags", type = "MapString,Integer"), +066@org.jamon.annotations.Argument(name = "format", type = "String"), +067@org.jamon.annotations.Argument(name = "metaLocation", type = "ServerName"), +068@org.jamon.annotations.Argument(name = "servers", type = "ListServerName"), +069@org.jamon.annotations.Argument(name = "filter", type = "String"), +070@org.jamon.annotations.Argument(name = "catalogJanitorEnabled", type = "boolean"), 
+071@org.jamon.annotations.Argument(name = "serverManager", type = "ServerManager"), +072@org.jamon.annotations.Argument(name = "assignmentManager", type = "AssignmentManager")}) 073public class MasterStatusTmpl 074 extends org.jamon.AbstractTemplateProxy 075{ @@ -118,159 +118,159 @@ 110 return m_master; 111} 112private HMaster m_master; -113// 28, 1 -114public void setServerManager(ServerManager serverManager) +113// 24, 1 +114public void setDeadServers(SetServerName deadServers) 115{ -116 // 28, 1 -117 m_serverManager = serverManager; -118 m_serverManager__IsNotDefault = true; +116 // 24, 1 +117 m_deadServers = deadServers; +118 m_deadServers__IsNotDefault = true; 119} -120public ServerManager getServerManager() +120public SetServerName getDeadServers() 121{ -122 return m_serverManager; +122 return m_deadServers; 123} -124private ServerManager m_serverManager; -125public boolean getServerManager__IsNotDefault() +124private SetServerName m_deadServers; +125public boolean getDeadServers__IsNotDefault() 126{ -127 return m_serverManager__IsNotDefault; +127 return m_deadServers__IsNotDefault; 128} -129private boolean m_serverManager__IsNotDefault; -130// 25, 1 -131public void setCatalogJanitorEnabled(boolean catalogJanitorEnabled) +129private boolean m_deadServers__IsNotDefault; +130// 21, 1 +131public void setFrags(MapString,Integer frags) 132{ -133 // 25, 1 -134 m_catalogJanitorEnabled = catalogJanitorEnabled; -135 m_catalogJanitorEnabled__IsNotDefault = true; +133 // 21, 1 +134 m_frags = frags; +135 m_frags__IsNotDefault = true; 136} -137public boolean getCatalogJanitorEnabled() +137public MapString,Integer getFrags() 138{ -139 return m_catalogJanitorEnabled; +139 return m_frags; 140} -141private boolean m_catalogJanitorEnabled; -142public boolean getCatalogJanitorEnabled__IsNotDefault() +141private MapString,Integer m_frags; +142public boolean getFrags__IsNotDefault() 143{ -144 return m_catalogJanitorEnabled__IsNotDefault; +144 return m_frags__IsNotDefault; 145} 
-146private boolean m_catalogJanitorEnabled__IsNotDefault; -147// 21, 1 -148public void setFrags(MapString,Integer frags) +146private boolean m_frags__IsNotDefault; +147// 27, 1 +148public void setFormat(String format) 149{ -150 // 21, 1 -151 m_frags = frags; -152 m_frags__IsNotDefault = true; +150 // 27, 1 +151 m_format = format; +152 m_format__IsNotDefault = true; 153} -154public MapString,Integer getFrags() +154public String getFormat() 155{ -156
[05/12] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1a880974/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html -- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html index 75604bc..8424538 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html @@ -69,15 +69,15 @@ 061 requiredArguments = { 062@org.jamon.annotations.Argument(name = "master", type = "HMaster")}, 063 optionalArguments = { -064@org.jamon.annotations.Argument(name = "serverManager", type = "ServerManager"), -065@org.jamon.annotations.Argument(name = "catalogJanitorEnabled", type = "boolean"), -066@org.jamon.annotations.Argument(name = "frags", type = "MapString,Integer"), -067@org.jamon.annotations.Argument(name = "filter", type = "String"), -068@org.jamon.annotations.Argument(name = "format", type = "String"), -069@org.jamon.annotations.Argument(name = "metaLocation", type = "ServerName"), -070@org.jamon.annotations.Argument(name = "deadServers", type = "SetServerName"), -071@org.jamon.annotations.Argument(name = "assignmentManager", type = "AssignmentManager"), -072@org.jamon.annotations.Argument(name = "servers", type = "ListServerName")}) +064@org.jamon.annotations.Argument(name = "deadServers", type = "SetServerName"), +065@org.jamon.annotations.Argument(name = "frags", type = "MapString,Integer"), +066@org.jamon.annotations.Argument(name = "format", type = "String"), +067@org.jamon.annotations.Argument(name = "metaLocation", type = "ServerName"), +068@org.jamon.annotations.Argument(name = "servers", type = "ListServerName"), +069@org.jamon.annotations.Argument(name = "filter", type = "String"), +070@org.jamon.annotations.Argument(name = "catalogJanitorEnabled", type = "boolean"), +071@org.jamon.annotations.Argument(name = 
"serverManager", type = "ServerManager"), +072@org.jamon.annotations.Argument(name = "assignmentManager", type = "AssignmentManager")}) 073public class MasterStatusTmpl 074 extends org.jamon.AbstractTemplateProxy 075{ @@ -118,159 +118,159 @@ 110 return m_master; 111} 112private HMaster m_master; -113// 28, 1 -114public void setServerManager(ServerManager serverManager) +113// 24, 1 +114public void setDeadServers(SetServerName deadServers) 115{ -116 // 28, 1 -117 m_serverManager = serverManager; -118 m_serverManager__IsNotDefault = true; +116 // 24, 1 +117 m_deadServers = deadServers; +118 m_deadServers__IsNotDefault = true; 119} -120public ServerManager getServerManager() +120public SetServerName getDeadServers() 121{ -122 return m_serverManager; +122 return m_deadServers; 123} -124private ServerManager m_serverManager; -125public boolean getServerManager__IsNotDefault() +124private SetServerName m_deadServers; +125public boolean getDeadServers__IsNotDefault() 126{ -127 return m_serverManager__IsNotDefault; +127 return m_deadServers__IsNotDefault; 128} -129private boolean m_serverManager__IsNotDefault; -130// 25, 1 -131public void setCatalogJanitorEnabled(boolean catalogJanitorEnabled) +129private boolean m_deadServers__IsNotDefault; +130// 21, 1 +131public void setFrags(MapString,Integer frags) 132{ -133 // 25, 1 -134 m_catalogJanitorEnabled = catalogJanitorEnabled; -135 m_catalogJanitorEnabled__IsNotDefault = true; +133 // 21, 1 +134 m_frags = frags; +135 m_frags__IsNotDefault = true; 136} -137public boolean getCatalogJanitorEnabled() +137public MapString,Integer getFrags() 138{ -139 return m_catalogJanitorEnabled; +139 return m_frags; 140} -141private boolean m_catalogJanitorEnabled; -142public boolean getCatalogJanitorEnabled__IsNotDefault() +141private MapString,Integer m_frags; +142public boolean getFrags__IsNotDefault() 143{ -144 return m_catalogJanitorEnabled__IsNotDefault; +144 return m_frags__IsNotDefault; 145} -146private boolean 
m_catalogJanitorEnabled__IsNotDefault; -147// 21, 1 -148public void setFrags(MapString,Integer frags) +146private boolean m_frags__IsNotDefault; +147// 27, 1 +148public void setFormat(String format) 149{ -150 // 21, 1 -151 m_frags = frags; -152 m_frags__IsNotDefault = true; +150 // 27, 1 +151 m_format = format; +152 m_format__IsNotDefault = true; 153} -154public MapString,Integer getFrags() +154public String getFormat() 155{ -156 return m_frags; +156 return m_format; 157
hbase git commit: HBASE-15616 (addendum) Allow null qualifier for all table operations
Repository: hbase Updated Branches: refs/heads/master 1ceb25cf0 -> 553d5db35 HBASE-15616 (addendum) Allow null qualifier for all table operations Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/553d5db3 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/553d5db3 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/553d5db3 Branch: refs/heads/master Commit: 553d5db35565ce5c7bb7f08dfb35ed9422b6bafb Parents: 1ceb25c Author: Guanghao ZhangAuthored: Fri May 19 18:31:32 2017 +0800 Committer: Guanghao Zhang Committed: Mon May 22 09:42:02 2017 +0800 -- .../main/java/org/apache/hadoop/hbase/client/AsyncTableBase.java| 1 - 1 file changed, 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/553d5db3/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBase.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBase.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBase.java index 73ebebb..3c551df 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBase.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBase.java @@ -184,7 +184,6 @@ public interface AsyncTableBase { long amount, Durability durability) { Preconditions.checkNotNull(row, "row is null"); Preconditions.checkNotNull(family, "family is null"); -Preconditions.checkNotNull(qualifier, "qualifier is null"); return increment( new Increment(row).addColumn(family, qualifier, amount).setDurability(durability)) .thenApply(r -> Bytes.toLong(r.getValue(family, qualifier)));
[2/2] hbase git commit: HBASE-18067 Allow default FORMATTER for shell put/get commands
HBASE-18067 Allow default FORMATTER for shell put/get commands Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f1544c34 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f1544c34 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f1544c34 Branch: refs/heads/master Commit: f1544c34669a69cb3ed163e45274f2fcca18e3eb Parents: 553d5db Author: Josh ElserAuthored: Wed May 17 19:19:23 2017 -0400 Committer: Josh Elser Committed: Sun May 21 22:24:12 2017 -0400 -- hbase-shell/src/main/ruby/hbase/table.rb| 51 +++--- hbase-shell/src/main/ruby/hbase_constants.rb| 2 + hbase-shell/src/main/ruby/shell/commands/get.rb | 9 +- .../src/main/ruby/shell/commands/scan.rb| 9 +- .../src/test/ruby/shell/converter_test.rb | 157 +++ 5 files changed, 206 insertions(+), 22 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/f1544c34/hbase-shell/src/main/ruby/hbase/table.rb -- diff --git a/hbase-shell/src/main/ruby/hbase/table.rb b/hbase-shell/src/main/ruby/hbase/table.rb index 22bbcfe..946c72c 100644 --- a/hbase-shell/src/main/ruby/hbase/table.rb +++ b/hbase-shell/src/main/ruby/hbase/table.rb @@ -347,6 +347,8 @@ EOF authorizations = args[AUTHORIZATIONS] consistency = args.delete(CONSISTENCY) if args[CONSISTENCY] replicaId = args.delete(REGION_REPLICA_ID) if args[REGION_REPLICA_ID] + converter = args.delete(FORMATTER) || nil + converter_class = args.delete(FORMATTER_CLASS) || 'org.apache.hadoop.hbase.util.Bytes' unless args.empty? columns = args[COLUMN] || args[COLUMNS] if args[VERSIONS] @@ -419,13 +421,13 @@ EOF # Print out results. Result can be Cell or RowResult. 
res = {} result.listCells.each do |c| -family = org.apache.hadoop.hbase.util.Bytes::toStringBinary(c.getFamilyArray, - c.getFamilyOffset, c.getFamilyLength) -qualifier = org.apache.hadoop.hbase.util.Bytes::toStringBinary(c.getQualifierArray, - c.getQualifierOffset, c.getQualifierLength) +family = convert_bytes_with_position(c.getFamilyArray, + c.getFamilyOffset, c.getFamilyLength, converter_class, converter) +qualifier = convert_bytes_with_position(c.getQualifierArray, + c.getQualifierOffset, c.getQualifierLength, converter_class, converter) column = "#{family}:#{qualifier}" -value = to_string(column, c, maxlength) +value = to_string(column, c, maxlength, converter_class, converter) if block_given? yield(column, value) @@ -544,6 +546,8 @@ EOF limit = args["LIMIT"] || -1 maxlength = args.delete("MAXLENGTH") || -1 + converter = args.delete(FORMATTER) || nil + converter_class = args.delete(FORMATTER_CLASS) || 'org.apache.hadoop.hbase.util.Bytes' count = 0 res = {} @@ -555,17 +559,17 @@ EOF # Iterate results while iter.hasNext row = iter.next -key = org.apache.hadoop.hbase.util.Bytes::toStringBinary(row.getRow) +key = convert_bytes(row.getRow, nil, converter) is_stale |= row.isStale row.listCells.each do |c| - family = org.apache.hadoop.hbase.util.Bytes::toStringBinary(c.getFamilyArray, -c.getFamilyOffset, c.getFamilyLength) - qualifier = org.apache.hadoop.hbase.util.Bytes::toStringBinary(c.getQualifierArray, -c.getQualifierOffset, c.getQualifierLength) + family = convert_bytes_with_position(c.getFamilyArray, +c.getFamilyOffset, c.getFamilyLength, converter_class, converter) + qualifier = convert_bytes_with_position(c.getQualifierArray, +c.getQualifierOffset, c.getQualifierLength, converter_class, converter) column = "#{family}:#{qualifier}" - cell = to_string(column, c, maxlength) + cell = to_string(column, c, maxlength, converter_class, converter) if block_given? 
yield(key, "column=#{column}, #{cell}") @@ -693,7 +697,7 @@ EOF # Make a String of the passed kv # Intercept cells whose format we know such as the info:regioninfo in hbase:meta -def to_string(column, kv, maxlength = -1) +def to_string(column, kv, maxlength = -1, converter_class = nil, converter=nil) if is_meta_table? if column == 'info:regioninfo' or column == 'info:splitA' or column == 'info:splitB' hri = org.apache.hadoop.hbase.HRegionInfo.parseFromOrNull(kv.getValueArray, @@ -715,16 +719,16 @@ EOF if kv.isDelete val = "timestamp=#{kv.getTimestamp},
[1/2] hbase git commit: HBASE-18075 Support non-latin table names and namespaces
Repository: hbase Updated Branches: refs/heads/master 553d5db35 -> 709f5a198 HBASE-18075 Support non-latin table names and namespaces Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/709f5a19 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/709f5a19 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/709f5a19 Branch: refs/heads/master Commit: 709f5a1980abe65eb85c638dacaaea8502ee0034 Parents: f1544c3 Author: Josh ElserAuthored: Thu May 18 18:38:25 2017 -0400 Committer: Josh Elser Committed: Sun May 21 22:24:12 2017 -0400 -- .../hadoop/hbase/TestHTableDescriptor.java | 20 - .../java/org/apache/hadoop/hbase/TableName.java | 84 .../org/apache/hadoop/hbase/io/HFileLink.java | 3 +- 3 files changed, 73 insertions(+), 34 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/709f5a19/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java -- diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java index 9a6d3e3..bcff565 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java @@ -23,6 +23,7 @@ import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.IOException; +import java.util.Arrays; import java.util.regex.Pattern; import org.apache.commons.logging.Log; @@ -181,10 +182,13 @@ public class TestHTableDescriptor { String legalTableNames[] = { "foo", "with-dash_under.dot", "_under_start_ok", "with-dash.with_underscore", "02-01-2012.my_table_01-02", "xyz._mytable_", "9_9_0.table_02" , "dot1.dot2.table", "new.-mytable", "with-dash.with.dot", "legal..t2", "legal..legal.t2", - "trailingdots..", "trailing.dots...", "ns:mytable", "ns:_mytable_", "ns:my_table_01-02"}; + "trailingdots..", "trailing.dots...", 
"ns:mytable", "ns:_mytable_", "ns:my_table_01-02", + "æ±", "æ±:å", "_å_", "foo:å", "foo.å", "å.foo"}; + // Avoiding "zookeeper" in here as it's tough to encode in regex String illegalTableNames[] = { ".dot_start_illegal", "-dash_start_illegal", "spaces not ok", "-dash-.start_illegal", "new.table with space", "01 .table", "ns:-illegaldash", - "new:.illegaldot", "new:illegalcolon1:", "new:illegalcolon1:2"}; + "new:.illegaldot", "new:illegalcolon1:", "new:illegalcolon1:2", String.valueOf((char)130), + String.valueOf((char)5), String.valueOf((char)65530)}; @Test public void testLegalHTableNames() { @@ -206,6 +210,18 @@ public class TestHTableDescriptor { } @Test + public void testIllegalZooKeeperName() { +for (String name : Arrays.asList("zookeeper", "ns:zookeeper", "zookeeper:table")) { + try { +TableName.isLegalFullyQualifiedTableName(Bytes.toBytes(name)); +fail("invalid tablename " + name + " should have failed"); + } catch (Exception e) { +// expected + } +} + } + + @Test public void testLegalHTableNamesRegex() { for (String tn : legalTableNames) { TableName tName = TableName.valueOf(tn); http://git-wip-us.apache.org/repos/asf/hbase/blob/709f5a19/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java -- diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java index c4c15d0..a9b2527 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase; import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Set; import java.util.concurrent.CopyOnWriteArraySet; @@ -66,10 +67,10 @@ public final class TableName implements Comparable { // in default namespace //Allows only letters, digits and '_' public static final String VALID_NAMESPACE_REGEX = - "(?:[a-zA-Z_0-9]+)"; + 
"(?:[_\\p{Digit}\\p{IsAlphabetic}]+)"; //Allows only letters, digits, '_', '-' and '.' public static final String VALID_TABLE_QUALIFIER_REGEX = - "(?:[a-zA-Z_0-9][a-zA-Z_0-9-.]*)"; + "(?:[_\\p{Digit}\\p{IsAlphabetic}][-_.\\p{Digit}\\p{IsAlphabetic}]*)"; //Concatenation of NAMESPACE_REGEX and TABLE_QUALIFIER_REGEX, //with NAMESPACE_DELIM as delimiter public static final String VALID_USER_TABLE_REGEX = @@ -87,6 +88,9 @@ public