hbase git commit: HBASE-17849 PE tool random read is not totally random (Ram)

2017-06-06 Thread ramkrishna
Repository: hbase
Updated Branches:
  refs/heads/master 929c9dab1 -> 1d3252eb5


HBASE-17849 PE tool random read is not totally random (Ram)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1d3252eb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1d3252eb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1d3252eb

Branch: refs/heads/master
Commit: 1d3252eb59a0e7dbc2f120e68a22d9429bc596a9
Parents: 929c9da
Author: Ramkrishna 
Authored: Wed Jun 7 11:28:09 2017 +0530
Committer: Ramkrishna 
Committed: Wed Jun 7 11:28:09 2017 +0530

--
 .../hadoop/hbase/PerformanceEvaluation.java | 36 +---
 .../hadoop/hbase/TestPerformanceEvaluation.java | 32 +
 2 files changed, 56 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1d3252eb/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index d0b7319..2c5cb65 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -124,6 +124,8 @@ import com.codahale.metrics.UniformReservoir;
  */
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
 public class PerformanceEvaluation extends Configured implements Tool {
+  static final String RANDOM_SEEK_SCAN = "randomSeekScan";
+  static final String RANDOM_READ = "randomRead";
   private static final Log LOG = 
LogFactory.getLog(PerformanceEvaluation.class.getName());
   private static final ObjectMapper MAPPER = new ObjectMapper();
   static {
@@ -151,9 +153,9 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   private static final Path PERF_EVAL_DIR = new Path("performance_evaluation");
 
   static {
-addCommandDescriptor(RandomReadTest.class, "randomRead",
+addCommandDescriptor(RandomReadTest.class, RANDOM_READ,
   "Run random read test");
-addCommandDescriptor(RandomSeekScanTest.class, "randomSeekScan",
+addCommandDescriptor(RandomSeekScanTest.class, RANDOM_SEEK_SCAN,
   "Run random seek and scan 100 test");
 addCommandDescriptor(RandomScanWithRange10Test.class, "scanRange10",
   "Run random seek scan with both start and stop row (max 10 rows)");
@@ -1769,7 +1771,11 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
   }
 
   static byte [] getRandomRow(final Random random, final int totalRows) {
-return format(random.nextInt(Integer.MAX_VALUE) % totalRows);
+return format(generateRandomRow(random, totalRows));
+  }
+
+  static int generateRandomRow(final Random random, final int totalRows) {
+return random.nextInt(Integer.MAX_VALUE) % totalRows;
   }
 
   static RunResult runOneClient(final Class cmd, Configuration 
conf, Connection con,
@@ -1872,9 +1878,15 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 System.err.println("Table Creation / Write Tests:");
 System.err.println(" table   Alternate table name. Default: 
'TestTable'");
 System.err.println(" rowsRows each client runs. Default: "
-+ DEFAULT_OPTS.getPerClientRunRows());
-System.err.println(" sizeTotal size in GiB. Mutually exclusive 
with --rows. " +
-  "Default: 1.0.");
++ DEFAULT_OPTS.getPerClientRunRows()
++ ".  In case of randomReads and randomSeekScans this could"
++ " be specified along with --size to specify the number of rows to be 
scanned within"
++ " the total range specified by the size.");
+System.err.println(
+  " sizeTotal size in GiB. Mutually exclusive with --rows for 
writes and scans"
+  + ". But for randomReads and randomSeekScans when you use size with 
--rows you could"
+  + " use size to specify the end range and --rows"
+  + " specifies the number of rows within that range. " + "Default: 
1.0.");
 System.err.println(" compressCompression type to use (GZ, LZO, 
...). Default: 'NONE'");
 System.err.println(" flushCommitsUsed to determine if the test should 
flush the table. " +
   "Default: false");
@@ -2193,11 +2205,6 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 } catch (NoSuchElementException | NumberFormatException e) {
   throw new IllegalArgumentException("Command " + cmd + " does not 
have threads number", e);
 }
-if (opts.size != DEFAULT_OPTS.size 
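The refactor above splits row selection into a package-private generateRandomRow(Random, int) so the bounded-random behaviour can be unit tested. A minimal stand-alone sketch (not the committed TestPerformanceEvaluation change) of the property the new helper is expected to satisfy:

// Sketch only: mirrors PerformanceEvaluation.generateRandomRow(Random, int) from the diff
// above and checks that every generated row index stays inside [0, totalRows), which is
// what the random read / random seek-scan tests rely on.
import java.util.Random;

public class GenerateRandomRowSketch {
  static int generateRandomRow(Random random, int totalRows) {
    return random.nextInt(Integer.MAX_VALUE) % totalRows;
  }

  public static void main(String[] args) {
    Random random = new Random(42);   // fixed seed so the check is reproducible
    int totalRows = 1000;
    for (int i = 0; i < 100_000; i++) {
      int row = generateRandomRow(random, totalRows);
      if (row < 0 || row >= totalRows) {
        throw new AssertionError("row index out of range: " + row);
      }
    }
    System.out.println("all generated row indexes fell inside [0, " + totalRows + ")");
  }
}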

hbase git commit: HBASE-18181 Move master branch to version 3.0.0-SNAPSHOT post creation of branch-2

2017-06-06 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 0d0c33040 -> 929c9dab1


HBASE-18181 Move master branch to version 3.0.0-SNAPSHOT post creation of 
branch-2


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/929c9dab
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/929c9dab
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/929c9dab

Branch: refs/heads/master
Commit: 929c9dab14833e04b1e37d173143f7d22d1496eb
Parents: 0d0c330
Author: Michael Stack 
Authored: Tue Jun 6 22:04:39 2017 -0700
Committer: Michael Stack 
Committed: Tue Jun 6 22:04:39 2017 -0700

--
 hbase-annotations/pom.xml| 2 +-
 hbase-archetypes/hbase-archetype-builder/pom.xml | 2 +-
 hbase-archetypes/hbase-client-project/pom.xml| 2 +-
 hbase-archetypes/hbase-shaded-client-project/pom.xml | 2 +-
 hbase-archetypes/pom.xml | 2 +-
 hbase-assembly/pom.xml   | 2 +-
 hbase-checkstyle/pom.xml | 4 ++--
 hbase-client/pom.xml | 2 +-
 hbase-common/pom.xml | 2 +-
 hbase-endpoint/pom.xml   | 2 +-
 hbase-examples/pom.xml   | 2 +-
 hbase-external-blockcache/pom.xml| 2 +-
 hbase-hadoop-compat/pom.xml  | 2 +-
 hbase-hadoop2-compat/pom.xml | 2 +-
 hbase-it/pom.xml | 2 +-
 hbase-metrics-api/pom.xml| 2 +-
 hbase-metrics/pom.xml| 2 +-
 hbase-prefix-tree/pom.xml| 2 +-
 hbase-procedure/pom.xml  | 2 +-
 hbase-protocol-shaded/pom.xml| 2 +-
 hbase-protocol/pom.xml   | 2 +-
 hbase-resource-bundle/pom.xml| 2 +-
 hbase-rest/pom.xml   | 2 +-
 hbase-rsgroup/pom.xml| 2 +-
 hbase-server/pom.xml | 2 +-
 hbase-shaded/hbase-shaded-client/pom.xml | 2 +-
 hbase-shaded/hbase-shaded-server/pom.xml | 2 +-
 hbase-shaded/pom.xml | 2 +-
 hbase-shell/pom.xml  | 2 +-
 hbase-spark/pom.xml  | 2 +-
 hbase-testing-util/pom.xml   | 2 +-
 hbase-thrift/pom.xml | 2 +-
 pom.xml  | 2 +-
 33 files changed, 34 insertions(+), 34 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/929c9dab/hbase-annotations/pom.xml
--
diff --git a/hbase-annotations/pom.xml b/hbase-annotations/pom.xml
index d9e0a44..4dcf143 100644
--- a/hbase-annotations/pom.xml
+++ b/hbase-annotations/pom.xml
@@ -23,7 +23,7 @@
   
 hbase
 org.apache.hbase
-2.0.0-SNAPSHOT
+3.0.0-SNAPSHOT
 ..
   
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/929c9dab/hbase-archetypes/hbase-archetype-builder/pom.xml
--
diff --git a/hbase-archetypes/hbase-archetype-builder/pom.xml 
b/hbase-archetypes/hbase-archetype-builder/pom.xml
index 1672dea..6d79374 100644
--- a/hbase-archetypes/hbase-archetype-builder/pom.xml
+++ b/hbase-archetypes/hbase-archetype-builder/pom.xml
@@ -25,7 +25,7 @@
   
 hbase-archetypes
 org.apache.hbase
-2.0.0-SNAPSHOT
+3.0.0-SNAPSHOT
 ..
   
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/929c9dab/hbase-archetypes/hbase-client-project/pom.xml
--
diff --git a/hbase-archetypes/hbase-client-project/pom.xml 
b/hbase-archetypes/hbase-client-project/pom.xml
index 486f3ee..71f25e3 100644
--- a/hbase-archetypes/hbase-client-project/pom.xml
+++ b/hbase-archetypes/hbase-client-project/pom.xml
@@ -26,7 +26,7 @@
   
 hbase-archetypes
 org.apache.hbase
-2.0.0-SNAPSHOT
+3.0.0-SNAPSHOT
 ..
   
   hbase-client-project

http://git-wip-us.apache.org/repos/asf/hbase/blob/929c9dab/hbase-archetypes/hbase-shaded-client-project/pom.xml
--
diff --git a/hbase-archetypes/hbase-shaded-client-project/pom.xml 
b/hbase-archetypes/hbase-shaded-client-project/pom.xml
index 6f73033..59acf94 100644
--- a/hbase-archetypes/hbase-shaded-client-project/pom.xml
+++ b/hbase-archetypes/hbase-shaded-client-project/pom.xml
@@ -26,7 +26,7 @@
   
 hbase-archetypes
 org.apache.hbase
-2.0.0-SNAPSHOT
+3.0.0-SNAPSHOT
 

[hbase] Git Push Summary

2017-06-06 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 [created] 0d0c33040


[hbase] Git Push Summary

2017-06-06 Thread busbey
Repository: hbase
Updated Tags:  refs/tags/rel/1.2.6 [created] f0bc8e552


hbase git commit: HBASE-17678 FilterList with MUST_PASS_ONE may lead to redundant cells returned

2017-06-06 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master 2f1923a82 -> 0d0c33040


HBASE-17678 FilterList with MUST_PASS_ONE may lead to redundant cells returned

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0d0c3304
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0d0c3304
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0d0c3304

Branch: refs/heads/master
Commit: 0d0c330401ade938bf934aafd79ec23705edcc60
Parents: 2f1923a
Author: huzheng 
Authored: Sat May 27 16:58:00 2017 +0800
Committer: tedyu 
Committed: Tue Jun 6 21:08:12 2017 -0700

--
 .../apache/hadoop/hbase/filter/FilterList.java  |  76 +++-
 .../hadoop/hbase/filter/TestFilterList.java | 117 +++
 2 files changed, 191 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0d0c3304/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 0742b22..985cb16 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -66,6 +66,14 @@ final public class FilterList extends FilterBase {
   private final List<Filter> filters;
   private Filter seekHintFilter = null;
 
+  /**
+   * Save previous return code and previous cell for every filter in the filter list. For
+   * MUST_PASS_ONE, we use the previous return code to decide whether we should pass the current
+   * cell to the filter. For MUST_PASS_ALL, the two lists are meaningless.
+   */
+  private List<ReturnCode> prevFilterRCList = null;
+  private List<Cell> prevCellList = null;
+
   /** Reference Cell used by {@link #transformCell(Cell)} for validation 
purpose. */
   private Cell referenceCell = null;
 
@@ -87,6 +95,7 @@ final public class FilterList extends FilterBase {
   public FilterList(final List<Filter> rowFilters) {
 reversed = getReversed(rowFilters, reversed);
 this.filters = new ArrayList<>(rowFilters);
+initPrevListForMustPassOne(rowFilters.size());
   }
 
   /**
@@ -106,6 +115,7 @@ final public class FilterList extends FilterBase {
   public FilterList(final Operator operator) {
 this.operator = operator;
 this.filters = new ArrayList<>();
+initPrevListForMustPassOne(filters.size());
   }
 
   /**
@@ -117,6 +127,7 @@ final public class FilterList extends FilterBase {
   public FilterList(final Operator operator, final List<Filter> rowFilters) {
 this(rowFilters);
 this.operator = operator;
+initPrevListForMustPassOne(rowFilters.size());
   }
 
   /**
@@ -128,8 +139,21 @@ final public class FilterList extends FilterBase {
   public FilterList(final Operator operator, final Filter... rowFilters) {
 this(rowFilters);
 this.operator = operator;
+initPrevListForMustPassOne(rowFilters.length);
+  }
+
+  public void initPrevListForMustPassOne(int size) {
+    if (operator == Operator.MUST_PASS_ONE) {
+      if (this.prevFilterRCList == null) {
+        prevFilterRCList = new ArrayList<>(Collections.nCopies(size, null));
+      }
+      if (this.prevCellList == null) {
+        prevCellList = new ArrayList<>(Collections.nCopies(size, null));
+      }
+    }
   }
 
+
   /**
* Get the operator.
*
@@ -184,6 +208,10 @@ final public class FilterList extends FilterBase {
   public void addFilter(List<Filter> filters) {
     checkReversed(filters, isReversed());
     this.filters.addAll(filters);
+    if (operator == Operator.MUST_PASS_ONE) {
+      this.prevFilterRCList.addAll(Collections.nCopies(filters.size(), null));
+      this.prevCellList.addAll(Collections.nCopies(filters.size(), null));
+    }
   }
 
   /**
@@ -200,6 +228,10 @@ final public class FilterList extends FilterBase {
 int listize = filters.size();
 for (int i = 0; i < listize; i++) {
   filters.get(i).reset();
+  if (operator == Operator.MUST_PASS_ONE) {
+prevFilterRCList.set(i, null);
+prevCellList.set(i, null);
+  }
 }
 seekHintFilter = null;
   }
@@ -282,6 +314,41 @@ final public class FilterList extends FilterBase {
 return this.transformedCell;
   }
 
+  /**
+   * For MUST_PASS_ONE, we cannot make sure that when filter-A in the filter list returns NEXT_COL,
+   * the next cell passed to the filter list will be the first cell in the next column, because if
+   * filter-B in the filter list returns SKIP, then the whole filter list returns SKIP. In this
+   * case, we should pass the cell following the previous cell, and it's possible that the next
+   * cell has the same 

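For context, a hedged client-side example of the MUST_PASS_ONE case this fix targets; the table, family and qualifier names are illustrative, and the two prefix filters stand in for any combination where one member can return SKIP while another has already asked to jump ahead:

// Illustrative only: assembles a MUST_PASS_ONE (OR) FilterList on the client.
import java.io.IOException;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.util.Bytes;

public class MustPassOneScanExample {
  public static void main(String[] args) throws IOException {
    try (Connection conn = ConnectionFactory.createConnection();
         Table table = conn.getTable(TableName.valueOf("t1"))) {
      // OR semantics: a cell passes if either prefix filter accepts it.
      FilterList filters = new FilterList(FilterList.Operator.MUST_PASS_ONE,
          new ColumnPrefixFilter(Bytes.toBytes("a")),
          new ColumnPrefixFilter(Bytes.toBytes("b")));
      Scan scan = new Scan();
      scan.setFilter(filters);
      try (ResultScanner scanner = table.getScanner(scan)) {
        for (Result r : scanner) {
          // Before HBASE-17678 a cell could show up more than once here when one member
          // filter returned SKIP while another had already moved past the column.
          System.out.println(r);
        }
      }
    }
  }
}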
[5/5] hbase git commit: HBASE-15576 Scanning cursor to prevent blocking long time on ResultScanner.next()

2017-06-06 Thread yangzhe1991
HBASE-15576 Scanning cursor to prevent blocking long time on 
ResultScanner.next()


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/381c89b5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/381c89b5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/381c89b5

Branch: refs/heads/branch-1
Commit: 381c89b5cc463b58010ab443de9e1c1d40d8bf98
Parents: 9cb57ae
Author: Phil Yang 
Authored: Tue Jun 6 15:39:24 2017 +0800
Committer: Phil Yang 
Committed: Wed Jun 7 11:37:22 2017 +0800

--
 .../hadoop/hbase/client/ClientScanner.java  |   15 +
 .../apache/hadoop/hbase/client/ClientUtil.java  |4 +
 .../org/apache/hadoop/hbase/client/Cursor.java  |   43 +
 .../org/apache/hadoop/hbase/client/Result.java  |   45 +
 .../org/apache/hadoop/hbase/client/Scan.java|   42 +
 .../hadoop/hbase/client/ScannerCallable.java|   14 +-
 .../client/ScannerCallableWithReplicas.java |4 +
 .../hadoop/hbase/protobuf/ProtobufUtil.java |   23 +
 .../hbase/protobuf/generated/ClientProtos.java  | 1086 +++---
 hbase-protocol/src/main/protobuf/Client.proto   |   14 +-
 .../hbase/regionserver/RSRpcServices.java   |   18 +-
 .../hbase/regionserver/ScannerContext.java  |   13 +
 .../hadoop/hbase/regionserver/StoreScanner.java |1 +
 .../hbase/regionserver/TestScannerCursor.java   |  191 +++
 14 files changed, 1375 insertions(+), 138 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/381c89b5/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
index 8e94c7c..d548901 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
@@ -505,6 +505,21 @@ public abstract class ClientScanner extends 
AbstractClientScanner {
   break;
 }
   }
+      if (cache.isEmpty() && !closed && scan.isNeedCursorResult()) {
+        if (callable.isHeartbeatMessage() && callable.getCursor() != null) {
+          // Use cursor row key from server
+          cache.add(Result.createCursorResult(callable.getCursor()));
+          break;
+        }
+        if (values.length > 0) {
+          // The size limit was exceeded and we need to return the last Result's row.
+          // When the user sets batch and the scanner is reopened, the server may return Results
+          // the user has already seen, and the last Result cannot be seen because the count is
+          // not enough. So the row keys of the Results may not be the same; we must use the
+          // last one.
+          cache.add(Result.createCursorResult(new Cursor(values[values.length - 1].getRow())));
+          break;
+        }
+      }
   if (countdown <= 0) {
 // we have enough result.
 closeScannerIfExhausted(regionExhausted);

http://git-wip-us.apache.org/repos/asf/hbase/blob/381c89b5/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientUtil.java
index e4a84d5..a839080 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientUtil.java
@@ -27,4 +27,8 @@ public class ClientUtil {
   public static boolean areScanStartRowAndStopRowEqual(byte[] startRow, byte[] 
stopRow) {
 return startRow != null && startRow.length > 0 && Bytes.equals(startRow, 
stopRow);
   }
+
+  public static Cursor createCursor(byte[] row) {
+return new Cursor(row);
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/381c89b5/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Cursor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Cursor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Cursor.java
new file mode 100644
index 000..374025e
--- /dev/null
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Cursor.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * 

[1/5] hbase git commit: HBASE-15576 Scanning cursor to prevent blocking long time on ResultScanner.next()

2017-06-06 Thread yangzhe1991
Repository: hbase
Updated Branches:
  refs/heads/branch-1 9cb57ae35 -> 381c89b5c
  refs/heads/master 80e15aac2 -> 2f1923a82


http://git-wip-us.apache.org/repos/asf/hbase/blob/2f1923a8/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
--
diff --git 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
index 1bb57c4..e25064f 100644
--- 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
+++ 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
@@ -14267,6 +14267,16 @@ public final class ClientProtos {
  * optional .hbase.pb.Scan.ReadType readType = 23 [default = 
DEFAULT];
  */
 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.ReadType 
getReadType();
+
+// optional bool need_cursor_result = 24 [default = false];
+/**
+ * optional bool need_cursor_result = 24 [default = false];
+ */
+boolean hasNeedCursorResult();
+/**
+ * optional bool need_cursor_result = 24 [default = false];
+ */
+boolean getNeedCursorResult();
   }
   /**
* Protobuf type {@code hbase.pb.Scan}
@@ -14482,6 +14492,11 @@ public final class ClientProtos {
   }
   break;
 }
+case 192: {
+  bitField0_ |= 0x0010;
+  needCursorResult_ = input.readBool();
+  break;
+}
   }
 }
   } catch (com.google.protobuf.InvalidProtocolBufferException e) {
@@ -15070,6 +15085,22 @@ public final class ClientProtos {
   return readType_;
 }
 
+// optional bool need_cursor_result = 24 [default = false];
+public static final int NEED_CURSOR_RESULT_FIELD_NUMBER = 24;
+private boolean needCursorResult_;
+/**
+ * optional bool need_cursor_result = 24 [default = false];
+ */
+public boolean hasNeedCursorResult() {
+  return ((bitField0_ & 0x0010) == 0x0010);
+}
+/**
+ * optional bool need_cursor_result = 24 [default = false];
+ */
+public boolean getNeedCursorResult() {
+  return needCursorResult_;
+}
+
 private void initFields() {
   column_ = java.util.Collections.emptyList();
   attribute_ = java.util.Collections.emptyList();
@@ -15094,6 +15125,7 @@ public final class ClientProtos {
   includeStartRow_ = true;
   includeStopRow_ = false;
   readType_ = 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.ReadType.DEFAULT;
+  needCursorResult_ = false;
 }
 private byte memoizedIsInitialized = -1;
 public final boolean isInitialized() {
@@ -15200,6 +15232,9 @@ public final class ClientProtos {
   if (((bitField0_ & 0x0008) == 0x0008)) {
 output.writeEnum(23, readType_.getNumber());
   }
+  if (((bitField0_ & 0x0010) == 0x0010)) {
+output.writeBool(24, needCursorResult_);
+  }
   getUnknownFields().writeTo(output);
 }
 
@@ -15301,6 +15336,10 @@ public final class ClientProtos {
 size += com.google.protobuf.CodedOutputStream
   .computeEnumSize(23, readType_.getNumber());
   }
+  if (((bitField0_ & 0x0010) == 0x0010)) {
+size += com.google.protobuf.CodedOutputStream
+  .computeBoolSize(24, needCursorResult_);
+  }
   size += getUnknownFields().getSerializedSize();
   memoizedSerializedSize = size;
   return size;
@@ -15430,6 +15469,11 @@ public final class ClientProtos {
 result = result &&
 (getReadType() == other.getReadType());
   }
+  result = result && (hasNeedCursorResult() == 
other.hasNeedCursorResult());
+  if (hasNeedCursorResult()) {
+result = result && (getNeedCursorResult()
+== other.getNeedCursorResult());
+  }
   result = result &&
   getUnknownFields().equals(other.getUnknownFields());
   return result;
@@ -15535,6 +15579,10 @@ public final class ClientProtos {
 hash = (37 * hash) + READTYPE_FIELD_NUMBER;
 hash = (53 * hash) + hashEnum(getReadType());
   }
+  if (hasNeedCursorResult()) {
+hash = (37 * hash) + NEED_CURSOR_RESULT_FIELD_NUMBER;
+hash = (53 * hash) + hashBoolean(getNeedCursorResult());
+  }
   hash = (29 * hash) + getUnknownFields().hashCode();
   memoizedHashCode = hash;
   return hash;
@@ -15726,6 +15774,8 @@ public final class ClientProtos {
 bitField0_ = (bitField0_ & ~0x0020);
 readType_ = 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.ReadType.DEFAULT;
 bitField0_ = (bitField0_ & ~0x0040);
+needCursorResult_ = false;
+bitField0_ = (bitField0_ & ~0x0080);
 return this;
   }
 
@@ -15869,6 

[2/5] hbase git commit: HBASE-15576 Scanning cursor to prevent blocking long time on ResultScanner.next()

2017-06-06 Thread yangzhe1991
http://git-wip-us.apache.org/repos/asf/hbase/blob/2f1923a8/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java
--
diff --git 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java
 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java
index a5f81e6..b93f6cc 100644
--- 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java
+++ 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java
@@ -14598,6 +14598,15 @@ public final class ClientProtos {
  * optional .hbase.pb.Scan.ReadType readType = 23 [default = 
DEFAULT];
  */
 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.ReadType 
getReadType();
+
+/**
+ * optional bool need_cursor_result = 24 [default = false];
+ */
+boolean hasNeedCursorResult();
+/**
+ * optional bool need_cursor_result = 24 [default = false];
+ */
+boolean getNeedCursorResult();
   }
   /**
* 
@@ -14642,6 +14651,7 @@ public final class ClientProtos {
   includeStartRow_ = true;
   includeStopRow_ = false;
   readType_ = 0;
+  needCursorResult_ = false;
 }
 
 @java.lang.Override
@@ -14827,6 +14837,11 @@ public final class ClientProtos {
   }
   break;
 }
+case 192: {
+  bitField0_ |= 0x0010;
+  needCursorResult_ = input.readBool();
+  break;
+}
   }
 }
   } catch 
(org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException
 e) {
@@ -15387,6 +15402,21 @@ public final class ClientProtos {
   return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan.ReadType.DEFAULT
 : result;
 }
 
+public static final int NEED_CURSOR_RESULT_FIELD_NUMBER = 24;
+private boolean needCursorResult_;
+/**
+ * optional bool need_cursor_result = 24 [default = false];
+ */
+public boolean hasNeedCursorResult() {
+  return ((bitField0_ & 0x0010) == 0x0010);
+}
+/**
+ * optional bool need_cursor_result = 24 [default = false];
+ */
+public boolean getNeedCursorResult() {
+  return needCursorResult_;
+}
+
 private byte memoizedIsInitialized = -1;
 public final boolean isInitialized() {
   byte isInitialized = memoizedIsInitialized;
@@ -15492,6 +15522,9 @@ public final class ClientProtos {
   if (((bitField0_ & 0x0008) == 0x0008)) {
 output.writeEnum(23, readType_);
   }
+  if (((bitField0_ & 0x0010) == 0x0010)) {
+output.writeBool(24, needCursorResult_);
+  }
   unknownFields.writeTo(output);
 }
 
@@ -15592,6 +15625,10 @@ public final class ClientProtos {
 size += 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
   .computeEnumSize(23, readType_);
   }
+  if (((bitField0_ & 0x0010) == 0x0010)) {
+size += 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+  .computeBoolSize(24, needCursorResult_);
+  }
   size += unknownFields.getSerializedSize();
   memoizedSize = size;
   return size;
@@ -15713,6 +15750,11 @@ public final class ClientProtos {
   if (hasReadType()) {
 result = result && readType_ == other.readType_;
   }
+  result = result && (hasNeedCursorResult() == 
other.hasNeedCursorResult());
+  if (hasNeedCursorResult()) {
+result = result && (getNeedCursorResult()
+== other.getNeedCursorResult());
+  }
   result = result && unknownFields.equals(other.unknownFields);
   return result;
 }
@@ -15825,6 +15867,11 @@ public final class ClientProtos {
 hash = (37 * hash) + READTYPE_FIELD_NUMBER;
 hash = (53 * hash) + readType_;
   }
+  if (hasNeedCursorResult()) {
+hash = (37 * hash) + NEED_CURSOR_RESULT_FIELD_NUMBER;
+hash = (53 * hash) + 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean(
+getNeedCursorResult());
+  }
   hash = (29 * hash) + unknownFields.hashCode();
   memoizedHashCode = hash;
   return hash;
@@ -16024,6 +16071,8 @@ public final class ClientProtos {
 bitField0_ = (bitField0_ & ~0x0020);
 readType_ = 0;
 bitField0_ = (bitField0_ & ~0x0040);
+needCursorResult_ = false;
+bitField0_ = (bitField0_ & ~0x0080);
 return this;
   }
 
@@ -16163,6 +16212,10 @@ public final class ClientProtos {
   to_bitField0_ |= 0x0008;
 }
 result.readType_ = readType_;
+if (((from_bitField0_ & 0x0080) == 0x0080)) {
+  to_bitField0_ |= 

[3/5] hbase git commit: HBASE-15576 Scanning cursor to prevent blocking long time on ResultScanner.next()

2017-06-06 Thread yangzhe1991
HBASE-15576 Scanning cursor to prevent blocking long time on 
ResultScanner.next()


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2f1923a8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2f1923a8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2f1923a8

Branch: refs/heads/master
Commit: 2f1923a8233b0c999494cd4b33f85b70dc5d7b12
Parents: 80e15aa
Author: Phil Yang 
Authored: Thu May 25 15:18:58 2017 +0800
Committer: Phil Yang 
Committed: Wed Jun 7 11:32:04 2017 +0800

--
 .../hadoop/hbase/client/ClientScanner.java  |   15 +
 .../apache/hadoop/hbase/client/ClientUtil.java  |4 +
 .../org/apache/hadoop/hbase/client/Cursor.java  |   41 +
 .../org/apache/hadoop/hbase/client/Result.java  |   45 +
 .../org/apache/hadoop/hbase/client/Scan.java|   43 +
 .../hadoop/hbase/client/ScannerCallable.java|   14 +-
 .../client/ScannerCallableWithReplicas.java |4 +
 .../hbase/shaded/protobuf/ProtobufUtil.java |   24 +
 .../shaded/protobuf/generated/ClientProtos.java | 1172 +++---
 .../src/main/protobuf/Client.proto  |   14 +-
 .../hbase/protobuf/generated/ClientProtos.java  | 1128 ++---
 hbase-protocol/src/main/protobuf/Client.proto   |   14 +-
 .../hbase/regionserver/RSRpcServices.java   |   18 +-
 .../hbase/regionserver/ScannerContext.java  |   13 +
 .../hadoop/hbase/regionserver/StoreScanner.java |1 +
 .../hbase/regionserver/TestScannerCursor.java   |  191 +++
 16 files changed, 2422 insertions(+), 319 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2f1923a8/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
index fa5f868..59cf005 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
@@ -499,6 +499,21 @@ public abstract class ClientScanner extends 
AbstractClientScanner {
   break;
 }
   }
+      if (cache.isEmpty() && !closed && scan.isNeedCursorResult()) {
+        if (callable.isHeartbeatMessage() && callable.getCursor() != null) {
+          // Use cursor row key from server
+          cache.add(Result.createCursorResult(callable.getCursor()));
+          break;
+        }
+        if (values.length > 0) {
+          // The size limit was exceeded and we need to return the last Result's row.
+          // When the user sets batch and the scanner is reopened, the server may return Results
+          // the user has already seen, and the last Result cannot be seen because the count is
+          // not enough. So the row keys of the Results may not be the same; we must use the
+          // last one.
+          cache.add(Result.createCursorResult(new Cursor(values[values.length - 1].getRow())));
+          break;
+        }
+      }
   if (countdown <= 0) {
 // we have enough result.
 closeScannerIfExhausted(regionExhausted);

http://git-wip-us.apache.org/repos/asf/hbase/blob/2f1923a8/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientUtil.java
index e4a84d5..a839080 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientUtil.java
@@ -27,4 +27,8 @@ public class ClientUtil {
   public static boolean areScanStartRowAndStopRowEqual(byte[] startRow, byte[] 
stopRow) {
 return startRow != null && startRow.length > 0 && Bytes.equals(startRow, 
stopRow);
   }
+
+  public static Cursor createCursor(byte[] row) {
+return new Cursor(row);
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/2f1923a8/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Cursor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Cursor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Cursor.java
new file mode 100644
index 000..1d4b4b5
--- /dev/null
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Cursor.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information

[4/5] hbase git commit: HBASE-15576 Scanning cursor to prevent blocking long time on ResultScanner.next()

2017-06-06 Thread yangzhe1991
http://git-wip-us.apache.org/repos/asf/hbase/blob/381c89b5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerCursor.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerCursor.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerCursor.java
new file mode 100644
index 000..e40b808
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerCursor.java
@@ -0,0 +1,191 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTestConst;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.FilterBase;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Threads;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(MediumTests.class)
+public class TestScannerCursor {
+
+  private static final Log LOG =
+  LogFactory.getLog(TestScannerCursor.class);
+
+  private final static HBaseTestingUtility TEST_UTIL = new 
HBaseTestingUtility();
+
+  private static Table TABLE = null;
+
+  /**
+   * Table configuration
+   */
+  private static TableName TABLE_NAME = TableName.valueOf("TestScannerCursor");
+
+  private static int NUM_ROWS = 5;
+  private static byte[] ROW = Bytes.toBytes("testRow");
+  private static byte[][] ROWS = HTestConst.makeNAscii(ROW, NUM_ROWS);
+
+  private static int NUM_FAMILIES = 2;
+  private static byte[] FAMILY = Bytes.toBytes("testFamily");
+  private static byte[][] FAMILIES = HTestConst.makeNAscii(FAMILY, 
NUM_FAMILIES);
+
+  private static int NUM_QUALIFIERS = 2;
+  private static byte[] QUALIFIER = Bytes.toBytes("testQualifier");
+  private static byte[][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, 
NUM_QUALIFIERS);
+
+  private static int VALUE_SIZE = 10;
+  private static byte[] VALUE = Bytes.createMaxByteArray(VALUE_SIZE);
+
+  private static final int TIMEOUT = 4000;
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+Configuration conf = TEST_UTIL.getConfiguration();
+
+conf.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, TIMEOUT);
+conf.setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, TIMEOUT);
+
+// Check the timeout condition after every cell
+conf.setLong(StoreScanner.HBASE_CELLS_SCANNED_PER_HEARTBEAT_CHECK, 1);
+TEST_UTIL.startMiniCluster(1);
+
+TABLE = createTestTable(TABLE_NAME, ROWS, FAMILIES, QUALIFIERS, VALUE);
+
+  }
+
+  static Table createTestTable(TableName name, byte[][] rows, byte[][] families,
+      byte[][] qualifiers, byte[] cellValue) throws IOException {
+    Table ht = TEST_UTIL.createTable(name, families);
+    List<Put> puts = createPuts(rows, families, qualifiers, cellValue);
+    ht.put(puts);
+    return ht;
+  }
+
+  static ArrayList<Put> createPuts(byte[][] rows, byte[][] families, byte[][] qualifiers,
+      byte[] value) throws IOException {
+    Put put;
+    ArrayList<Put> puts = new ArrayList<>();
+
+for (int row = 0; row < rows.length; row++) {
+  put = new Put(rows[row]);
+  for (int fam = 0; fam < families.length; fam++) {
+for (int qual = 0; qual < qualifiers.length; qual++) {
+  KeyValue 

hbase git commit: HBASE-16392 Backup delete fault tolerance (Vladimir Rodionov)

2017-06-06 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master da3c02363 -> 80e15aac2


HBASE-16392 Backup delete fault tolerance (Vladimir Rodionov)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/80e15aac
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/80e15aac
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/80e15aac

Branch: refs/heads/master
Commit: 80e15aac21c6b1969373171fce4c7635b991e172
Parents: da3c023
Author: tedyu 
Authored: Tue Jun 6 20:29:13 2017 -0700
Committer: tedyu 
Committed: Tue Jun 6 20:29:13 2017 -0700

--
 .../hbase/backup/impl/BackupAdminImpl.java  |  98 --
 .../hbase/backup/impl/BackupCommands.java   |  54 +-
 .../hbase/backup/impl/BackupSystemTable.java| 129 
 .../hbase/backup/impl/TableBackupClient.java|  63 +-
 .../hadoop/hbase/backup/TestBackupBase.java |  55 +++---
 .../backup/TestBackupDeleteWithFailures.java| 194 +++
 .../hbase/backup/TestBackupMultipleDeletes.java |   1 -
 .../TestIncrementalBackupDeleteTable.java   |   2 +-
 .../TestIncrementalBackupWithFailures.java  |   2 +-
 .../hadoop/hbase/backup/TestRemoteBackup.java   |   6 +
 .../hadoop/hbase/backup/TestRemoteRestore.java  |   7 +
 .../backup/TestRepairAfterFailedDelete.java |  93 +
 .../hbase/backup/TestSystemTableSnapshot.java   |   5 +-
 13 files changed, 603 insertions(+), 106 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/80e15aac/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.java
index 30cabfd..d1ee8e1 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.java
@@ -97,21 +97,81 @@ public class BackupAdminImpl implements BackupAdmin {
 int totalDeleted = 0;
     Map<String, HashSet<TableName>> allTablesMap = new HashMap<>();
 
+boolean deleteSessionStarted = false;
+boolean snapshotDone = false;
 try (final BackupSystemTable sysTable = new BackupSystemTable(conn)) {
-      for (int i = 0; i < backupIds.length; i++) {
-        BackupInfo info = sysTable.readBackupInfo(backupIds[i]);
-        if (info != null) {
-          String rootDir = info.getBackupRootDir();
-          HashSet<TableName> allTables = allTablesMap.get(rootDir);
-          if (allTables == null) {
-            allTables = new HashSet<>();
-            allTablesMap.put(rootDir, allTables);
+
+  // Step 1: Make sure there is no active session
+  // is running by using startBackupSession API
+  // If there is an active session in progress, exception will be thrown
+  try {
+sysTable.startBackupSession();
+deleteSessionStarted = true;
+  } catch (IOException e) {
+LOG.warn("You can not run delete command while active backup session 
is in progress. \n"
++ "If there is no active backup session running, run backup repair 
utility to restore \n"
++"backup system integrity.");
+return -1;
+  }
+
+      // Step 2: Make sure there is no failed session
+      List<BackupInfo> list = sysTable.getBackupInfos(BackupState.RUNNING);
+      if (list.size() != 0) {
+        // Failed sessions found
+        LOG.warn("Failed backup session found. Run backup repair tool first.");
+        return -1;
+      }
+
+  // Step 3: Record delete session
+  sysTable.startDeleteOperation(backupIds);
+  // Step 4: Snapshot backup system table
+  if (!BackupSystemTable.snapshotExists(conn)) {
+  BackupSystemTable.snapshot(conn);
+  } else {
+LOG.warn("Backup system table snapshot exists");
+  }
+  snapshotDone = true;
+  try {
+for (int i = 0; i < backupIds.length; i++) {
+  BackupInfo info = sysTable.readBackupInfo(backupIds[i]);
+  if (info != null) {
+            String rootDir = info.getBackupRootDir();
+            HashSet<TableName> allTables = allTablesMap.get(rootDir);
+            if (allTables == null) {
+              allTables = new HashSet<>();
+              allTablesMap.put(rootDir, allTables);
+}
+allTables.addAll(info.getTableNames());
+totalDeleted += deleteBackup(backupIds[i], sysTable);
+  }
+}
+finalizeDelete(allTablesMap, sysTable);
+// Finish
+sysTable.finishDeleteOperation();
+// delete snapshot
+BackupSystemTable.deleteSnapshot(conn);
+  } catch 

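The fault-tolerance protocol in the hunk above condenses to the outline below. It is a sketch only: it reuses just the BackupSystemTable calls visible in the quoted diff, the import paths are assumptions, and the per-backup deletion loop is elided:

// Outline of the delete fault-tolerance steps from the quoted hunk (sketch, not the
// committed code). Import paths are assumptions based on the file paths in this thread.
import java.io.IOException;
import java.util.List;

import org.apache.hadoop.hbase.backup.BackupInfo;
import org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
import org.apache.hadoop.hbase.client.Connection;

public class BackupDeleteOutline {
  static int deleteBackups(Connection conn, String[] backupIds) throws IOException {
    try (BackupSystemTable sysTable = new BackupSystemTable(conn)) {
      // Step 1: refuse to run while an active backup session exists (throws if one does).
      sysTable.startBackupSession();
      // Step 2: refuse to run while a failed (still RUNNING) session needs repair.
      List<BackupInfo> running = sysTable.getBackupInfos(BackupState.RUNNING);
      if (!running.isEmpty()) {
        return -1;
      }
      // Step 3: record the delete so a crashed run can be repaired later.
      sysTable.startDeleteOperation(backupIds);
      // Step 4: snapshot the backup system table as a rollback point.
      if (!BackupSystemTable.snapshotExists(conn)) {
        BackupSystemTable.snapshot(conn);
      }
      // ... delete each backup id here (see the quoted diff) ...
      sysTable.finishDeleteOperation();
      BackupSystemTable.deleteSnapshot(conn);
      return 0;
    }
  }
}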
[1/3] hbase git commit: HBASE-18145 The flush may cause the corrupt data for reading

2017-06-06 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 e0dbafd7c -> 9cb57ae35
  refs/heads/branch-1.3 69deecb1e -> 6784686cf
  refs/heads/master 858bccfcb -> da3c02363


HBASE-18145 The flush may cause the corrupt data for reading

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/da3c0236
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/da3c0236
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/da3c0236

Branch: refs/heads/master
Commit: da3c023635ef3ea0a70bd9a72b839b1861e1188d
Parents: 858bccf
Author: Chia-Ping Tsai 
Authored: Tue Jun 6 03:44:12 2017 +0800
Committer: Andrew Purtell 
Committed: Tue Jun 6 18:02:43 2017 -0700

--
 .../hadoop/hbase/regionserver/StoreScanner.java |  27 ++--
 .../hadoop/hbase/regionserver/TestStore.java| 140 ++-
 2 files changed, 151 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/da3c0236/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
index 6990e91..b063060 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
@@ -95,9 +95,12 @@ public class StoreScanner extends 
NonReversedNonLazyKeyValueScanner
   private final long maxRowSize;
   private final long cellsPerHeartbeatCheck;
 
-  // Collects all the KVHeap that are eagerly getting closed during the
-  // course of a scan
-  private final List<KeyValueHeap> heapsForDelayedClose = new ArrayList<>();
+  // 1) Collects all the KVHeap that are eagerly getting closed during the
+  //    course of a scan
+  // 2) Collects the unused memstore scanners. If we close the memstore scanners
+  //    before sending data to client, the chunk may be reclaimed by other
+  //    updates and the data will be corrupt.
+  private final List<KeyValueScanner> scannersForDelayedClose = new ArrayList<>();
 
   /**
* The number of KVs seen by the scanner. Includes explicitly skipped KVs, 
but not
@@ -485,23 +488,20 @@ public class StoreScanner extends 
NonReversedNonLazyKeyValueScanner
 close(true);
   }
 
-  private void close(boolean withHeapClose) {
+  private void close(boolean withDelayedScannersClose) {
 if (this.closing) {
   return;
 }
-if (withHeapClose) {
+if (withDelayedScannersClose) {
   this.closing = true;
 }
 // Under test, we dont have a this.store
 if (this.store != null) {
   this.store.deleteChangedReaderObserver(this);
 }
-if (withHeapClose) {
+if (withDelayedScannersClose) {
+  clearAndClose(scannersForDelayedClose);
   clearAndClose(memStoreScannersAfterFlush);
-  for (KeyValueHeap h : this.heapsForDelayedClose) {
-h.close();
-  }
-  this.heapsForDelayedClose.clear();
   if (this.heap != null) {
 this.heap.close();
 this.currentScanners.clear();
@@ -509,7 +509,7 @@ public class StoreScanner extends 
NonReversedNonLazyKeyValueScanner
   }
 } else {
   if (this.heap != null) {
-this.heapsForDelayedClose.add(this.heap);
+this.scannersForDelayedClose.add(this.heap);
 this.currentScanners.clear();
 this.heap = null;
   }
@@ -879,7 +879,7 @@ public class StoreScanner extends 
NonReversedNonLazyKeyValueScanner
 // remove the older memstore scanner
 for (int i = currentScanners.size() - 1; i >=0; i--) {
   if (!currentScanners.get(i).isFileScanner()) {
-currentScanners.remove(i).close();
+scannersForDelayedClose.add(currentScanners.remove(i));
   } else {
 // we add the memstore scanner to the end of currentScanners
 break;
@@ -1121,8 +1121,7 @@ public class StoreScanner extends 
NonReversedNonLazyKeyValueScanner
 }
 matcher.beforeShipped();
 // There wont be further fetch of Cells from these scanners. Just close.
-this.heapsForDelayedClose.forEach(KeyValueHeap::close);
-this.heapsForDelayedClose.clear();
+clearAndClose(scannersForDelayedClose);
 if (this.heap != null) {
   this.heap.shipped();
   // When switching from pread to stream, we will open a new scanner for 
each store file, but

http://git-wip-us.apache.org/repos/asf/hbase/blob/da3c0236/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
--
diff --git 

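The heart of the fix is a delayed-close pattern: scanners whose memstore chunks may still back cells not yet shipped to the client are parked in scannersForDelayedClose and only closed on shipped()/close(). A minimal, HBase-independent sketch of that pattern (the names below are illustrative, not the HBase API):

// Stand-alone illustration of the delayed-close pattern: resources that may still back
// data handed to a caller are queued and closed only once the caller is done with them.
import java.util.ArrayList;
import java.util.List;

public class DelayedCloseSketch {
  interface CellSource extends AutoCloseable {
    @Override void close();
  }

  private final List<CellSource> current = new ArrayList<>();
  private final List<CellSource> delayedClose = new ArrayList<>();

  /** Called when a flush replaces memstore-backed sources with file-backed ones. */
  void replaceMemstoreSources(List<CellSource> fileBacked) {
    // Do NOT close the old sources yet: cells already read from them may not have been
    // shipped to the client, and closing could let their backing chunks be reused.
    delayedClose.addAll(current);
    current.clear();
    current.addAll(fileBacked);
  }

  /** Called once the client has received everything read so far (cf. shipped()/close()). */
  void shipped() {
    for (CellSource source : delayedClose) {
      source.close();
    }
    delayedClose.clear();
  }
}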
[2/3] hbase git commit: HBASE-18145 The flush may cause the corrupt data for reading

2017-06-06 Thread apurtell
HBASE-18145 The flush may cause the corrupt data for reading

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9cb57ae3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9cb57ae3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9cb57ae3

Branch: refs/heads/branch-1
Commit: 9cb57ae35e3f90d7906a565abed64ea021b9dce8
Parents: e0dbafd
Author: Chia-Ping Tsai 
Authored: Tue Jun 6 15:12:25 2017 +0800
Committer: Andrew Purtell 
Committed: Tue Jun 6 18:03:02 2017 -0700

--
 .../hadoop/hbase/regionserver/StoreScanner.java |   8 +-
 .../hadoop/hbase/regionserver/TestStore.java| 146 +++
 2 files changed, 153 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9cb57ae3/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
index 05cfe24..b10c37d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
@@ -92,6 +92,11 @@ public class StoreScanner extends 
NonReversedNonLazyKeyValueScanner
   protected final long cellsPerHeartbeatCheck;
 
   /**
+   * If we close the memstore scanners before sending data to client, the chunk may be reclaimed
+   * by other updates and the data will be corrupt.
+   */
+  private final List<KeyValueScanner> scannersForDelayedClose = new ArrayList<>();
+  /**
* The number of KVs seen by the scanner. Includes explicitly skipped KVs, 
but not
* KVs skipped via seeking to next row/column. TODO: estimate them?
*/
@@ -455,6 +460,7 @@ public class StoreScanner extends 
NonReversedNonLazyKeyValueScanner
   public void close() {
 if (this.closing) return;
 this.closing = true;
+clearAndClose(scannersForDelayedClose);
 clearAndClose(memStoreScannersAfterFlush);
 // Under test, we dont have a this.store
 if (this.store != null)
@@ -878,7 +884,7 @@ public class StoreScanner extends 
NonReversedNonLazyKeyValueScanner
 // remove the older memstore scanner
 for (int i = 0; i < currentScanners.size(); i++) {
   if (!currentScanners.get(i).isFileScanner()) {
-currentScanners.remove(i).close();
+scannersForDelayedClose.add(currentScanners.remove(i));
 break;
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/9cb57ae3/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
index 1dc6ae5..573de11 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
@@ -37,6 +37,7 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
+import java.util.ListIterator;
 import java.util.NavigableSet;
 import java.util.TreeSet;
 import java.util.concurrent.ConcurrentSkipListSet;
@@ -74,6 +75,7 @@ import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
+import static 
org.apache.hadoop.hbase.regionserver.MemStoreChunkPool.CHUNK_POOL_MAXSIZE_KEY;
 import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration;
 import org.apache.hadoop.hbase.regionserver.compactions.DefaultCompactor;
 import org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher;
@@ -1228,6 +1230,67 @@ public class TestStore {
 
   }
 
+  @Test
+  public void testReclaimChunkWhenScaning() throws IOException {
+Configuration conf = HBaseConfiguration.create();
+conf.setFloat(CHUNK_POOL_MAXSIZE_KEY, 1);
+init("testReclaimChunkWhenScaning", conf);
+final long ts = EnvironmentEdgeManager.currentTime();
+final long seqId = 100;
+    byte[] value = Bytes.toBytes("value");
+    // older data which shouldn't be "seen" by client
+    store.add(createCell(qf1, ts, seqId, value));
+    store.add(createCell(qf2, ts, seqId, value));
+    store.add(createCell(qf3, ts, seqId, value));
+    TreeSet<byte[]> quals = new TreeSet<>(Bytes.BYTES_COMPARATOR);
+    quals.add(qf1);
+

[3/3] hbase git commit: HBASE-18145 The flush may cause the corrupt data for reading

2017-06-06 Thread apurtell
HBASE-18145 The flush may cause the corrupt data for reading

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6784686c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6784686c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6784686c

Branch: refs/heads/branch-1.3
Commit: 6784686cfa5edd9d383a03f07419322ac114ea91
Parents: 69deecb
Author: Chia-Ping Tsai 
Authored: Tue Jun 6 15:15:23 2017 +0800
Committer: Andrew Purtell 
Committed: Tue Jun 6 18:03:13 2017 -0700

--
 .../hadoop/hbase/regionserver/StoreScanner.java |   8 +-
 .../hadoop/hbase/regionserver/TestStore.java| 146 +++
 2 files changed, 153 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6784686c/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
index c967071..de66d4e 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
@@ -89,6 +89,11 @@ public class StoreScanner extends 
NonReversedNonLazyKeyValueScanner
   protected final long cellsPerHeartbeatCheck;
 
   /**
+   * If we close the memstore scanners before sending data to client, the chunk may be reclaimed
+   * by other updates and the data will be corrupt.
+   */
+  private final List<KeyValueScanner> scannersForDelayedClose = new ArrayList<>();
+  /**
* The number of KVs seen by the scanner. Includes explicitly skipped KVs, 
but not
* KVs skipped via seeking to next row/column. TODO: estimate them?
*/
@@ -437,6 +442,7 @@ public class StoreScanner extends 
NonReversedNonLazyKeyValueScanner
   public void close() {
 if (this.closing) return;
 this.closing = true;
+clearAndClose(scannersForDelayedClose);
 clearAndClose(memStoreScannersAfterFlush);
 // Under test, we dont have a this.store
 if (this.store != null)
@@ -850,7 +856,7 @@ public class StoreScanner extends 
NonReversedNonLazyKeyValueScanner
 // remove the older memstore scanner
 for (int i = 0; i < currentScanners.size(); i++) {
   if (!currentScanners.get(i).isFileScanner()) {
-currentScanners.remove(i).close();
+scannersForDelayedClose.add(currentScanners.remove(i));
 break;
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/6784686c/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
index 4cebf1e..ed4ee57 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
@@ -37,6 +37,7 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
+import java.util.ListIterator;
 import java.util.NavigableSet;
 import java.util.TreeSet;
 import java.util.concurrent.ConcurrentSkipListSet;
@@ -74,6 +75,7 @@ import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
+import static 
org.apache.hadoop.hbase.regionserver.MemStoreChunkPool.CHUNK_POOL_MAXSIZE_KEY;
 import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration;
 import org.apache.hadoop.hbase.regionserver.compactions.DefaultCompactor;
 import 
org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;
@@ -1206,6 +1208,67 @@ public class TestStore {
 
   }
 
+  @Test
+  public void testReclaimChunkWhenScaning() throws IOException {
+Configuration conf = HBaseConfiguration.create();
+conf.setFloat(CHUNK_POOL_MAXSIZE_KEY, 1);
+init("testReclaimChunkWhenScaning", conf);
+final long ts = EnvironmentEdgeManager.currentTime();
+final long seqId = 100;
+    byte[] value = Bytes.toBytes("value");
+    // older data which shouldn't be "seen" by client
+    store.add(createCell(qf1, ts, seqId, value));
+    store.add(createCell(qf2, ts, seqId, value));
+    store.add(createCell(qf3, ts, seqId, value));
+    TreeSet<byte[]> quals = new TreeSet<>(Bytes.BYTES_COMPARATOR);
+    quals.add(qf1);
+

[1/4] hbase git commit: HBASE-18132 Low replication should be checked in period in case of datanode rolling upgrade (Allan Yang)

2017-06-06 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 9c1efc9f9 -> e0dbafd7c
  refs/heads/branch-1.2 04bbdc835 -> d46b7832d
  refs/heads/branch-1.3 5812e41bc -> 69deecb1e
  refs/heads/master 1950acc67 -> 858bccfcb


HBASE-18132 Low replication should be checked in period in case of datanode 
rolling upgrade (Allan Yang)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e0dbafd7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e0dbafd7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e0dbafd7

Branch: refs/heads/branch-1
Commit: e0dbafd7cc70efe3dad3ef69effc402d5fb16095
Parents: 9c1efc9
Author: Andrew Purtell 
Authored: Tue Jun 6 17:15:33 2017 -0700
Committer: Andrew Purtell 
Committed: Tue Jun 6 17:21:11 2017 -0700

--
 .../hadoop/hbase/regionserver/LogRoller.java| 27 ++
 .../hadoop/hbase/regionserver/wal/FSHLog.java   | 14 +++-
 .../wal/TestWALOpenAfterDNRollingStart.java | 86 
 3 files changed, 126 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e0dbafd7/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
index ccc951a..0e5f284 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
@@ -30,6 +30,7 @@ import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.RemoteExceptionHandler;
 import org.apache.hadoop.hbase.Server;
+import org.apache.hadoop.hbase.regionserver.wal.FSHLog;
 import org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
 import org.apache.hadoop.hbase.wal.WAL;
 import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
@@ -65,6 +66,8 @@ public class LogRoller extends HasThread {
   // Period to roll log.
   private final long rollperiod;
   private final int threadWakeFrequency;
+  // The interval to check low replication on hlog's pipeline
+  private long checkLowReplicationInterval;
 
   public void addWAL(final WAL wal) {
 if (null == walNeedsRoll.putIfAbsent(wal, Boolean.FALSE)) {
@@ -101,6 +104,8 @@ public class LogRoller extends HasThread {
   getLong("hbase.regionserver.logroll.period", 360);
 this.threadWakeFrequency = this.server.getConfiguration().
   getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000);
+this.checkLowReplicationInterval = this.server.getConfiguration().getLong(
+"hbase.regionserver.hlog.check.lowreplication.interval", 30 * 1000);
   }
 
   @Override
@@ -112,10 +117,32 @@ public class LogRoller extends HasThread {
 super.interrupt();
   }
 
+  /**
+   * Periodically check low replication on the WAL pipeline; see HBASE-18132.
+   */
+  void checkLowReplication(long now) {
+    try {
+      for (Entry<WAL, Boolean> entry : walNeedsRoll.entrySet()) {
+        WAL wal = entry.getKey();
+        boolean needRollAlready = entry.getValue();
+        if (wal instanceof FSHLog && !needRollAlready) {
+          FSHLog hlog = (FSHLog) wal;
+          if ((now - hlog.getLastTimeCheckLowReplication())
+              > this.checkLowReplicationInterval) {
+            hlog.checkLogRoll();
+          }
+        }
+      }
+    } catch (Throwable e) {
+      LOG.warn("Failed checking low replication", e);
+    }
+  }
+
   @Override
   public void run() {
 while (!server.isStopped()) {
   long now = System.currentTimeMillis();
+  checkLowReplication(now);
   boolean periodic = false;
   if (!rollLog.get()) {
 periodic = (now - this.lastrolltime) > this.rollperiod;

http://git-wip-us.apache.org/repos/asf/hbase/blob/e0dbafd7/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
index 8d97b64..d5cf6bb 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
@@ -349,6 +349,9 @@ public class FSHLog implements WAL {
 
   private final AtomicInteger closeErrorCount = new AtomicInteger();
 
+  // Last time low replication was checked on the hlog's pipeline
+  private volatile long lastTimeCheckLowReplication = EnvironmentEdgeManager.currentTime();

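For reference, the change above boils down to gating an expensive pipeline inspection behind a
configurable interval. A minimal standalone Java sketch of that idea follows; only the
configuration key and its 30-second default come from the patch, while the class and method
names are illustrative.

  import org.apache.hadoop.conf.Configuration;

  // Minimal sketch of an interval-gated check. Only the configuration key and its
  // 30-second default come from the patch; the class and method names are illustrative.
  class LowReplicationCheckSketch {
    private final long checkIntervalMs;
    private volatile long lastCheckMs = System.currentTimeMillis();

    LowReplicationCheckSketch(Configuration conf) {
      this.checkIntervalMs = conf.getLong(
          "hbase.regionserver.hlog.check.lowreplication.interval", 30 * 1000L);
    }

    // Called from the roller's main loop; runs the (potentially expensive) pipeline
    // inspection only when the configured interval has elapsed since the last run.
    void maybeCheck(long now, Runnable pipelineCheck) {
      if (now - lastCheckMs > checkIntervalMs) {
        lastCheckMs = now;
        pipelineCheck.run();
      }
    }
  }

As in the patch, the last-check timestamp is volatile so the roller thread and writer threads
observe a consistent value without extra locking.
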
[2/4] hbase git commit: HBASE-18132 Low replication should be checked in period in case of datanode rolling upgrade (Allan Yang)

2017-06-06 Thread apurtell
HBASE-18132 Low replication should be checked in period in case of datanode 
rolling upgrade (Allan Yang)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/858bccfc
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/858bccfc
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/858bccfc

Branch: refs/heads/master
Commit: 858bccfcb8eb07cd6d6b65cb208726e2f24bd0b5
Parents: 1950acc
Author: Andrew Purtell 
Authored: Tue Jun 6 17:15:33 2017 -0700
Committer: Andrew Purtell 
Committed: Tue Jun 6 17:21:21 2017 -0700

--
 .../hadoop/hbase/regionserver/LogRoller.java| 27 ++
 .../hadoop/hbase/regionserver/wal/FSHLog.java   | 21 +++--
 .../wal/TestWALOpenAfterDNRollingStart.java | 94 
 3 files changed, 136 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/858bccfc/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
index 9d1bc4b..80b6825 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
@@ -30,6 +30,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.Server;
+import org.apache.hadoop.hbase.regionserver.wal.FSHLog;
 import org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
 import org.apache.hadoop.hbase.wal.WAL;
 import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
@@ -61,6 +62,8 @@ public class LogRoller extends HasThread implements Closeable 
{
   // Period to roll log.
   private final long rollperiod;
   private final int threadWakeFrequency;
+  // The interval to check low replication on hlog's pipeline
+  private long checkLowReplicationInterval;
 
   private volatile boolean running = true;
 
@@ -99,6 +102,8 @@ public class LogRoller extends HasThread implements 
Closeable {
   getLong("hbase.regionserver.logroll.period", 360);
 this.threadWakeFrequency = this.server.getConfiguration().
   getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000);
+this.checkLowReplicationInterval = this.server.getConfiguration().getLong(
+"hbase.regionserver.hlog.check.lowreplication.interval", 30 * 1000);
   }
 
   @Override
@@ -110,10 +115,32 @@ public class LogRoller extends HasThread implements 
Closeable {
 super.interrupt();
   }
 
+  /**
+   * Periodically check low replication on the WAL pipeline; see HBASE-18132.
+   */
+  void checkLowReplication(long now) {
+    try {
+      for (Entry<WAL, Boolean> entry : walNeedsRoll.entrySet()) {
+        WAL wal = entry.getKey();
+        boolean needRollAlready = entry.getValue();
+        if (wal instanceof FSHLog && !needRollAlready) {
+          FSHLog hlog = (FSHLog) wal;
+          if ((now - hlog.getLastTimeCheckLowReplication())
+              > this.checkLowReplicationInterval) {
+            hlog.checkLogRoll();
+          }
+        }
+      }
+    } catch (Throwable e) {
+      LOG.warn("Failed checking low replication", e);
+    }
+  }
+
   @Override
   public void run() {
 while (running) {
   long now = System.currentTimeMillis();
+  checkLowReplication(now);
   boolean periodic = false;
   if (!rollLog.get()) {
 periodic = (now - this.lastrolltime) > this.rollperiod;

http://git-wip-us.apache.org/repos/asf/hbase/blob/858bccfc/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
index caf07a2..77ac1d1 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
@@ -47,11 +47,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.ClassSize;
-import org.apache.hadoop.hbase.util.FSUtils;
-import org.apache.hadoop.hbase.util.HasThread;
-import org.apache.hadoop.hbase.util.Threads;
+import org.apache.hadoop.hbase.util.*;
 import 

[4/4] hbase git commit: HBASE-18132 Low replication should be checked in period in case of datanode rolling upgrade (Allan Yang)

2017-06-06 Thread apurtell
HBASE-18132 Low replication should be checked in period in case of datanode 
rolling upgrade (Allan Yang)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d46b7832
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d46b7832
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d46b7832

Branch: refs/heads/branch-1.2
Commit: d46b7832d8b44cc02f543867683dae29544944aa
Parents: 04bbdc8
Author: Andrew Purtell 
Authored: Tue Jun 6 17:15:33 2017 -0700
Committer: Andrew Purtell 
Committed: Tue Jun 6 17:24:31 2017 -0700

--
 .../hadoop/hbase/regionserver/LogRoller.java| 27 ++
 .../hadoop/hbase/regionserver/wal/FSHLog.java   | 14 +++-
 .../wal/TestWALOpenAfterDNRollingStart.java | 86 
 3 files changed, 126 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d46b7832/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
index ccc951a..0e5f284 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
@@ -30,6 +30,7 @@ import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.RemoteExceptionHandler;
 import org.apache.hadoop.hbase.Server;
+import org.apache.hadoop.hbase.regionserver.wal.FSHLog;
 import org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
 import org.apache.hadoop.hbase.wal.WAL;
 import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
@@ -65,6 +66,8 @@ public class LogRoller extends HasThread {
   // Period to roll log.
   private final long rollperiod;
   private final int threadWakeFrequency;
+  // The interval to check low replication on hlog's pipeline
+  private long checkLowReplicationInterval;
 
   public void addWAL(final WAL wal) {
 if (null == walNeedsRoll.putIfAbsent(wal, Boolean.FALSE)) {
@@ -101,6 +104,8 @@ public class LogRoller extends HasThread {
   getLong("hbase.regionserver.logroll.period", 360);
 this.threadWakeFrequency = this.server.getConfiguration().
   getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000);
+this.checkLowReplicationInterval = this.server.getConfiguration().getLong(
+"hbase.regionserver.hlog.check.lowreplication.interval", 30 * 1000);
   }
 
   @Override
@@ -112,10 +117,32 @@ public class LogRoller extends HasThread {
 super.interrupt();
   }
 
+  /**
+   * Periodically check low replication on the WAL pipeline; see HBASE-18132.
+   */
+  void checkLowReplication(long now) {
+    try {
+      for (Entry<WAL, Boolean> entry : walNeedsRoll.entrySet()) {
+        WAL wal = entry.getKey();
+        boolean needRollAlready = entry.getValue();
+        if (wal instanceof FSHLog && !needRollAlready) {
+          FSHLog hlog = (FSHLog) wal;
+          if ((now - hlog.getLastTimeCheckLowReplication())
+              > this.checkLowReplicationInterval) {
+            hlog.checkLogRoll();
+          }
+        }
+      }
+    } catch (Throwable e) {
+      LOG.warn("Failed checking low replication", e);
+    }
+  }
+
   @Override
   public void run() {
 while (!server.isStopped()) {
   long now = System.currentTimeMillis();
+  checkLowReplication(now);
   boolean periodic = false;
   if (!rollLog.get()) {
 periodic = (now - this.lastrolltime) > this.rollperiod;

http://git-wip-us.apache.org/repos/asf/hbase/blob/d46b7832/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
index 7710f2b..4b81dda 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
@@ -351,6 +351,9 @@ public class FSHLog implements WAL {
 
   private final AtomicInteger closeErrorCount = new AtomicInteger();
 
+  // Last time low replication was checked on the hlog's pipeline
+  private volatile long lastTimeCheckLowReplication = EnvironmentEdgeManager.currentTime();
+
 
   /**
* WAL Comparator; it compares the timestamp (log filenum), present in the 
log file name.
@@ -1299,7 +1302,7 @@ public class FSHLog implements WAL {
   /**
* Schedule a log roll if 

[3/4] hbase git commit: HBASE-18132 Low replication should be checked in period in case of datanode rolling upgrade (Allan Yang)

2017-06-06 Thread apurtell
HBASE-18132 Low replication should be checked in period in case of datanode 
rolling upgrade (Allan Yang)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/69deecb1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/69deecb1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/69deecb1

Branch: refs/heads/branch-1.3
Commit: 69deecb1e4cc81f32878c528e63af0b742bdb735
Parents: 5812e41
Author: Andrew Purtell 
Authored: Tue Jun 6 17:15:33 2017 -0700
Committer: Andrew Purtell 
Committed: Tue Jun 6 17:24:29 2017 -0700

--
 .../hadoop/hbase/regionserver/LogRoller.java| 27 ++
 .../hadoop/hbase/regionserver/wal/FSHLog.java   | 14 +++-
 .../wal/TestWALOpenAfterDNRollingStart.java | 86 
 3 files changed, 126 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/69deecb1/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
index ccc951a..0e5f284 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
@@ -30,6 +30,7 @@ import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.RemoteExceptionHandler;
 import org.apache.hadoop.hbase.Server;
+import org.apache.hadoop.hbase.regionserver.wal.FSHLog;
 import org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
 import org.apache.hadoop.hbase.wal.WAL;
 import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
@@ -65,6 +66,8 @@ public class LogRoller extends HasThread {
   // Period to roll log.
   private final long rollperiod;
   private final int threadWakeFrequency;
+  // The interval to check low replication on hlog's pipeline
+  private long checkLowReplicationInterval;
 
   public void addWAL(final WAL wal) {
 if (null == walNeedsRoll.putIfAbsent(wal, Boolean.FALSE)) {
@@ -101,6 +104,8 @@ public class LogRoller extends HasThread {
   getLong("hbase.regionserver.logroll.period", 360);
 this.threadWakeFrequency = this.server.getConfiguration().
   getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000);
+this.checkLowReplicationInterval = this.server.getConfiguration().getLong(
+"hbase.regionserver.hlog.check.lowreplication.interval", 30 * 1000);
   }
 
   @Override
@@ -112,10 +117,32 @@ public class LogRoller extends HasThread {
 super.interrupt();
   }
 
+  /**
+   * Periodically check low replication on the WAL pipeline; see HBASE-18132.
+   */
+  void checkLowReplication(long now) {
+    try {
+      for (Entry<WAL, Boolean> entry : walNeedsRoll.entrySet()) {
+        WAL wal = entry.getKey();
+        boolean needRollAlready = entry.getValue();
+        if (wal instanceof FSHLog && !needRollAlready) {
+          FSHLog hlog = (FSHLog) wal;
+          if ((now - hlog.getLastTimeCheckLowReplication())
+              > this.checkLowReplicationInterval) {
+            hlog.checkLogRoll();
+          }
+        }
+      }
+    } catch (Throwable e) {
+      LOG.warn("Failed checking low replication", e);
+    }
+  }
+
   @Override
   public void run() {
 while (!server.isStopped()) {
   long now = System.currentTimeMillis();
+  checkLowReplication(now);
   boolean periodic = false;
   if (!rollLog.get()) {
 periodic = (now - this.lastrolltime) > this.rollperiod;

http://git-wip-us.apache.org/repos/asf/hbase/blob/69deecb1/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
index 7e1fb69..f508bd7 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
@@ -351,6 +351,9 @@ public class FSHLog implements WAL {
 
   private final AtomicInteger closeErrorCount = new AtomicInteger();
 
+  // Last time low replication was checked on the hlog's pipeline
+  private volatile long lastTimeCheckLowReplication = EnvironmentEdgeManager.currentTime();
+
 
   /**
* WAL Comparator; it compares the timestamp (log filenum), present in the 
log file name.
@@ -1303,7 +1306,7 @@ public class FSHLog implements WAL {
   /**
* Schedule a log roll if 

[4/4] hbase git commit: HBASE-18066: Get with closest_row_before on hbase:meta can return empty Cell during region merge/split

2017-06-06 Thread apurtell
HBASE-18066: Get with closest_row_before on hbase:meta can return empty Cell 
during region merge/split

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9c1efc9f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9c1efc9f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9c1efc9f

Branch: refs/heads/branch-1
Commit: 9c1efc9f9d11d6b59b54052da4a77f981102931b
Parents: ea3075e
Author: huzheng 
Authored: Tue May 23 16:05:16 2017 +0800
Committer: Andrew Purtell 
Committed: Tue Jun 6 17:13:16 2017 -0700

--
 .../hadoop/hbase/regionserver/HRegion.java  |  36 +++-
 .../TestFromClientGetWithClosestRowBefore.java  | 164 +++
 2 files changed, 191 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9c1efc9f/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index cea7c6f..fc737a9 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -136,6 +136,7 @@ import org.apache.hadoop.hbase.filter.ByteArrayComparable;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.filter.FilterWrapper;
 import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.io.HFileLink;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.TimeRange;
@@ -2726,15 +2727,13 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 startRegionOperation(Operation.GET);
 this.readRequestsCount.increment();
 try {
-  Store store = getStore(family);
-  // get the closest key. (HStore.getRowKeyAtOrBefore can return null)
-  Cell key = store.getRowKeyAtOrBefore(row);
   Result result = null;
-  if (key != null) {
-Get get = new Get(CellUtil.cloneRow(key));
-get.addFamily(family);
-result = get(get);
-  }
+  Get get = new Get(row);
+  get.addFamily(family);
+  get.setClosestRowBefore(true);
+  result = get(get);
+  // for compatibility
+  result = result.isEmpty() ? null : result;
   if (coprocessorHost != null) {
 coprocessorHost.postGetClosestRowBefore(row, family, result);
   }
@@ -7224,6 +7223,20 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 return get(get, withCoprocessor, HConstants.NO_NONCE, HConstants.NO_NONCE);
   }
 
+  private Scan buildScanForGetWithClosestRowBefore(Get get) throws IOException {
+    Scan scan = new Scan().withStartRow(get.getRow())
+        .addFamily(get.getFamilyMap().keySet().iterator().next()).setReversed(true)
+        .withStopRow(HConstants.EMPTY_END_ROW, false).setLimit(1);
+    if (this.getRegionInfo().isMetaRegion()) {
+      int delimiterIdx =
+          KeyValue.getDelimiter(get.getRow(), 0, get.getRow().length, HConstants.DELIMITER);
+      if (delimiterIdx >= 0) {
+        scan.setFilter(new PrefixFilter(Bytes.copy(get.getRow(), 0, delimiterIdx + 1)));
+      }
+    }
+    return scan;
+  }
+
   @Override
   public List get(Get get, boolean withCoprocessor, long nonceGroup, 
long nonce)
   throws IOException {
@@ -7236,7 +7249,12 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   }
 }
 long before = EnvironmentEdgeManager.currentTime();
-Scan scan = new Scan(get);
+Scan scan;
+if (get.isClosestRowBefore()) {
+  scan = buildScanForGetWithClosestRowBefore(get);
+} else {
+  scan = new Scan(get);
+}
 
 RegionScanner scanner = null;
 try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/9c1efc9f/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientGetWithClosestRowBefore.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientGetWithClosestRowBefore.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientGetWithClosestRowBefore.java
new file mode 100644
index 000..781977c
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientGetWithClosestRowBefore.java
@@ -0,0 +1,164 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license 

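The server-side change above replaces Store.getRowKeyAtOrBefore() with a reversed scan that
starts at the requested row and, on hbase:meta, is additionally constrained by a PrefixFilter
so the walk cannot cross into another table's rows. A rough client-side sketch of the same
reversed-scan idea, with placeholder table, family and row names:

  import java.io.IOException;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.HConstants;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.client.Connection;
  import org.apache.hadoop.hbase.client.ConnectionFactory;
  import org.apache.hadoop.hbase.client.Result;
  import org.apache.hadoop.hbase.client.ResultScanner;
  import org.apache.hadoop.hbase.client.Scan;
  import org.apache.hadoop.hbase.client.Table;
  import org.apache.hadoop.hbase.util.Bytes;

  public class ClosestRowBeforeScanSketch {
    public static void main(String[] args) throws IOException {
      Configuration conf = HBaseConfiguration.create();
      try (Connection conn = ConnectionFactory.createConnection(conf);
           Table table = conn.getTable(TableName.valueOf("TestTable"))) {
        byte[] row = Bytes.toBytes("row-0042");
        Scan scan = new Scan();
        scan.setStartRow(row);                     // reversed scan starts at the requested row, inclusive
        scan.setStopRow(HConstants.EMPTY_END_ROW); // and walks back toward the start of the table
        scan.setReversed(true);
        scan.addFamily(Bytes.toBytes("info"));
        scan.setCaching(1);                        // only the first result is needed
        try (ResultScanner scanner = table.getScanner(scan)) {
          Result closest = scanner.next();         // first result = closest row at or before "row-0042"
          System.out.println(closest == null
              ? "no row at or before the requested key"
              : Bytes.toStringBinary(closest.getRow()));
        }
      }
    }
  }
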
[3/4] hbase git commit: HBASE-18066: Get with closest_row_before on hbase:meta can return empty Cell during region merge/split

2017-06-06 Thread apurtell
HBASE-18066: Get with closest_row_before on hbase:meta can return empty Cell 
during region merge/split

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b9ff18e9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b9ff18e9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b9ff18e9

Branch: refs/heads/branch-1.1
Commit: b9ff18e94e614ec83afaf0216783b7a27dec9a62
Parents: 70789c7
Author: huzheng 
Authored: Wed May 24 16:46:06 2017 +0800
Committer: Andrew Purtell 
Committed: Tue Jun 6 16:00:01 2017 -0700

--
 .../hadoop/hbase/regionserver/HRegion.java  |  36 +++-
 .../TestFromClientGetWithClosestRowBefore.java  | 164 +++
 2 files changed, 191 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b9ff18e9/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 3263873..ccb6a5c 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -118,6 +118,7 @@ import org.apache.hadoop.hbase.filter.ByteArrayComparable;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.filter.FilterWrapper;
 import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
@@ -2493,15 +2494,13 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 startRegionOperation(Operation.GET);
 this.readRequestsCount.increment();
 try {
-  Store store = getStore(family);
-  // get the closest key. (HStore.getRowKeyAtOrBefore can return null)
-  Cell key = store.getRowKeyAtOrBefore(row);
   Result result = null;
-  if (key != null) {
-Get get = new Get(CellUtil.cloneRow(key));
-get.addFamily(family);
-result = get(get);
-  }
+  Get get = new Get(row);
+  get.addFamily(family);
+  get.setClosestRowBefore(true);
+  result = get(get);
+  // for compatibility
+  result = result.isEmpty() ? null : result;
   if (coprocessorHost != null) {
 coprocessorHost.postGetClosestRowBefore(row, family, result);
   }
@@ -6582,6 +6581,20 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 return Result.create(results, get.isCheckExistenceOnly() ? 
!results.isEmpty() : null, stale);
   }
 
+  private Scan buildScanForGetWithClosestRowBefore(Get get) throws IOException {
+    Scan scan = new Scan().setStartRow(get.getRow())
+        .addFamily(get.getFamilyMap().keySet().iterator().next()).setReversed(true)
+        .setStopRow(HConstants.EMPTY_END_ROW);
+    if (this.getRegionInfo().isMetaRegion()) {
+      int delimiterIdx =
+          KeyValue.getDelimiter(get.getRow(), 0, get.getRow().length, HConstants.DELIMITER);
+      if (delimiterIdx >= 0) {
+        scan.setFilter(new PrefixFilter(Bytes.copy(get.getRow(), 0, delimiterIdx + 1)));
+      }
+    }
+    return scan;
+  }
+
   @Override
   public List get(Get get, boolean withCoprocessor) throws IOException {
 
@@ -6594,7 +6607,12 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
}
 }
 
-Scan scan = new Scan(get);
+Scan scan;
+if (get.isClosestRowBefore()) {
+  scan = buildScanForGetWithClosestRowBefore(get);
+} else {
+  scan = new Scan(get);
+}
 
 RegionScanner scanner = null;
 try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b9ff18e9/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientGetWithClosestRowBefore.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientGetWithClosestRowBefore.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientGetWithClosestRowBefore.java
new file mode 100644
index 000..781977c
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientGetWithClosestRowBefore.java
@@ -0,0 +1,164 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional 

[1/4] hbase git commit: HBASE-18066: Get with closest_row_before on hbase:meta can return empty Cell during region merge/split

2017-06-06 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 ea3075e7f -> 9c1efc9f9
  refs/heads/branch-1.1 70789c733 -> b9ff18e94
  refs/heads/branch-1.2 b9d8f3b85 -> 04bbdc835
  refs/heads/branch-1.3 041f14341 -> 5812e41bc


HBASE-18066: Get with closest_row_before on hbase:meta can return empty Cell 
during region merge/split

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5812e41b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5812e41b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5812e41b

Branch: refs/heads/branch-1.3
Commit: 5812e41bca63463798c2f9beaeb30377291a4419
Parents: 041f143
Author: huzheng 
Authored: Wed May 24 16:46:06 2017 +0800
Committer: Andrew Purtell 
Committed: Tue Jun 6 14:16:49 2017 -0700

--
 .../hadoop/hbase/regionserver/HRegion.java  |  36 +++-
 .../TestFromClientGetWithClosestRowBefore.java  | 164 +++
 2 files changed, 191 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5812e41b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index a603910..61d532b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -117,6 +117,7 @@ import org.apache.hadoop.hbase.filter.ByteArrayComparable;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.filter.FilterWrapper;
 import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
@@ -2602,15 +2603,13 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 startRegionOperation(Operation.GET);
 this.readRequestsCount.increment();
 try {
-  Store store = getStore(family);
-  // get the closest key. (HStore.getRowKeyAtOrBefore can return null)
-  Cell key = store.getRowKeyAtOrBefore(row);
   Result result = null;
-  if (key != null) {
-Get get = new Get(CellUtil.cloneRow(key));
-get.addFamily(family);
-result = get(get);
-  }
+  Get get = new Get(row);
+  get.addFamily(family);
+  get.setClosestRowBefore(true);
+  result = get(get);
+  // for compatibility
+  result = result.isEmpty() ? null : result;
   if (coprocessorHost != null) {
 coprocessorHost.postGetClosestRowBefore(row, family, result);
   }
@@ -7010,6 +7009,20 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 return get(get, withCoprocessor, HConstants.NO_NONCE, HConstants.NO_NONCE);
   }
 
+  private Scan buildScanForGetWithClosestRowBefore(Get get) throws IOException {
+    Scan scan = new Scan().setStartRow(get.getRow())
+        .addFamily(get.getFamilyMap().keySet().iterator().next()).setReversed(true)
+        .setStopRow(HConstants.EMPTY_END_ROW);
+    if (this.getRegionInfo().isMetaRegion()) {
+      int delimiterIdx =
+          KeyValue.getDelimiter(get.getRow(), 0, get.getRow().length, HConstants.DELIMITER);
+      if (delimiterIdx >= 0) {
+        scan.setFilter(new PrefixFilter(Bytes.copy(get.getRow(), 0, delimiterIdx + 1)));
+      }
+    }
+    return scan;
+  }
+
   @Override
   public List get(Get get, boolean withCoprocessor, long nonceGroup, 
long nonce)
   throws IOException {
@@ -7022,7 +7035,12 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   }
 }
 long before = EnvironmentEdgeManager.currentTime();
-Scan scan = new Scan(get);
+Scan scan;
+if (get.isClosestRowBefore()) {
+  scan = buildScanForGetWithClosestRowBefore(get);
+} else {
+  scan = new Scan(get);
+}
 
 RegionScanner scanner = null;
 try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/5812e41b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientGetWithClosestRowBefore.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientGetWithClosestRowBefore.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientGetWithClosestRowBefore.java
new file mode 100644
index 000..781977c
--- /dev/null
+++ 

[2/4] hbase git commit: HBASE-18066: Get with closest_row_before on hbase:meta can return empty Cell during region merge/split

2017-06-06 Thread apurtell
HBASE-18066: Get with closest_row_before on hbase:meta can return empty Cell 
during region merge/split

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/04bbdc83
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/04bbdc83
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/04bbdc83

Branch: refs/heads/branch-1.2
Commit: 04bbdc835c6a875576290eca1a5a1ad1d6f7577c
Parents: b9d8f3b
Author: huzheng 
Authored: Wed May 24 16:46:06 2017 +0800
Committer: Andrew Purtell 
Committed: Tue Jun 6 15:50:41 2017 -0700

--
 .../hadoop/hbase/regionserver/HRegion.java  |  36 +++-
 .../TestFromClientGetWithClosestRowBefore.java  | 164 +++
 2 files changed, 191 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/04bbdc83/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 7006dbc..d0d457f 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -117,6 +117,7 @@ import org.apache.hadoop.hbase.filter.ByteArrayComparable;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.filter.FilterWrapper;
 import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
@@ -2518,15 +2519,13 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 startRegionOperation(Operation.GET);
 this.readRequestsCount.increment();
 try {
-  Store store = getStore(family);
-  // get the closest key. (HStore.getRowKeyAtOrBefore can return null)
-  Cell key = store.getRowKeyAtOrBefore(row);
   Result result = null;
-  if (key != null) {
-Get get = new Get(CellUtil.cloneRow(key));
-get.addFamily(family);
-result = get(get);
-  }
+  Get get = new Get(row);
+  get.addFamily(family);
+  get.setClosestRowBefore(true);
+  result = get(get);
+  // for compatibility
+  result = result.isEmpty() ? null : result;
   if (coprocessorHost != null) {
 coprocessorHost.postGetClosestRowBefore(row, family, result);
   }
@@ -6811,6 +6810,20 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 return Result.create(results, get.isCheckExistenceOnly() ? 
!results.isEmpty() : null, stale);
   }
 
+  private Scan buildScanForGetWithClosestRowBefore(Get get) throws IOException {
+    Scan scan = new Scan().setStartRow(get.getRow())
+        .addFamily(get.getFamilyMap().keySet().iterator().next()).setReversed(true)
+        .setStopRow(HConstants.EMPTY_END_ROW);
+    if (this.getRegionInfo().isMetaRegion()) {
+      int delimiterIdx =
+          KeyValue.getDelimiter(get.getRow(), 0, get.getRow().length, HConstants.DELIMITER);
+      if (delimiterIdx >= 0) {
+        scan.setFilter(new PrefixFilter(Bytes.copy(get.getRow(), 0, delimiterIdx + 1)));
+      }
+    }
+    return scan;
+  }
+
   @Override
   public List get(Get get, boolean withCoprocessor) throws IOException {
 
@@ -6823,7 +6836,12 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
}
 }
 
-Scan scan = new Scan(get);
+Scan scan;
+if (get.isClosestRowBefore()) {
+  scan = buildScanForGetWithClosestRowBefore(get);
+} else {
+  scan = new Scan(get);
+}
 
 RegionScanner scanner = null;
 try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/04bbdc83/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientGetWithClosestRowBefore.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientGetWithClosestRowBefore.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientGetWithClosestRowBefore.java
new file mode 100644
index 000..781977c
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientGetWithClosestRowBefore.java
@@ -0,0 +1,164 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional 

hbase git commit: HBASE-18173 Append class

2017-06-06 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/HBASE-14850 abeb09693 -> eec122526


HBASE-18173 Append class


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/eec12252
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/eec12252
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/eec12252

Branch: refs/heads/HBASE-14850
Commit: eec1225268225d9de62fb98b90e2b75f035f5e5d
Parents: abeb096
Author: tedyu 
Authored: Tue Jun 6 16:42:45 2017 -0700
Committer: tedyu 
Committed: Tue Jun 6 16:42:45 2017 -0700

--
 hbase-native-client/core/BUCK |  11 +++
 hbase-native-client/core/append-test.cc   | 106 +
 hbase-native-client/core/append.cc|  54 +++
 hbase-native-client/core/append.h |  56 +++
 hbase-native-client/core/client-test.cc   |  30 ++
 hbase-native-client/core/raw-async-table.cc   |  14 +++
 hbase-native-client/core/raw-async-table.h|   2 +
 hbase-native-client/core/request-converter.cc |  12 +++
 hbase-native-client/core/request-converter.h  |   3 +
 hbase-native-client/core/table.cc |   5 +
 hbase-native-client/core/table.h  |   6 ++
 11 files changed, 299 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/eec12252/hbase-native-client/core/BUCK
--
diff --git a/hbase-native-client/core/BUCK b/hbase-native-client/core/BUCK
index 81fd4a7..47e97f5 100644
--- a/hbase-native-client/core/BUCK
+++ b/hbase-native-client/core/BUCK
@@ -43,6 +43,7 @@ cxx_library(
 "put.h",
 "delete.h",
 "scan.h",
+"append.h",
 "result.h",
 "result-scanner.h",
 "request-converter.h",
@@ -82,6 +83,7 @@ cxx_library(
 "put.cc",
 "delete.cc",
 "scan.cc",
+"append.cc",
 "scan-result-cache.cc",
 "raw-async-table.cc",
 "result.cc",
@@ -192,6 +194,15 @@ cxx_test(
 ],
 run_test_separately=True,)
 cxx_test(
+name="append-test",
+srcs=[
+"append-test.cc",
+],
+deps=[
+":core",
+],
+run_test_separately=True,)
+cxx_test(
 name="retry-test",
 srcs=[
 "async-rpc-retrying-test.cc",

http://git-wip-us.apache.org/repos/asf/hbase/blob/eec12252/hbase-native-client/core/append-test.cc
--
diff --git a/hbase-native-client/core/append-test.cc 
b/hbase-native-client/core/append-test.cc
new file mode 100644
index 000..619826c
--- /dev/null
+++ b/hbase-native-client/core/append-test.cc
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include 
+#include 
+
+#include "core/mutation.h"
+#include "core/append.h"
+#include "utils/time-util.h"
+
+using hbase::Append;
+using hbase::Cell;
+using hbase::CellType;
+using hbase::Mutation;
+using hbase::TimeUtil;
+
+const constexpr int64_t Mutation::kLatestTimestamp;
+
+TEST(Append, Row) {
+  Append append{"foo"};
+  EXPECT_EQ("foo", append.row());
+}
+
+TEST(Append, Durability) {
+  Append append{"row"};
+  EXPECT_EQ(hbase::pb::MutationProto_Durability_USE_DEFAULT, 
append.Durability());
+
+  auto skipWal = hbase::pb::MutationProto_Durability_SKIP_WAL;
+  append.SetDurability(skipWal);
+  EXPECT_EQ(skipWal, append.Durability());
+}
+
+TEST(Append, Timestamp) {
+  Append append{"row"};
+
+  // test default timestamp
+  EXPECT_EQ(Mutation::kLatestTimestamp, append.TimeStamp());
+
+  // set custom timestamp
+  auto ts = TimeUtil::ToMillis(TimeUtil::GetNowNanos());
+  append.SetTimeStamp(ts);
+  EXPECT_EQ(ts, append.TimeStamp());
+
+  // Add a column with custom timestamp
+  append.Add("f", "q", "v");
+  auto &cell = append.FamilyMap().at("f")[0];
+  EXPECT_EQ(ts, cell->Timestamp());
+}
+
+TEST(Append, HasFamilies) {
+  Append append{"row"};
+
+  EXPECT_EQ(false, append.HasFamilies());
+
+  append.Add("f", "q", "v");
+  EXPECT_EQ(true, 

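The C++ Append above mirrors the Java client API (row key, durability, timestamp, and Add of
family/qualifier/value). For comparison, a hedged Java-client sketch of an equivalent append;
the table, family, qualifier and values are placeholders, not names taken from the commit:

  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.client.Append;
  import org.apache.hadoop.hbase.client.Connection;
  import org.apache.hadoop.hbase.client.ConnectionFactory;
  import org.apache.hadoop.hbase.client.Result;
  import org.apache.hadoop.hbase.client.Table;
  import org.apache.hadoop.hbase.util.Bytes;

  public class AppendSketch {
    public static void main(String[] args) throws Exception {
      // Table, family, qualifier and values are placeholders for illustration.
      try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
           Table table = conn.getTable(TableName.valueOf("t"))) {
        Append append = new Append(Bytes.toBytes("test1"));
        append.add(Bytes.toBytes("d"), Bytes.toBytes("q"), Bytes.toBytes("v"));  // append "v" to d:q
        Result result = table.append(append);  // returns the updated cell values for the row
        System.out.println(Bytes.toStringBinary(result.getRow()));
      }
    }
  }
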
hbase git commit: HBASE-18126 Increment class - addendum changes table name for test

2017-06-06 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/HBASE-14850 bde59b250 -> abeb09693


HBASE-18126 Increment class - addendum changes table name for test


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/abeb0969
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/abeb0969
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/abeb0969

Branch: refs/heads/HBASE-14850
Commit: abeb096933ac7056b4ab5b093c6fbf01af0835d7
Parents: bde59b2
Author: tedyu 
Authored: Tue Jun 6 16:28:32 2017 -0700
Committer: tedyu 
Committed: Tue Jun 6 16:28:32 2017 -0700

--
 hbase-native-client/core/client-test.cc | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/abeb0969/hbase-native-client/core/client-test.cc
--
diff --git a/hbase-native-client/core/client-test.cc 
b/hbase-native-client/core/client-test.cc
index 743e928..d166f1c 100644
--- a/hbase-native-client/core/client-test.cc
+++ b/hbase-native-client/core/client-test.cc
@@ -191,10 +191,10 @@ TEST_F(ClientTest, PutGetDelete) {
 
 TEST_F(ClientTest, Increment) {
   // Using TestUtil to populate test data
-  ClientTest::test_util->CreateTable("t", "d");
+  ClientTest::test_util->CreateTable("t1", "d");
 
   // Create TableName and Row to be fetched from HBase
-  auto tn = folly::to("t");
+  auto tn = folly::to("t1");
   auto row = "test1";
 
   // Create a client



[3/3] hbase git commit: HBASE-17907 [C++] End to end Scans from Client/Table

2017-06-06 Thread enis
HBASE-17907 [C++] End to end Scans from Client/Table


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bde59b25
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bde59b25
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bde59b25

Branch: refs/heads/HBASE-14850
Commit: bde59b25044ecbbe44dd016737bd418b5873d19f
Parents: 3de6ecd
Author: Enis Soztutar 
Authored: Tue Jun 6 15:16:09 2017 -0700
Committer: Enis Soztutar 
Committed: Tue Jun 6 15:16:09 2017 -0700

--
 hbase-native-client/bin/format-code.sh  |   4 +-
 .../connection/client-handler.cc|   5 +-
 hbase-native-client/connection/request.h|   6 +
 hbase-native-client/connection/response.h   |  22 +-
 hbase-native-client/connection/rpc-client.cc|   2 -
 hbase-native-client/connection/rpc-client.h |  12 +-
 hbase-native-client/core/BUCK   |  41 +-
 .../core/async-client-scanner.cc| 142 ++
 hbase-native-client/core/async-client-scanner.h | 119 +
 .../core/async-rpc-retrying-caller-factory.h| 124 -
 .../core/async-rpc-retrying-caller.cc   |  12 +-
 .../core/async-rpc-retrying-caller.h|   2 -
 .../core/async-scan-rpc-retrying-caller.cc  | 447 +++
 .../core/async-scan-rpc-retrying-caller.h   | 233 ++
 .../core/async-table-result-scanner.cc  | 161 +++
 .../core/async-table-result-scanner.h   |  98 
 hbase-native-client/core/cell-test.cc   |  24 +
 hbase-native-client/core/cell.cc|  21 +-
 hbase-native-client/core/cell.h |   5 +-
 hbase-native-client/core/client-test.cc |  13 +-
 hbase-native-client/core/delete-test.cc |   2 +-
 hbase-native-client/core/delete.cc  |  46 +-
 hbase-native-client/core/get.cc |   3 +-
 .../core/hbase-configuration-loader.h   |   5 +-
 hbase-native-client/core/hbase-rpc-controller.h |   8 +
 hbase-native-client/core/meta-utils.cc  |  15 +-
 hbase-native-client/core/query.h|  19 +-
 hbase-native-client/core/raw-async-table.cc |  27 +-
 hbase-native-client/core/raw-async-table.h  |  21 +-
 .../core/raw-scan-result-consumer.h | 131 ++
 hbase-native-client/core/region-location.h  |   2 +-
 .../core/request-converter-test.cc  |   3 +-
 hbase-native-client/core/request-converter.cc   |  96 +++-
 hbase-native-client/core/request-converter.h|  15 +-
 hbase-native-client/core/response-converter.cc  |  38 +-
 hbase-native-client/core/response-converter.h   |   5 +-
 hbase-native-client/core/result-scanner.h   |  47 ++
 hbase-native-client/core/result-test.cc |  26 +-
 hbase-native-client/core/result.cc  |  60 ++-
 hbase-native-client/core/result.h   |  18 +-
 .../core/scan-result-cache-test.cc  | 177 
 hbase-native-client/core/scan-result-cache.cc   | 160 +++
 hbase-native-client/core/scan-result-cache.h|  80 
 hbase-native-client/core/scan-test.cc   |  15 +-
 hbase-native-client/core/scan.cc|  19 +-
 hbase-native-client/core/scan.h |  13 +-
 hbase-native-client/core/scanner-test.cc| 368 +++
 hbase-native-client/core/simple-client.cc   |  85 +++-
 hbase-native-client/core/table.cc   |  16 +
 hbase-native-client/core/table.h|   5 +
 hbase-native-client/exceptions/BUCK |  10 +-
 hbase-native-client/exceptions/exception.h  | 192 +---
 hbase-native-client/test-util/BUCK  |  62 +--
 hbase-native-client/test-util/mini-cluster.cc   |  38 +-
 hbase-native-client/test-util/mini-cluster.h|   6 +-
 hbase-native-client/test-util/test-util.cc  |   9 +
 hbase-native-client/test-util/test-util.h   |   3 +
 hbase-native-client/utils/BUCK  |   1 +
 hbase-native-client/utils/bytes-util-test.cc|  11 +
 hbase-native-client/utils/bytes-util.h  |  24 +-
 hbase-native-client/utils/optional.h|  32 ++
 61 files changed, 3065 insertions(+), 341 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bde59b25/hbase-native-client/bin/format-code.sh
--
diff --git a/hbase-native-client/bin/format-code.sh 
b/hbase-native-client/bin/format-code.sh
index 8a19930..fe236d8 100755
--- a/hbase-native-client/bin/format-code.sh
+++ b/hbase-native-client/bin/format-code.sh
@@ -19,5 +19,5 @@ set -euo pipefail
 IFS=$'\n\t'
 
 
-find core connection serde utils test-util security -name "*.h" -or -name 
"*.cc" | xargs -P8 clang-format -i --style='{BasedOnStyle: Google, ColumnLimit: 
100}'
-find core 

[1/3] hbase git commit: HBASE-17907 [C++] End to end Scans from Client/Table

2017-06-06 Thread enis
Repository: hbase
Updated Branches:
  refs/heads/HBASE-14850 3de6ecd0e -> bde59b250


http://git-wip-us.apache.org/repos/asf/hbase/blob/bde59b25/hbase-native-client/exceptions/exception.h
--
diff --git a/hbase-native-client/exceptions/exception.h 
b/hbase-native-client/exceptions/exception.h
index f25fbea..9cbd7ae 100644
--- a/hbase-native-client/exceptions/exception.h
+++ b/hbase-native-client/exceptions/exception.h
@@ -18,25 +18,22 @@
  */
 #pragma once
 
+#include 
+#include 
 #include 
 #include 
 #include 
-#include 
-#include 
 
 namespace hbase {
 
 class ThrowableWithExtraContext {
-public:
-  ThrowableWithExtraContext(folly::exception_wrapper cause,
-  const long& when) :
-  cause_(cause), when_(when), extras_("") {
-  }
+ public:
+  ThrowableWithExtraContext(folly::exception_wrapper cause, const long& when)
+  : cause_(cause), when_(when), extras_("") {}
 
-  ThrowableWithExtraContext(folly::exception_wrapper cause,
-  const long& when, const std::string& extras) :
-  cause_(cause), when_(when), extras_(extras) {
-  }
+  ThrowableWithExtraContext(folly::exception_wrapper cause, const long& when,
+const std::string& extras)
+  : cause_(cause), when_(when), extras_(extras) {}
 
   virtual std::string ToString() {
 // TODO:
@@ -44,55 +41,45 @@ public:
 return extras_ + ", " + cause_.what().toStdString();
   }
 
-  virtual folly::exception_wrapper cause() {
-return cause_;
-  }
-private:
+  virtual folly::exception_wrapper cause() { return cause_; }
+
+ private:
   folly::exception_wrapper cause_;
   long when_;
   std::string extras_;
 };
 
-class IOException: public std::logic_error {
-public:
+class IOException : public std::logic_error {
+ public:
   IOException() : logic_error("") {}
 
-  IOException(
-const std::string& what) :
-logic_error(what) {}
-  IOException(
-  const std::string& what,
- folly::exception_wrapper cause) :
-  logic_error(what), cause_(cause) {}
+  IOException(const std::string& what) : logic_error(what) {}
+  IOException(const std::string& what, folly::exception_wrapper cause)
+  : logic_error(what), cause_(cause) {}
   virtual ~IOException() = default;
 
-  virtual folly::exception_wrapper cause() {
-return cause_;
-  }
-private:
+  virtual folly::exception_wrapper cause() { return cause_; }
+
+ private:
   folly::exception_wrapper cause_;
 };
 
-class RetriesExhaustedException: public IOException {
-public:
-  RetriesExhaustedException(
-  const int& num_retries,
-  std::shared_ptr exceptions) :
-IOException(
-GetMessage(num_retries, exceptions),
-exceptions->empty() ? folly::exception_wrapper{}
-  : (*exceptions)[exceptions->size() - 1].cause()){
-  }
+class RetriesExhaustedException : public IOException {
+ public:
+  RetriesExhaustedException(const int& num_retries,
+
std::shared_ptr exceptions)
+  : IOException(GetMessage(num_retries, exceptions),
+exceptions->empty() ? folly::exception_wrapper{}
+: (*exceptions)[exceptions->size() - 
1].cause()) {}
   virtual ~RetriesExhaustedException() = default;
 
-private:
-  std::string GetMessage(
-  const int& num_retries,
-  std::shared_ptr exceptions) {
+ private:
+  std::string GetMessage(const int& num_retries,
+ 
std::shared_ptr exceptions) {
 std::string buffer("Failed after attempts=");
 buffer.append(std::to_string(num_retries + 1));
 buffer.append(", exceptions:\n");
-for (auto it = exceptions->begin();  it != exceptions->end(); it++) {
+for (auto it = exceptions->begin(); it != exceptions->end(); it++) {
   buffer.append(it->ToString());
   buffer.append("\n");
 }
@@ -100,74 +87,141 @@ private:
   }
 };
 
-class HBaseIOException : public IOException {
-};
+class HBaseIOException : public IOException {};
 
 class RemoteException : public IOException {
-public:
-
+ public:
   RemoteException() : port_(0), do_not_retry_(false) {}
 
-  RemoteException(const std::string& what) :
-  IOException(what), port_(0), do_not_retry_(false) {}
+  RemoteException(const std::string& what) : IOException(what), port_(0), 
do_not_retry_(false) {}
 
-  RemoteException(
-  const std::string& what,
-  folly::exception_wrapper cause) :
-  IOException(what, cause), port_(0), do_not_retry_(false) {}
+  RemoteException(const std::string& what, folly::exception_wrapper cause)
+  : IOException(what, cause), port_(0), do_not_retry_(false) {}
 
   virtual ~RemoteException() = default;
 
-  std::string exception_class_name() const {
-return exception_class_name_;
-  }
+  std::string exception_class_name() const { return exception_class_name_; }
 
   RemoteException* set_exception_class_name(const 

[2/3] hbase git commit: HBASE-17907 [C++] End to end Scans from Client/Table

2017-06-06 Thread enis
http://git-wip-us.apache.org/repos/asf/hbase/blob/bde59b25/hbase-native-client/core/raw-async-table.cc
--
diff --git a/hbase-native-client/core/raw-async-table.cc 
b/hbase-native-client/core/raw-async-table.cc
index f71fbba..998e2f1 100644
--- a/hbase-native-client/core/raw-async-table.cc
+++ b/hbase-native-client/core/raw-async-table.cc
@@ -111,9 +111,10 @@ folly::Future RawAsyncTable::Put(const 
hbase::Put& put) {
 folly::Future RawAsyncTable::Delete(const hbase::Delete& del) {
   auto caller =
   CreateCallerBuilder(del.row(), 
connection_conf_->write_rpc_timeout())
-  ->action([=, ](std::shared_ptr 
controller,
- std::shared_ptr loc,
- std::shared_ptr rpc_client) -> 
folly::Future {
+  ->action([=, ](
+   std::shared_ptr controller,
+   std::shared_ptr loc,
+   std::shared_ptr rpc_client) -> 
folly::Future {
 return Call(
 rpc_client, controller, loc, del, 
::RequestConverter::DeleteToMutateRequest,
 [](const Response& r) -> folly::Unit { return folly::unit; });
@@ -143,4 +144,24 @@ 
folly::Future>> RawAsyncTable::B
 
   return caller->Call().then([caller](auto r) { return r; });
 }
+
+void RawAsyncTable::Scan(const hbase::Scan& scan, 
std::shared_ptr consumer) {
+  auto scanner = AsyncClientScanner::Create(
+  connection_, SetDefaultScanConfig(scan), table_name_, consumer, 
connection_conf_->pause(),
+  connection_conf_->max_retries(), connection_conf_->scan_timeout(),
+  connection_conf_->rpc_timeout(), 
connection_conf_->start_log_errors_count());
+  scanner->Start();
+}
+
+std::shared_ptr RawAsyncTable::SetDefaultScanConfig(const 
hbase::Scan& scan) {
+  // always create a new scan object as we may reset the start row later.
+  auto new_scan = std::make_shared(scan);
+  if (new_scan->Caching() <= 0) {
+new_scan->SetCaching(default_scanner_caching_);
+  }
+  if (new_scan->MaxResultSize() <= 0) {
+new_scan->SetMaxResultSize(default_scanner_max_result_size_);
+  }
+  return new_scan;
+}
 }  // namespace hbase

http://git-wip-us.apache.org/repos/asf/hbase/blob/bde59b25/hbase-native-client/core/raw-async-table.h
--
diff --git a/hbase-native-client/core/raw-async-table.h 
b/hbase-native-client/core/raw-async-table.h
index c8e9f2f..8c40dae 100644
--- a/hbase-native-client/core/raw-async-table.h
+++ b/hbase-native-client/core/raw-async-table.h
@@ -24,7 +24,9 @@
 #include 
 #include 
 #include 
+
 #include "core/async-batch-rpc-retrying-caller.h"
+#include "core/async-client-scanner.h"
 #include "core/async-connection.h"
 #include "core/async-rpc-retrying-caller-factory.h"
 #include "core/async-rpc-retrying-caller.h"
@@ -34,6 +36,7 @@
 #include "core/increment.h"
 #include "core/put.h"
 #include "core/result.h"
+#include "core/scan.h"
 
 namespace hbase {
 
@@ -48,14 +51,22 @@ class RawAsyncTable {
   : connection_(connection),
 connection_conf_(connection->connection_conf()),
 table_name_(table_name),
-rpc_client_(connection->rpc_client()) {}
+rpc_client_(connection->rpc_client()) {
+default_scanner_caching_ = connection_conf_->scanner_caching();
+default_scanner_max_result_size_ = 
connection_conf_->scanner_max_result_size();
+  }
   virtual ~RawAsyncTable() = default;
 
   folly::Future Get(const hbase::Get& get);
 
-   folly::Future Delete(const hbase::Delete& del);
-   folly::Future Increment(const 
hbase::Increment& increment);
+  folly::Future Delete(const hbase::Delete& del);
+
+  folly::Future Increment(const 
hbase::Increment& increment);
+
   folly::Future Put(const hbase::Put& put);
+
+  void Scan(const hbase::Scan& scan, std::shared_ptr 
consumer);
+
   void Close() {}
 
   folly::Future>> Get(
@@ -69,6 +80,8 @@ class RawAsyncTable {
   std::shared_ptr connection_conf_;
   std::shared_ptr table_name_;
   std::shared_ptr rpc_client_;
+  int32_t default_scanner_caching_;
+  int64_t default_scanner_max_result_size_;
 
   /* Methods */
   template 
@@ -81,5 +94,7 @@ class RawAsyncTable {
   template 
   std::shared_ptr CreateCallerBuilder(
   std::string row, std::chrono::nanoseconds rpc_timeout);
+
+  std::shared_ptr SetDefaultScanConfig(const hbase::Scan& scan);
 };
 }  // namespace hbase

http://git-wip-us.apache.org/repos/asf/hbase/blob/bde59b25/hbase-native-client/core/raw-scan-result-consumer.h
--
diff --git a/hbase-native-client/core/raw-scan-result-consumer.h 
b/hbase-native-client/core/raw-scan-result-consumer.h
new file 

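SetDefaultScanConfig() above copies the caller's scan and backfills caching and max result
size from the connection defaults when they are unset. A small Java sketch of the same
defaulting rule; the default constants here are illustrative, not the connection's actual
configuration values:

  import java.io.IOException;

  import org.apache.hadoop.hbase.client.Scan;

  // Copy the caller's Scan and fill in caching / max result size only when unset.
  final class ScanDefaultsSketch {
    private static final int DEFAULT_CACHING = 100;
    private static final long DEFAULT_MAX_RESULT_SIZE = 2L * 1024 * 1024;

    static Scan withDefaults(Scan userScan) throws IOException {
      Scan scan = new Scan(userScan);  // never mutate the caller's object
      if (scan.getCaching() <= 0) {
        scan.setCaching(DEFAULT_CACHING);
      }
      if (scan.getMaxResultSize() <= 0) {
        scan.setMaxResultSize(DEFAULT_MAX_RESULT_SIZE);
      }
      return scan;
    }
  }
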
hbase git commit: HBASE-15160 Put back HFile's HDFS op latency sampling code and add metrics for monitoring (Yu Li and Enis Soztutar)

2017-06-06 Thread enis
Repository: hbase
Updated Branches:
  refs/heads/branch-1 356d4e918 -> ea3075e7f


HBASE-15160 Put back HFile's HDFS op latency sampling code and add metrics for 
monitoring (Yu Li and Enis Soztutar)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ea3075e7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ea3075e7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ea3075e7

Branch: refs/heads/branch-1
Commit: ea3075e7fd2db40c7d8d74a4845a04b657b0d5d7
Parents: 356d4e9
Author: Enis Soztutar 
Authored: Tue Jun 6 14:40:50 2017 -0700
Committer: Enis Soztutar 
Committed: Tue Jun 6 14:41:02 2017 -0700

--
 .../apache/hadoop/hbase/io/MetricsIOSource.java | 80 +++
 .../hadoop/hbase/io/MetricsIOWrapper.java   | 24 ++
 .../MetricsRegionServerSourceFactory.java   | 10 +++
 .../hadoop/hbase/io/MetricsIOSourceImpl.java| 84 
 .../MetricsRegionServerSourceFactoryImpl.java   |  7 ++
 .../org/apache/hadoop/hbase/io/MetricsIO.java   | 61 ++
 .../hadoop/hbase/io/MetricsIOWrapperImpl.java   | 29 +++
 .../org/apache/hadoop/hbase/io/hfile/HFile.java | 27 ++-
 .../hadoop/hbase/io/hfile/HFileBlock.java   | 26 +++---
 .../hadoop/hbase/io/hfile/HFileReaderV2.java|  4 +-
 .../apache/hadoop/hbase/io/TestMetricsIO.java   | 54 +
 .../hadoop/hbase/io/hfile/TestChecksum.java | 35 
 .../hadoop/hbase/io/hfile/TestHFileBlock.java   | 20 ++---
 .../hbase/io/hfile/TestHFileBlockIndex.java |  2 +-
 .../hbase/io/hfile/TestHFileEncryption.java | 10 ++-
 .../hbase/io/hfile/TestHFileWriterV2.java   |  4 +-
 .../hbase/io/hfile/TestHFileWriterV3.java   |  4 +-
 17 files changed, 435 insertions(+), 46 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ea3075e7/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/io/MetricsIOSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/io/MetricsIOSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/io/MetricsIOSource.java
new file mode 100644
index 000..3f27747
--- /dev/null
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/io/MetricsIOSource.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.io;
+
+import org.apache.hadoop.hbase.metrics.BaseSource;
+
+public interface MetricsIOSource extends BaseSource {
+
+  /**
+   * The name of the metrics
+   */
+  String METRICS_NAME = "IO";
+
+  /**
+   * The name of the metrics context that metrics will be under.
+   */
+  String METRICS_CONTEXT = "regionserver";
+
+  /**
+   * Description
+   */
+  String METRICS_DESCRIPTION = "Metrics about FileSystem IO";
+
+  /**
+   * The name of the metrics context that metrics will be under in jmx
+   */
+  String METRICS_JMX_CONTEXT = "RegionServer,sub=" + METRICS_NAME;
+
+
+  String FS_READ_TIME_HISTO_KEY = "fsReadTime";
+  String FS_PREAD_TIME_HISTO_KEY = "fsPReadTime";
+  String FS_WRITE_HISTO_KEY = "fsWriteTime";
+
+  String CHECKSUM_FAILURES_KEY = "fsChecksumFailureCount";
+
+  String FS_READ_TIME_HISTO_DESC
+= "Latency of HFile's sequential reads on this region server in 
milliseconds";
+  String FS_PREAD_TIME_HISTO_DESC
+= "Latency of HFile's positional reads on this region server in 
milliseconds";
+  String FS_WRITE_TIME_HISTO_DESC
+= "Latency of HFile's writes on this region server in milliseconds";
+
+  String CHECKSUM_FAILURES_DESC = "Number of checksum failures for the HBase 
HFile checksums at the"
+  + " HBase level (separate from HDFS checksums)";
+
+
+  /**
+   * Update the fs sequential read time histogram
+   * @param t time it took, in milliseconds
+   */
+  void updateFsReadTime(long t);
+
+  /**
+   * Update the fs positional read time histogram
+   * @param t time it took, in milliseconds
+   */
+  void updateFsPReadTime(long t);
+
+  /**
+   * 

[2/2] hbase git commit: HBASE-9393 Hbase does not closing a closed socket resulting in many CLOSE_WAIT

2017-06-06 Thread apurtell
HBASE-9393 Hbase does not closing a closed socket resulting in many CLOSE_WAIT

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/356d4e91
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/356d4e91
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/356d4e91

Branch: refs/heads/branch-1
Commit: 356d4e9187fc6748169d3aaebe516fb2257d8835
Parents: 39e8e2f
Author: Ashish Singhi 
Authored: Tue Jun 6 17:49:08 2017 +0530
Committer: Andrew Purtell 
Committed: Tue Jun 6 12:59:19 2017 -0700

--
 .../hbase/io/FSDataInputStreamWrapper.java  | 71 +++-
 .../hadoop/hbase/io/HalfStoreFileReader.java|  4 ++
 .../hbase/io/hfile/AbstractHFileReader.java |  8 +++
 .../org/apache/hadoop/hbase/io/hfile/HFile.java | 26 +--
 .../hadoop/hbase/io/hfile/HFileBlock.java   | 21 +-
 .../hadoop/hbase/io/hfile/HFileReaderV2.java|  5 ++
 .../hadoop/hbase/io/hfile/HFileScanner.java |  5 ++
 .../hbase/regionserver/StoreFileScanner.java|  1 +
 8 files changed, 133 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/356d4e91/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java
index b06be6b..dc168da 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java
@@ -18,7 +18,12 @@
 package org.apache.hadoop.hbase.io;
 
 import java.io.IOException;
+import java.io.InputStream;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -32,6 +37,8 @@ import com.google.common.annotations.VisibleForTesting;
  * see method comments.
  */
 public class FSDataInputStreamWrapper {
+  private static final Log LOG = 
LogFactory.getLog(FSDataInputStreamWrapper.class);
+
   private final HFileSystem hfs;
   private final Path path;
   private final FileLink link;
@@ -74,6 +81,11 @@ public class FSDataInputStreamWrapper {
   // reads without hbase checksum verification.
   private volatile int hbaseChecksumOffCount = -1;
 
+  private Boolean instanceOfCanUnbuffer = null;
+  // Using reflection to get org.apache.hadoop.fs.CanUnbuffer#unbuffer method 
to avoid compilation
+  // errors against Hadoop pre 2.6.4 and 2.7.1 versions.
+  private Method unbuffer = null;
+
   public FSDataInputStreamWrapper(FileSystem fs, Path path) throws IOException 
{
 this(fs, null, path, false);
   }
@@ -219,4 +231,61 @@ public class FSDataInputStreamWrapper {
   public HFileSystem getHfs() {
 return this.hfs;
   }
-}
+
+  /**
+   * This will free sockets and file descriptors held by the stream only when 
the stream implements
+   * org.apache.hadoop.fs.CanUnbuffer. NOT THREAD SAFE. Must be called only 
when all the clients
+   * using this stream to read the blocks have finished reading. If by chance 
the stream is
+   * unbuffered and there are clients still holding this stream for read then 
on next client read
+   * request a new socket will be opened by Datanode without client knowing 
about it and will serve
+   * its read request. Note: If this socket is idle for some time then the 
DataNode will close the
+   * socket and the socket will move into CLOSE_WAIT state and on the next 
client request on this
+   * stream, the current socket will be closed and a new socket will be opened 
to serve the
+   * requests.
+   */
+  @SuppressWarnings({ "rawtypes" })
+  public void unbuffer() {
+FSDataInputStream stream = this.getStream(this.shouldUseHBaseChecksum());
+if (stream != null) {
+  InputStream wrappedStream = stream.getWrappedStream();
+  // CanUnbuffer interface was added as part of HDFS-7694 and the fix is 
available in Hadoop
+  // 2.6.4+ and 2.7.1+ versions only so check whether the stream object 
implements the
+  // CanUnbuffer interface or not and based on that call the unbuffer api.
+  final Class streamClass = 
wrappedStream.getClass();
+  if (this.instanceOfCanUnbuffer == null) {
+// To ensure we compute whether the stream is instance of CanUnbuffer 
only once.
+this.instanceOfCanUnbuffer = false;
+Class[] streamInterfaces = streamClass.getInterfaces();
+   

[1/2] hbase git commit: HBASE-9393 Hbase does not closing a closed socket resulting in many CLOSE_WAIT

2017-06-06 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 39e8e2fb5 -> 356d4e918
  refs/heads/master ee0f148c7 -> 1950acc67


HBASE-9393 Hbase does not closing a closed socket resulting in many CLOSE_WAIT

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1950acc6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1950acc6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1950acc6

Branch: refs/heads/master
Commit: 1950acc67a2510be370d6fc7859b562e58070942
Parents: ee0f148
Author: Ashish Singhi 
Authored: Tue Jun 6 12:50:59 2017 +0530
Committer: Andrew Purtell 
Committed: Tue Jun 6 12:52:46 2017 -0700

--
 .../hbase/io/FSDataInputStreamWrapper.java  | 71 +++-
 .../org/apache/hadoop/hbase/io/hfile/HFile.java | 24 +--
 .../hadoop/hbase/io/hfile/HFileBlock.java   | 23 +++
 .../hadoop/hbase/io/hfile/HFileReaderImpl.java  |  9 +++
 4 files changed, 121 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1950acc6/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java
index 055e46a..25a3373 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java
@@ -19,8 +19,13 @@ package org.apache.hadoop.hbase.io;
 
 import java.io.Closeable;
 import java.io.IOException;
+import java.io.InputStream;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
 
 import org.apache.commons.io.IOUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -36,6 +41,8 @@ import com.google.common.annotations.VisibleForTesting;
  */
 @InterfaceAudience.Private
 public class FSDataInputStreamWrapper implements Closeable {
+  private static final Log LOG = 
LogFactory.getLog(FSDataInputStreamWrapper.class);
+
   private final HFileSystem hfs;
   private final Path path;
   private final FileLink link;
@@ -80,6 +87,11 @@ public class FSDataInputStreamWrapper implements Closeable {
   // reads without hbase checksum verification.
   private volatile int hbaseChecksumOffCount = -1;
 
+  private Boolean instanceOfCanUnbuffer = null;
+  // Using reflection to get org.apache.hadoop.fs.CanUnbuffer#unbuffer method 
to avoid compilation
+  // errors against Hadoop pre 2.6.4 and 2.7.1 versions.
+  private Method unbuffer = null;
+
   public FSDataInputStreamWrapper(FileSystem fs, Path path) throws IOException 
{
 this(fs, path, false, -1L);
   }
@@ -232,4 +244,61 @@ public class FSDataInputStreamWrapper implements Closeable 
{
   public HFileSystem getHfs() {
 return this.hfs;
   }
-}
+
+  /**
+   * This will free sockets and file descriptors held by the stream only when 
the stream implements
+   * org.apache.hadoop.fs.CanUnbuffer. NOT THREAD SAFE. Must be called only 
when all the clients
+   * using this stream to read the blocks have finished reading. If by chance 
the stream is
+   * unbuffered and there are clients still holding this stream for read then 
on next client read
+   * request a new socket will be opened by Datanode without client knowing 
about it and will serve
+   * its read request. Note: If this socket is idle for some time then the 
DataNode will close the
+   * socket and the socket will move into CLOSE_WAIT state and on the next 
client request on this
+   * stream, the current socket will be closed and a new socket will be opened 
to serve the
+   * requests.
+   */
+  @SuppressWarnings({ "rawtypes" })
+  public void unbuffer() {
+FSDataInputStream stream = this.getStream(this.shouldUseHBaseChecksum());
+if (stream != null) {
+  InputStream wrappedStream = stream.getWrappedStream();
+  // CanUnbuffer interface was added as part of HDFS-7694 and the fix is 
available in Hadoop
+  // 2.6.4+ and 2.7.1+ versions only so check whether the stream object 
implements the
+  // CanUnbuffer interface or not and based on that call the unbuffer api.
+  final Class streamClass = 
wrappedStream.getClass();
+  if (this.instanceOfCanUnbuffer == null) {
+// To ensure we compute whether the stream is instance of CanUnbuffer 
only once.
+this.instanceOfCanUnbuffer = false;
+Class[] streamInterfaces = 

hbase git commit: HBASE-18126 Increment class - addendum creates TestToStringToInt64 in bytes-util-test

2017-06-06 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/HBASE-14850 3e0332260 -> 3de6ecd0e


HBASE-18126 Increment class - addendum creates TestToStringToInt64 in 
bytes-util-test


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3de6ecd0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3de6ecd0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3de6ecd0

Branch: refs/heads/HBASE-14850
Commit: 3de6ecd0e2688c5988ea694fd2c561d66a905d62
Parents: 3e03322
Author: tedyu 
Authored: Tue Jun 6 11:32:36 2017 -0700
Committer: tedyu 
Committed: Tue Jun 6 11:32:36 2017 -0700

--
 hbase-native-client/utils/bytes-util-test.cc | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3de6ecd0/hbase-native-client/utils/bytes-util-test.cc
--
diff --git a/hbase-native-client/utils/bytes-util-test.cc 
b/hbase-native-client/utils/bytes-util-test.cc
index 676b441..ca64a21 100644
--- a/hbase-native-client/utils/bytes-util-test.cc
+++ b/hbase-native-client/utils/bytes-util-test.cc
@@ -46,7 +46,8 @@ TEST(TestBytesUtil, TestToStringBinary) {
 
   EXPECT_EQ("foo_\\x00\\xFF_bar",
 BytesUtil::ToStringBinary("foo_" + std::string{zero} + 
std::string{max} + "_bar"));
-
+}
+TEST(TestBytesUtil, TestToStringToInt64) {
   int64_t num = 761235;
   EXPECT_EQ(num, BytesUtil::ToInt64(BytesUtil::ToString(num)));
 



hbase git commit: HBASE-18005 read replica: handle the case that region server hosting both primary replica and meta region is down (huaxiang sun)

2017-06-06 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1 69d3e332f -> 39e8e2fb5


HBASE-18005 read replica: handle the case that region server hosting both 
primary replica and meta region is down (huaxiang sun)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/39e8e2fb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/39e8e2fb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/39e8e2fb

Branch: refs/heads/branch-1
Commit: 39e8e2fb5898847ef41068cdf46a660e66389541
Parents: 69d3e33
Author: tedyu 
Authored: Tue Jun 6 09:07:17 2017 -0700
Committer: tedyu 
Committed: Tue Jun 6 09:07:17 2017 -0700

--
 .../hadoop/hbase/client/ConnectionManager.java  |   3 +-
 .../apache/hadoop/hbase/client/MetaCache.java   |  34 
 .../RpcRetryingCallerWithReadReplicas.java  |  82 ++---
 .../client/ScannerCallableWithReplicas.java |  24 ++-
 .../hbase/client/TestReplicaWithCluster.java| 167 +--
 5 files changed, 269 insertions(+), 41 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/39e8e2fb/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java
index aa44070..5e670a3 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java
@@ -1296,7 +1296,8 @@ class ConnectionManager {
 } else {
   // If we are not supposed to be using the cache, delete any existing 
cached location
   // so it won't interfere.
-  metaCache.clearCache(tableName, row);
+  // We are only supposed to clean the cache for the specific replicaId
+  metaCache.clearCache(tableName, row, replicaId);
 }
 
 // Query the meta region

http://git-wip-us.apache.org/repos/asf/hbase/blob/39e8e2fb/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaCache.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaCache.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaCache.java
index 95b5950..2a172e7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaCache.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaCache.java
@@ -319,6 +319,40 @@ public class MetaCache {
   }
 
   /**
+   * Delete a cached location with specific replicaId.
+   * @param tableName tableName
+   * @param row row key
+   * @param replicaId region replica id
+   */
+  public void clearCache(final TableName tableName, final byte [] row, int 
replicaId) {
+ConcurrentMap tableLocations = 
getTableLocations(tableName);
+
+RegionLocations regionLocations = getCachedLocation(tableName, row);
+if (regionLocations != null) {
+  HRegionLocation toBeRemoved = 
regionLocations.getRegionLocation(replicaId);
+  if (toBeRemoved != null) {
+RegionLocations updatedLocations = regionLocations.remove(replicaId);
+byte[] startKey = 
regionLocations.getRegionLocation().getRegionInfo().getStartKey();
+boolean removed;
+if (updatedLocations.isEmpty()) {
+  removed = tableLocations.remove(startKey, regionLocations);
+} else {
+  removed = tableLocations.replace(startKey, regionLocations, 
updatedLocations);
+}
+
+if (removed) {
+  if (metrics != null) {
+metrics.incrMetaCacheNumClearRegion();
+  }
+  if (LOG.isTraceEnabled()) {
+LOG.trace("Removed " + toBeRemoved + " from cache");
+  }
+}
+  }
+}
+  }
+
+  /**
* Delete a cached location for a table, row and server
*/
   public void clearCache(final TableName tableName, final byte [] row, 
ServerName serverName) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/39e8e2fb/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
index 8c5efde..e6954cc 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
+++ 

[46/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 7889845..377894e 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -3286,15 +3286,15 @@
 
 apply(StoreFile)
 - Method in class org.apache.hadoop.hbase.backup.HFileArchiver.StoreToFile
 
-apply(StoreFile)
 - Method in class org.apache.hadoop.hbase.regionserver.StoreFile.Comparators.GetBulkTime
+apply(StoreFile)
 - Method in class org.apache.hadoop.hbase.regionserver.StoreFileComparators.GetBulkTime
 
-apply(StoreFile)
 - Method in class org.apache.hadoop.hbase.regionserver.StoreFile.Comparators.GetFileSize
+apply(StoreFile)
 - Method in class org.apache.hadoop.hbase.regionserver.StoreFileComparators.GetFileSize
 
-apply(StoreFile)
 - Method in class org.apache.hadoop.hbase.regionserver.StoreFile.Comparators.GetMaxTimestamp
+apply(StoreFile)
 - Method in class org.apache.hadoop.hbase.regionserver.StoreFileComparators.GetMaxTimestamp
 
-apply(StoreFile)
 - Method in class org.apache.hadoop.hbase.regionserver.StoreFile.Comparators.GetPathName
+apply(StoreFile)
 - Method in class org.apache.hadoop.hbase.regionserver.StoreFileComparators.GetPathName
 
-apply(StoreFile)
 - Method in class org.apache.hadoop.hbase.regionserver.StoreFile.Comparators.GetSeqId
+apply(StoreFile)
 - Method in class org.apache.hadoop.hbase.regionserver.StoreFileComparators.GetSeqId
 
 apply(byte)
 - Method in enum org.apache.hadoop.hbase.util.Order
 
@@ -3706,9 +3706,9 @@
 
 ASSIGN_MAX_ATTEMPTS
 - Static variable in class org.apache.hadoop.hbase.master.assignment.AssignmentManager
 
-ASSIGN_SEQ_IDS
 - Static variable in class org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles
+ASSIGN_METRIC_PREFIX
 - Static variable in interface org.apache.hadoop.hbase.master.MetricsAssignmentManagerSource
 
-ASSIGN_TIME_NAME
 - Static variable in interface org.apache.hadoop.hbase.master.MetricsAssignmentManagerSource
+ASSIGN_SEQ_IDS
 - Static variable in class org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles
 
 assignDispatchWaitMillis
 - Variable in class org.apache.hadoop.hbase.master.assignment.AssignmentManager
 
@@ -3789,6 +3789,8 @@
 
 assignMetaReplicas()
 - Method in class org.apache.hadoop.hbase.master.MasterMetaBootstrap
 
+assignMetrics
 - Variable in class org.apache.hadoop.hbase.master.MetricsAssignmentManagerSourceImpl
+
 AssignProcedure - Class in org.apache.hadoop.hbase.master.assignment
 
 Procedure that describe the assignment of a single 
region.
@@ -3801,6 +3803,8 @@
 
 AssignProcedure(HRegionInfo,
 ServerName) - Constructor for class 
org.apache.hadoop.hbase.master.assignment.AssignProcedure
 
+assignProcMetrics
 - Variable in class org.apache.hadoop.hbase.master.MetricsAssignmentManager
+
 assignQueueFullCond
 - Variable in class org.apache.hadoop.hbase.master.assignment.AssignmentManager
 
 assignQueueLock
 - Variable in class org.apache.hadoop.hbase.master.assignment.AssignmentManager
@@ -3825,8 +3829,6 @@
 
 assignThread
 - Variable in class org.apache.hadoop.hbase.master.assignment.AssignmentManager
 
-assignTimeHisto
 - Variable in class org.apache.hadoop.hbase.master.MetricsAssignmentManagerSourceImpl
-
 asSubByteBuff(long,
 int) - Method in class org.apache.hadoop.hbase.util.ByteBufferArray
 
 Creates a ByteBuff from a given array of ByteBuffers from 
the given offset to the
@@ -6442,7 +6444,7 @@
 
 Meta data block name for bloom filter bits.
 
-BLOOM_FILTER_TYPE_KEY
 - Static variable in class org.apache.hadoop.hbase.regionserver.StoreFile
+BLOOM_FILTER_TYPE_KEY
 - Static variable in interface org.apache.hadoop.hbase.regionserver.StoreFile
 
 Bloom filter Type in FileInfo
 
@@ -6494,7 +6496,7 @@
 BloomFilterFactory - Class in org.apache.hadoop.hbase.util
 
 Handles Bloom filter initialization based on configuration 
and serialized
- metadata in the reader and writer of StoreFile.
+ metadata in the reader and writer of StoreFile.
 
 BloomFilterFactory()
 - Constructor for class org.apache.hadoop.hbase.util.BloomFilterFactory
 
@@ -7196,8 +7198,6 @@
 
 Build the ZK quorum server string with "server:clientport" 
list, separated by ','
 
-BULK_ASSIGN_TIME_NAME
 - Static variable in interface org.apache.hadoop.hbase.master.MetricsAssignmentManagerSource
-
 BULK_LOAD
 - Static variable in class org.apache.hadoop.hbase.regionserver.wal.WALEdit
 
 BULK_LOAD_FAMILY
 - Static variable in class org.apache.hadoop.hbase.backup.impl.BackupSystemTable
@@ -7238,11 +7238,11 @@
 
 Staging dir used by bulk load
 
-BULKLOAD_TASK_KEY
 - Static variable in class org.apache.hadoop.hbase.regionserver.StoreFile
+BULKLOAD_TASK_KEY
 - Static variable in interface org.apache.hadoop.hbase.regionserver.StoreFile
 
 Meta key set when store file is a result of a bulk 
load
 
-BULKLOAD_TIME_KEY
 - Static variable in class org.apache.hadoop.hbase.regionserver.StoreFile
+BULKLOAD_TIME_KEY
 - 

[48/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index e836c17..09d60f4 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -286,10 +286,10 @@
 Warnings
 Errors
 
-2223
+2227
 0
 0
-14486
+14468
 
 Files
 
@@ -3032,7 +3032,7 @@
 org/apache/hadoop/hbase/master/MetricsAssignmentManager.java
 0
 0
-6
+5
 
 org/apache/hadoop/hbase/master/MetricsAssignmentManagerSource.java
 0
@@ -3687,7 +3687,7 @@
 org/apache/hadoop/hbase/procedure2/Procedure.java
 0
 0
-11
+10
 
 org/apache/hadoop/hbase/procedure2/ProcedureDeque.java
 0
@@ -4027,7 +4027,7 @@
 org/apache/hadoop/hbase/regionserver/CompactionTool.java
 0
 0
-12
+4
 
 org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
 0
@@ -4124,380 +4124,390 @@
 0
 52
 
+org/apache/hadoop/hbase/regionserver/HStoreFile.java
+0
+0
+4
+
 org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java
 0
 0
 15
-
+
 org/apache/hadoop/hbase/regionserver/HeapMemoryTuner.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/regionserver/ImmutableSegment.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/regionserver/InternalScan.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/regionserver/InternalScanner.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/regionserver/KeyPrefixRegionSplitPolicy.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/KeyValueHeap.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/regionserver/KeyValueScanner.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/regionserver/LeaseException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/Leases.java
 0
 0
 12
-
+
 org/apache/hadoop/hbase/regionserver/LogRoller.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/regionserver/MemStore.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java
 0
 0
 19
-
+
 org/apache/hadoop/hbase/regionserver/MemStoreCompactorSegmentsIterator.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java
 0
 0
 31
-
+
 org/apache/hadoop/hbase/regionserver/MemStoreLABImpl.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/regionserver/MemStoreMergerSegmentsIterator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/MemStoreSegmentsIterator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/MemStoreSnapshot.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/MetricsHeapMemoryManagerSource.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/MetricsRegion.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/MetricsRegionServer.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceImpl.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/MetricsTable.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/MetricsTableAggregateSourceImpl.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/MetricsTableSourceImpl.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/regionserver/MetricsTableWrapperAggregateImpl.java
 0
 0
 20
-
+
 org/apache/hadoop/hbase/regionserver/MiniBatchOperationInProgress.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/regionserver/MultiRowMutationProcessor.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/regionserver/MutableSegment.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/regionserver/NoOpHeapMemoryTuner.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/NonLazyKeyValueScanner.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/NonReversedNonLazyKeyValueScanner.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/OnlineRegions.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/regionserver/OperationStatus.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/RSDumpServlet.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/regionserver/RSRpcServices.java
 0
 0
 132
-
+
 org/apache/hadoop/hbase/regionserver/RSStatusServlet.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/regionserver/Region.java
 0
 0
 61
-
+
 org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
 0
 0
 185
-
+
 org/apache/hadoop/hbase/regionserver/RegionMergeRequest.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/regionserver/RegionScanner.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/regionserver/RegionServerServices.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/regionserver/RegionServicesForStores.java
 0
 0
 6
-
+
 

[28/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html
index d98b54f..1c410ff 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.html
@@ -389,7 +389,7 @@ extends Procedure
-addStackIndex,
 afterReplay,
 beforeReplay,
 compareTo,
 completionCleanup,
 doAcquireLock,
 doExecute,
 d
 oReleaseLock, doRollback,
 elapsedTime,
 getChildrenLatch,
 getException,
 getLastUpdate,
 getNonceKey,
 getOwner,
 getParentProcId,
 getProcId, getProcIdHashCode,
 getResult,
 getRootProcedureId,
 getRootProcId,
 getStackIndexes,
 getState,
 getSubmittedTime,
 getTimeout,
 <
 a 
href="../../../../../../org/apache/hadoop/hbase/procedure2/Procedure.html#getTimeoutTimestamp--">getTimeoutTimestamp,
 hasChildren,
 hasException,
 hasLock,
 hasOwner,
 hasParent,
 hasTimeout,
 haveSameParent,
 holdLock,
 incChildrenLatch,
 isFailed,
 isFinished,
 isInitializing,
 isRunnable,
 isSuccess,
 isWaiting,
 removeStackIndex,
 setAbortFailure,
 setChildrenLatch,
 setFailure,
 setFailure,
 setLastUpdate,
 setNonceKey,
 setOwner,
 setOwner,
 setParentProcId,
 setProcId,
 setResult,
 setRootProcId,
 setStackIndexes,
 setState,
 setSubmittedTime,
 setTimeout,
 setTimeoutFailure,
 shouldWaitClientAck,
 toString,
 toStringClass,
 toStringDetails,
 toStringSimpleSB,
 updateMetricsOnFinish,
 updateMetricsOnSubmit,
 updateTimestamp,
 wasExecuted
+addStackIndex,
 afterReplay,
 beforeReplay,
 compareTo,
 completionCleanup,
 doAcquireLock,
 doExecute,
 d
 oReleaseLock, doRollback,
 elapsedTime,
 getChildrenLatch,
 getException,
 getLastUpdate,
 getNonceKey,
 getOwner,
 getParentProcId,
 getProcedureMetrics, getProcId,
 getProcIdHashCode,
 getResult,
 getRootProcedureId,
 getRootProcId,
 getStackIndexes,
 getState,
 getSubmittedTime, getTimeout,
 getTimeoutTimestamp,
 hasChildren,
 hasException,
 hasLock,
 hasOwner,
 hasParent,
 hasTimeout,
 haveSameParent,
 holdLock,
 incChildrenLatch,
 isFailed,
 isFinished,
 isInitializing,
 isRunnable,
 isSuccess,
 isWaiting,
 removeStackIndex,
 setAbortFailure,
 setChildrenLatch,
 setFailure,
 setFailure,
 setLastUpdate,
 setNonceKey,
 setOwner, 
setOwner,
 setParentProcId,
 setProcId,
 setResult,
 setRootProcId,
 setStackIndexes,
 setState,
 setSubmittedTime,
 setTimeout,
 setTimeoutFailure,
 shouldWaitClientAck,
 toString,
 toStringClass,
 toStringDetails,
 toStringSimpleSB,
 updateMetricsOnFinish,
 updateMetricsOnSubmit,
 updateTimestamp,
 wasExecuted
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/procedure/DispatchMergingRegionsProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/DispatchMergingRegionsProcedure.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/DispatchMergingRegionsProcedure.html
index 5708c32..6734a0e 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/DispatchMergingRegionsProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/DispatchMergingRegionsProcedure.html
@@ -436,7 +436,7 @@ extends Procedure
-addStackIndex,
 afterReplay,
 beforeReplay,
 compareTo,
 completionCleanup,
 doAcquireLock,
 doExecute,
 d
 oReleaseLock, doRollback,
 elapsedTime,
 getChildrenLatch,
 getException,
 getLastUpdate,
 getNonceKey,
 getOwner,
 getParentProcId,
 getProcId, getProcIdHashCode,
 getResult,
 getRootProcedureId,
 getRootProcId,
 getStackIndexes,
 getState,
 getSubmittedTime,
 getTimeout,
 <
 a 
href="../../../../../../org/apache/hadoop/hbase/procedure2/Procedure.html#getTimeoutTimestamp--">getTimeoutTimestamp,
 hasChildren,
 hasException,
 hasLock,
 hasOwner,
 hasParent,
 hasTimeout,
 haveSameParent,
 holdLock,
 incChildrenLatch,
 isFailed,
 isFinished,
 isInitializing,
 isRunnable,
 isSuccess,
 isWaiting,
 removeStackIndex,
 setAbortFailure,
 setChildrenLatch,
 setFailure,
 setFailure,
 setLastUpdate,
 setNonceKey,
 setOwner,
 setOwner,
 setParentProcId,
 setProcId,
 setResult,
 setRootProcId,
 setStackIndexes,
 setState,
 setSubmittedTime,
 setTimeout,
 setTimeoutFailure,
 shouldWaitClientAck,
 toString,
 toStringClass,
 toStringDetails,
 toStringSimpleSB,
 updateMetricsOnFinish,
 updateMetricsOnSubmit,
 updateTimestamp,
 wasExecuted
+addStackIndex,
 afterReplay,
 beforeReplay,
 compareTo,
 completionCleanup,
 doAcquireLock,
 doExecute,
 d
 oReleaseLock, doRollback,
 elapsedTime,
 getChildrenLatch,
 getException,
 getLastUpdate,
 getNonceKey,
 getOwner,
 getParentProcId,
 getProcedureMetrics, getProcId,
 getProcIdHashCode,
 getResult,
 

hbase-site git commit: INFRA-10751 Empty commit

2017-06-06 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 6ad4f21aa -> f4a44f441


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/f4a44f44
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/f4a44f44
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/f4a44f44

Branch: refs/heads/asf-site
Commit: f4a44f4411991740120aedab18d532bde965ee2d
Parents: 6ad4f21
Author: jenkins 
Authored: Tue Jun 6 15:00:30 2017 +
Committer: jenkins 
Committed: Tue Jun 6 15:00:30 2017 +

--

--




[50/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/apache_hbase_reference_guide.pdf
--
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index fd5de37..0e0452e 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,16 +5,16 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
 /Producer (Apache HBase Team)
-/CreationDate (D:20170604144755+00'00')
-/ModDate (D:20170604144755+00'00')
+/CreationDate (D:20170606144800+00'00')
+/ModDate (D:20170606144800+00'00')
 >>
 endobj
 2 0 obj
 << /Type /Catalog
 /Pages 3 0 R
 /Names 25 0 R
-/Outlines 4019 0 R
-/PageLabels 4227 0 R
+/Outlines 4021 0 R
+/PageLabels 4229 0 R
 /PageMode /UseOutlines
 /ViewerPreferences [/FitWindow]
 >>
@@ -22,7 +22,7 @@ endobj
 3 0 obj
 << /Type /Pages
 /Count 675
-/Kids [7 0 R 13 0 R 15 0 R 17 0 R 19 0 R 21 0 R 23 0 R 39 0 R 43 0 R 47 0 R 55 
0 R 58 0 R 60 0 R 62 0 R 66 0 R 71 0 R 74 0 R 79 0 R 81 0 R 84 0 R 86 0 R 92 0 
R 101 0 R 106 0 R 108 0 R 120 0 R 123 0 R 130 0 R 136 0 R 145 0 R 154 0 R 166 0 
R 170 0 R 172 0 R 176 0 R 182 0 R 184 0 R 186 0 R 188 0 R 190 0 R 193 0 R 199 0 
R 201 0 R 204 0 R 206 0 R 208 0 R 210 0 R 212 0 R 214 0 R 217 0 R 220 0 R 224 0 
R 226 0 R 228 0 R 230 0 R 232 0 R 234 0 R 236 0 R 238 0 R 245 0 R 247 0 R 249 0 
R 251 0 R 254 0 R 258 0 R 263 0 R 268 0 R 271 0 R 274 0 R 290 0 R 299 0 R 305 0 
R 317 0 R 326 0 R 331 0 R 333 0 R 335 0 R 346 0 R 351 0 R 355 0 R 360 0 R 367 0 
R 378 0 R 390 0 R 404 0 R 413 0 R 415 0 R 417 0 R 423 0 R 434 0 R 445 0 R 456 0 
R 459 0 R 462 0 R 466 0 R 470 0 R 474 0 R 477 0 R 479 0 R 482 0 R 486 0 R 488 0 
R 492 0 R 497 0 R 501 0 R 507 0 R 509 0 R 515 0 R 517 0 R 521 0 R 530 0 R 532 0 
R 536 0 R 539 0 R 542 0 R 545 0 R 559 0 R 566 0 R 573 0 R 585 0 R 591 0 R 599 0 
R 608 0 R 611 0 R 615 0 R 618 0 R 629 0
  R 637 0 R 643 0 R 648 0 R 652 0 R 654 0 R 668 0 R 680 0 R 686 0 R 692 0 R 695 
0 R 704 0 R 712 0 R 716 0 R 721 0 R 726 0 R 728 0 R 730 0 R 732 0 R 740 0 R 749 
0 R 753 0 R 761 0 R 769 0 R 775 0 R 779 0 R 785 0 R 790 0 R 795 0 R 803 0 R 805 
0 R 809 0 R 814 0 R 820 0 R 823 0 R 830 0 R 840 0 R 844 0 R 846 0 R 849 0 R 853 
0 R 858 0 R 861 0 R 873 0 R 877 0 R 882 0 R 890 0 R 895 0 R 899 0 R 903 0 R 905 
0 R 908 0 R 910 0 R 914 0 R 916 0 R 920 0 R 924 0 R 928 0 R 933 0 R 938 0 R 941 
0 R 943 0 R 950 0 R 956 0 R 964 0 R 973 0 R 977 0 R 982 0 R 986 0 R 988 0 R 997 
0 R 1000 0 R 1005 0 R 1008 0 R 1017 0 R 1020 0 R 1026 0 R 1033 0 R 1036 0 R 
1038 0 R 1047 0 R 1049 0 R 1051 0 R 1054 0 R 1056 0 R 1058 0 R 1060 0 R 1062 0 
R 1064 0 R 1068 0 R 1072 0 R 1077 0 R 1079 0 R 1081 0 R 1083 0 R 1085 0 R 1090 
0 R 1099 0 R 1102 0 R 1104 0 R 1106 0 R  0 R 1113 0 R 1116 0 R 1118 0 R 
1120 0 R 1122 0 R 1125 0 R 1130 0 R 1135 0 R 1145 0 R 1150 0 R 1164 0 R 1177 0 
R 1190 0 R 1199 0 R 1213 0 R 1217 0 R 1227 0 R 12
 40 0 R 1243 0 R 1255 0 R 1264 0 R 1271 0 R 1275 0 R 1285 0 R 1290 0 R 1294 0 R 
1300 0 R 1306 0 R 1313 0 R 1321 0 R 1323 0 R 1335 0 R 1337 0 R 1342 0 R 1346 0 
R 1351 0 R 1361 0 R 1367 0 R 1373 0 R 1375 0 R 1377 0 R 1390 0 R 1396 0 R 1404 
0 R 1409 0 R 1421 0 R 1428 0 R 1433 0 R 1443 0 R 1451 0 R 1454 0 R 1460 0 R 
1464 0 R 1467 0 R 1472 0 R 1475 0 R 1479 0 R 1485 0 R 1489 0 R 1494 0 R 1500 0 
R 1504 0 R 1507 0 R 1509 0 R 1517 0 R 1525 0 R 1531 0 R 1536 0 R 1540 0 R 1543 
0 R 1549 0 R 1555 0 R 1560 0 R 1562 0 R 1564 0 R 1567 0 R 1569 0 R 1577 0 R 
1580 0 R 1586 0 R 1594 0 R 1598 0 R 1603 0 R 1609 0 R 1612 0 R 1614 0 R 1616 0 
R 1618 0 R 1625 0 R 1635 0 R 1637 0 R 1639 0 R 1641 0 R 1643 0 R 1646 0 R 1648 
0 R 1650 0 R 1652 0 R 1655 0 R 1657 0 R 1659 0 R 1661 0 R 1665 0 R 1669 0 R 
1678 0 R 1680 0 R 1682 0 R 1684 0 R 1686 0 R 1693 0 R 1695 0 R 1700 0 R 1702 0 
R 1704 0 R 1711 0 R 1716 0 R 1722 0 R 1726 0 R 1729 0 R 1732 0 R 1736 0 R 1738 
0 R 1741 0 R 1743 0 R 1745 0 R 1747 0 R 1751 0 R 1753 0 R 
 1756 0 R 1758 0 R 1760 0 R 1762 0 R 1764 0 R 1772 0 R 1775 0 R 1780 0 R 1782 0 
R 1784 0 R 1786 0 R 1788 0 R 1796 0 R 1807 0 R 1810 0 R 1824 0 R 1836 0 R 1840 
0 R 1846 0 R 1851 0 R 1854 0 R 1859 0 R 1861 0 R 1866 0 R 1868 0 R 1871 0 R 
1873 0 R 1875 0 R 1877 0 R 1879 0 R 1883 0 R 1885 0 R 1889 0 R 1893 0 R 1900 0 
R 1907 0 R 1918 0 R 1932 0 R 1944 0 R 1961 0 R 1965 0 R 1967 0 R 1971 0 R 1988 
0 R 1996 0 R 2003 0 R 2012 0 R 2018 0 R 2028 0 R 2039 0 R 2045 0 R 2054 0 R 
2066 0 R 2083 0 R 2094 0 R 2097 0 R 2106 0 R 2121 0 R 2128 0 R 2131 0 R 2136 0 
R 2141 0 R 2151 0 R 2159 0 R 2162 0 R 2164 0 R 2168 0 R 2183 0 R 2192 0 R 2197 
0 R 2201 0 R 2204 0 R 2206 0 R 2208 0 R 2210 0 R 2212 0 R 2217 0 R 2219 0 R 
2229 0 R 2239 0 R 2246 0 R 2258 0 R 2263 0 R 2267 0 R 2280 0 R 2287 0 R 2293 0 
R 2295 0 R 2305 0 R 2312 0 R 2323 0 R 2327 0 R 2338 0 R 2344 0 R 2354 0 R 2363 
0 R 2371 0 R 2377 0 R 2382 0 R 2386 0 R 2390 0 R 2392 0 R 2398 0 R 2402 0 R 
2406 0 R 2412 0 R 2419 0 R 2424 0 R 2428 0 R 2437 0 R 2442 0 
 R 2447 0 R 2460 0 R 

[49/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/apache_hbase_reference_guide.pdfmarks
--
diff --git a/apache_hbase_reference_guide.pdfmarks 
b/apache_hbase_reference_guide.pdfmarks
index 0a639aa..7d11826 100644
--- a/apache_hbase_reference_guide.pdfmarks
+++ b/apache_hbase_reference_guide.pdfmarks
@@ -2,8 +2,8 @@
   /Author (Apache HBase Team)
   /Subject ()
   /Keywords ()
-  /ModDate (D:20170604144916)
-  /CreationDate (D:20170604144916)
+  /ModDate (D:20170606144924)
+  /CreationDate (D:20170606144924)
   /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
   /Producer ()
   /DOCINFO pdfmark

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/apidocs/org/apache/hadoop/hbase/quotas/SpaceViolationPolicy.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/quotas/SpaceViolationPolicy.html 
b/apidocs/org/apache/hadoop/hbase/quotas/SpaceViolationPolicy.html
index 9adcfc7..793084f 100644
--- a/apidocs/org/apache/hadoop/hbase/quotas/SpaceViolationPolicy.html
+++ b/apidocs/org/apache/hadoop/hbase/quotas/SpaceViolationPolicy.html
@@ -273,7 +273,7 @@ the order they are declared.
 
 
 values
-public staticSpaceViolationPolicy[]values()
+public staticSpaceViolationPolicy[]values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -293,7 +293,7 @@ for (SpaceViolationPolicy c : SpaceViolationPolicy.values())
 
 
 valueOf
-public staticSpaceViolationPolicyvalueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+public staticSpaceViolationPolicyvalueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/book.html
--
diff --git a/book.html b/book.html
index e3950ab..f0833f3 100644
--- a/book.html
+++ b/book.html
@@ -34272,13 +34272,19 @@ See http://www.apache.org/foundation/board/reporting;>ASF board reporti
 
 
 
-Figure 14. Apache HBase Orca
+Figure 14. Apache HBase Orca, HBase Colors,  Font
 
 
 https://issues.apache.org/jira/browse/HBASE-4920;>An Orca is the 
Apache HBase mascot. See NOTICES.txt.
 Our Orca logo we got here: http://www.vectorfree.com/jumping-orca; 
class="bare">http://www.vectorfree.com/jumping-orca It is licensed Creative 
Commons Attribution 3.0.
 See https://creativecommons.org/licenses/by/3.0/us/; 
class="bare">https://creativecommons.org/licenses/by/3.0/us/ We changed the 
logo by stripping the colored background, inverting it and then rotating it 
some.
 
+
+The 'official' HBase color is "International Orange (Engineering)", the 
color of the https://en.wikipedia.org/wiki/International_orange;>Golden Gate 
bridge in San Francisco and for space suits used by NASA.
+
+
+Our 'font' is http://www.dafont.com/bitsumishi.font;>Bitsumishi.
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/bulk-loads.html
--
diff --git a/bulk-loads.html b/bulk-loads.html
index e6443fb..8d81011 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase   
   Bulk Loads in Apache HBase (TM)
@@ -311,7 +311,7 @@ under the License. -->
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-06-04
+  Last Published: 
2017-06-06
 
 
 



[17/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/CompactionTool.CompactionInputFormat.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/CompactionTool.CompactionInputFormat.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/CompactionTool.CompactionInputFormat.html
index 53bbd20..9b36dac 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/CompactionTool.CompactionInputFormat.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/CompactionTool.CompactionInputFormat.html
@@ -128,7 +128,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class CompactionTool.CompactionInputFormat
+private static class CompactionTool.CompactionInputFormat
 extends org.apache.hadoop.mapreduce.lib.input.TextInputFormat
 Input format that uses store files block location as input 
split locality.
 
@@ -267,7 +267,7 @@ extends 
org.apache.hadoop.mapreduce.lib.input.TextInputFormat
 
 
 CompactionInputFormat
-privateCompactionInputFormat()
+privateCompactionInputFormat()
 
 
 
@@ -284,7 +284,7 @@ extends 
org.apache.hadoop.mapreduce.lib.input.TextInputFormat
 
 
 isSplitable
-protectedbooleanisSplitable(org.apache.hadoop.mapreduce.JobContextcontext,
+protectedbooleanisSplitable(org.apache.hadoop.mapreduce.JobContextcontext,
   org.apache.hadoop.fs.Pathfile)
 
 Overrides:
@@ -298,7 +298,7 @@ extends 
org.apache.hadoop.mapreduce.lib.input.TextInputFormat
 
 
 getSplits
-publichttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.mapreduce.InputSplitgetSplits(org.apache.hadoop.mapreduce.JobContextjob)
+publichttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.mapreduce.InputSplitgetSplits(org.apache.hadoop.mapreduce.JobContextjob)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Returns a split for each store files directory using the 
block location
  of each file as locality reference.
@@ -316,7 +316,7 @@ extends 
org.apache.hadoop.mapreduce.lib.input.TextInputFormat
 
 
 getStoreDirHosts
-private statichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String[]getStoreDirHosts(org.apache.hadoop.fs.FileSystemfs,
+private statichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String[]getStoreDirHosts(org.apache.hadoop.fs.FileSystemfs,
  org.apache.hadoop.fs.Pathpath)
   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 return the top hosts of the store files, used by the 
Split
@@ -332,7 +332,7 @@ extends 
org.apache.hadoop.mapreduce.lib.input.TextInputFormat
 
 
 createInputFile
-public staticvoidcreateInputFile(org.apache.hadoop.fs.FileSystemfs,
+public staticvoidcreateInputFile(org.apache.hadoop.fs.FileSystemfs,
org.apache.hadoop.fs.Pathpath,
http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in 
java.util">Setorg.apache.hadoop.fs.PathtoCompactDirs)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/CompactionTool.CompactionMapper.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/CompactionTool.CompactionMapper.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/CompactionTool.CompactionMapper.html
index ee416ad..42fffc6 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/CompactionTool.CompactionMapper.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/CompactionTool.CompactionMapper.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class CompactionTool.CompactionMapper
+private static class CompactionTool.CompactionMapper
 extends 
org.apache.hadoop.mapreduce.Mapperorg.apache.hadoop.io.LongWritable,org.apache.hadoop.io.Text,org.apache.hadoop.io.NullWritable,org.apache.hadoop.io.NullWritable
 
 
@@ -244,7 +244,7 @@ extends 
org.apache.hadoop.mapreduce.Mapperorg.apache.hadoop.io.LongWritable,
 
 
 compactor
-privateCompactionTool.CompactionWorker compactor
+privateCompactionTool.CompactionWorker 

[31/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.html
 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.html
index 50ee3f5..ee4886c 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.html
@@ -123,7 +123,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.LimitedPrivate(value="Configuration")
-public class StochasticLoadBalancer
+public class StochasticLoadBalancer
 extends BaseLoadBalancer
 This is a best effort load balancer. Given a Cost 
function F(C) = x It will
  randomly try and mutate the cluster to Cprime. If F(Cprime)  F(C) then the
@@ -221,7 +221,7 @@ extends (package private) static class
 StochasticLoadBalancer.LocalityCostFunction
 Compute a cost of a potential cluster configuration based 
upon where
- StoreFiles are 
located.
+ StoreFiles are 
located.
 
 
 
@@ -641,7 +641,7 @@ extends 
 
 STEPS_PER_REGION_KEY
-protected static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String STEPS_PER_REGION_KEY
+protected static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String STEPS_PER_REGION_KEY
 
 See Also:
 Constant
 Field Values
@@ -654,7 +654,7 @@ extends 
 
 MAX_STEPS_KEY
-protected static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String MAX_STEPS_KEY
+protected static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String MAX_STEPS_KEY
 
 See Also:
 Constant
 Field Values
@@ -667,7 +667,7 @@ extends 
 
 RUN_MAX_STEPS_KEY
-protected static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String RUN_MAX_STEPS_KEY
+protected static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String RUN_MAX_STEPS_KEY
 
 See Also:
 Constant
 Field Values
@@ -680,7 +680,7 @@ extends 
 
 MAX_RUNNING_TIME_KEY
-protected static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String MAX_RUNNING_TIME_KEY
+protected static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String MAX_RUNNING_TIME_KEY
 
 See Also:
 Constant
 Field Values
@@ -693,7 +693,7 @@ extends 
 
 KEEP_REGION_LOADS
-protected static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String KEEP_REGION_LOADS
+protected static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String KEEP_REGION_LOADS
 
 See Also:
 Constant
 Field Values
@@ -706,7 +706,7 @@ extends 
 
 TABLE_FUNCTION_SEP
-private static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String TABLE_FUNCTION_SEP
+private static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String TABLE_FUNCTION_SEP
 
 See Also:
 Constant
 Field Values
@@ -719,7 +719,7 @@ extends 
 
 MIN_COST_NEED_BALANCE_KEY
-protected static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String MIN_COST_NEED_BALANCE_KEY
+protected static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String MIN_COST_NEED_BALANCE_KEY
 
 See Also:
 Constant
 Field Values
@@ -732,7 +732,7 @@ extends 
 
 RANDOM
-protected static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Random.html?is-external=true;
 title="class or interface in java.util">Random RANDOM
+protected static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Random.html?is-external=true;
 title="class or interface in java.util">Random RANDOM
 
 
 
@@ -741,7 +741,7 @@ extends 
 
 LOG
-private static finalorg.apache.commons.logging.Log LOG
+private static finalorg.apache.commons.logging.Log LOG
 
 
 
@@ -750,7 +750,7 @@ extends 
 
 loads
-http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or 

[09/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileComparators.GetBulkTime.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileComparators.GetBulkTime.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileComparators.GetBulkTime.html
new file mode 100644
index 000..cc2afbe
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileComparators.GetBulkTime.html
@@ -0,0 +1,294 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+
+StoreFileComparators.GetBulkTime (Apache HBase 2.0.0-SNAPSHOT 
API)
+
+
+
+
+
+var methods = {"i0":10};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
+var altColor = "altColor";
+var rowColor = "rowColor";
+var tableTab = "tableTab";
+var activeTableTab = "activeTableTab";
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+PrevClass
+NextClass
+
+
+Frames
+NoFrames
+
+
+AllClasses
+
+
+
+
+
+
+
+Summary:
+Nested|
+Field|
+Constr|
+Method
+
+
+Detail:
+Field|
+Constr|
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.regionserver
+Class 
StoreFileComparators.GetBulkTime
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
+
+
+org.apache.hadoop.hbase.regionserver.StoreFileComparators.GetBulkTime
+
+
+
+
+
+
+
+All Implemented Interfaces:
+com.google.common.base.FunctionStoreFile,http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
+
+
+Enclosing class:
+StoreFileComparators
+
+
+
+private static class StoreFileComparators.GetBulkTime
+extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
+implements com.google.common.base.FunctionStoreFile,http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
+
+
+
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors
+
+Modifier
+Constructor and Description
+
+
+private 
+GetBulkTime()
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All MethodsInstance MethodsConcrete Methods
+
+Modifier and Type
+Method and Description
+
+
+http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
+apply(StoreFilesf)
+
+
+
+
+
+
+Methods inherited from classjava.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
+http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--;
 title="class or interface in java.lang">clone, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-;
 title="class or interface in java.lang">equals, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--;
 title="class or interface in java.lang">finalize, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--;
 title="class or interface in java.lang">getClass, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--;
 title="class or interface in java.lang">hashCode, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--;
 title="class or interface in java.lang">notify, http://docs.oracle.com/javase/8/docs/api/java/lang
 /Object.html?is-external=true#notifyAll--" title="class or interface in 
java.lang">notifyAll, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--;
 title="class or interface in java.lang">toString, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--;
 title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-;
 title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-;
 title="class or interface in java.lang">wait
+
+
+
+
+
+Methods inherited from interfacecom.google.common.base.Function
+equals
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Constructor Detail
+
+
+
+
+
+GetBulkTime
+privateGetBulkTime()
+
+
+

[42/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html
index 4f40861..5eb9bba 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CacheConfig.html
@@ -570,7 +570,7 @@
 
 
 private CacheConfig
-StoreFile.cacheConf
+HStoreFile.cacheConf
 
 
 protected CacheConfig
@@ -590,20 +590,24 @@
 
 
 
-(package private) CacheConfig
+CacheConfig
 StoreFile.getCacheConf()
 
 
 CacheConfig
-HRegionServer.getCacheConfig()
+HStoreFile.getCacheConf()
 
 
 CacheConfig
+HRegionServer.getCacheConfig()
+
+
+CacheConfig
 Store.getCacheConfig()
 Used for tests.
 
 
-
+
 CacheConfig
 HStore.getCacheConfig()
 
@@ -661,50 +665,48 @@
org.apache.hadoop.fs.FileSystemfs)
 
 
-StoreFile(org.apache.hadoop.fs.FileSystem fs,
- org.apache.hadoop.fs.Path p,
- org.apache.hadoop.conf.Configuration conf,
- CacheConfig cacheConf,
- BloomType cfBloomType)
+HStoreFile(org.apache.hadoop.fs.FileSystem fs,
+  org.apache.hadoop.fs.Path p,
+  org.apache.hadoop.conf.Configuration conf,
+  CacheConfig cacheConf,
+  BloomType cfBloomType)
 Deprecated.
 Now we will specify whether the StoreFile is for primary replica when
- constructing, so please use
- StoreFile.StoreFile(FileSystem, Path, Configuration, CacheConfig, BloomType, boolean)
- directly.
+ constructing, so please use HStoreFile.HStoreFile(FileSystem, Path, Configuration,
+ CacheConfig, BloomType, boolean) directly.



-StoreFile(org.apache.hadoop.fs.FileSystem fs,
- org.apache.hadoop.fs.Path p,
- org.apache.hadoop.conf.Configuration conf,
- CacheConfig cacheConf,
- BloomType cfBloomType,
- boolean primaryReplica)
+HStoreFile(org.apache.hadoop.fs.FileSystem fs,
+  org.apache.hadoop.fs.Path p,
+  org.apache.hadoop.conf.Configuration conf,
+  CacheConfig cacheConf,
+  BloomType cfBloomType,
+  boolean primaryReplica)
 Constructor, loads a reader and its indices, etc.



-StoreFile(org.apache.hadoop.fs.FileSystem fs,
- StoreFileInfo fileInfo,
- org.apache.hadoop.conf.Configuration conf,
- CacheConfig cacheConf,
- BloomType cfBloomType)
+HStoreFile(org.apache.hadoop.fs.FileSystem fs,
+  StoreFileInfo fileInfo,
+  org.apache.hadoop.conf.Configuration conf,
+  CacheConfig cacheConf,
+  BloomType cfBloomType)
 Deprecated.
 Now we will specify whether the StoreFile is for primary replica when
- constructing, so please use
- StoreFile.StoreFile(FileSystem, StoreFileInfo, Configuration, CacheConfig, BloomType, boolean)
- directly.
+ constructing, so please use HStoreFile.HStoreFile(FileSystem, StoreFileInfo,
+ Configuration, CacheConfig, BloomType, boolean) directly.



-StoreFile(org.apache.hadoop.fs.FileSystem fs,
- StoreFileInfo fileInfo,
- org.apache.hadoop.conf.Configuration conf,
- CacheConfig cacheConf,
- BloomType cfBloomType,
- boolean primaryReplica)
+HStoreFile(org.apache.hadoop.fs.FileSystem fs,
+  StoreFileInfo fileInfo,
+  org.apache.hadoop.conf.Configuration conf,
+  CacheConfig cacheConf,
+  BloomType cfBloomType,
+  boolean primaryReplica)
 Constructor, loads a reader and its indices, etc.
 
 
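For illustration, a minimal sketch of the constructor migration the deprecation notes above describe, assuming the six-argument HStoreFile constructor listed in this diff; the path and bloom type below are placeholders, not values from the HBase source.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.regionserver.HStoreFile;

public class HStoreFileConstructorSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(conf);
    // Placeholder path to an existing HFile; adjust for a real cluster layout.
    Path hfile = new Path("/hbase/data/default/TestTable/r1/cf/storefile1");
    CacheConfig cacheConf = new CacheConfig(conf);

    // Old (deprecated): new StoreFile(fs, hfile, conf, cacheConf, BloomType.ROW)
    // New: state explicitly whether this store file is read as the primary replica.
    boolean primaryReplica = true;
    HStoreFile sf = new HStoreFile(fs, hfile, conf, cacheConf, BloomType.ROW, primaryReplica);
    System.out.println("Created store file handle: " + sf);
  }
}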
@@ -899,7 +901,7 @@
 int maxKeys,
 HFile.Writer writer)
 Creates a new Delete Family Bloom filter at the time of
- StoreFile writing.
+ StoreFile writing.



@@ -910,7 +912,7 @@
  int maxKeys,
  HFile.Writer writer)
 Creates a new general (Row or RowCol) Bloom filter at the time of
- StoreFile writing.
+ StoreFile writing.
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
index 6fec015..63d7b92 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
@@ -223,7 +223,7 @@
 int maxKeys,
 HFile.Writer writer)
 Creates a new Delete Family Bloom filter at the time of
- StoreFile writing.
+ StoreFile writing.
 
 
 
@@ -234,7 +234,7 @@
  

[19/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
index cfed3dd..ecd13b1 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.CompletedProcedureCleaner.html
@@ -288,7 +288,7 @@ extends Procedure
-[inherited-method links from org.apache.hadoop.hbase.procedure2.Procedure: acquireLock ... wasExecuted]
+[same inherited-method links, regenerated to also list getProcedureMetrics]
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureInMemoryChore.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureInMemoryChore.html 
b/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureInMemoryChore.html
index 0ab7f2b..6e32eab 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureInMemoryChore.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/ProcedureInMemoryChore.html
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 PrevClass
-NextClass
+NextClass
 
 
 Frames
@@ -243,7 +243,7 @@ extends Procedure
-[inherited-method links from org.apache.hadoop.hbase.procedure2.Procedure: acquireLock ... toStringSimpleSB; the archived message is cut off partway through this list]
 

[36/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.html
 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.html
index 5c8ee8c..3e64666 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.html
@@ -331,7 +331,7 @@ extends Procedure
-[inherited-method links from org.apache.hadoop.hbase.procedure2.Procedure: addStackIndex ... wasExecuted]
+[same inherited-method links, regenerated to also list getProcedureMetrics]
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.html 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.html
index a6aa420..99f9e9a 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.html
@@ -319,7 +319,7 @@ extends Procedure
-[inherited-method links from org.apache.hadoop.hbase.procedure2.Procedure: addStackIndex ... wasExecuted]
+[same inherited-method links, regenerated to also list getProcedureMetrics; the archived message is cut off partway through this list]

[12/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/ImmutableMemStoreLAB.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/ImmutableMemStoreLAB.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/ImmutableMemStoreLAB.html
index fd5f5bf..4da64b3 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/ImmutableMemStoreLAB.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/ImmutableMemStoreLAB.html

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
index 6f3e7f8..056e442 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
@@ -360,7 +360,7 @@
 void
 postCompact(Store store,
-   StoreFile resultFile,
+   StoreFile resultFile,
    CompactionRequest request,
    User user)
 Called after the store compaction has completed.
@@ -369,10 +369,10 @@
 void
 postCompactSelection(Store store,
-    com.google.common.collect.ImmutableList<StoreFile> selected,
+    com.google.common.collect.ImmutableList<StoreFile> selected,
     CompactionRequest request,
     User user)
-Called after the StoreFiles to be compacted have been selected from the available
+Called after the StoreFiles to be compacted have been selected from the available
  candidates.


@@ -409,7 +409,7 @@
 void
 postFlush(Store store,
- StoreFile storeFile)
+ StoreFile storeFile)
 Invoked after a memstore flush


@@ -605,10 +605,10 @@
 boolean
 preCompactSelection(Store store,
-   List<StoreFile> candidates,
+   List<StoreFile> candidates,
    CompactionRequest request,
    User user)
-Called prior to selecting the StoreFiles for compaction from the list of currently
+Called prior to selecting the StoreFiles for compaction from the list of currently
  available candidates.


@@ -1031,11 +1031,11 @@
 preCompactSelection
 public boolean preCompactSelection(Store store,
-   List<StoreFile> candidates,
+   List<StoreFile> candidates,
    CompactionRequest request,
    User user)
             throws IOException
-Called prior to selecting the StoreFiles for compaction from the list of currently
+Called prior to selecting the StoreFiles for compaction from the list of currently
  available candidates.
 
 Parameters:
@@ -1056,10 +1056,10 @@
 postCompactSelection
 public void postCompactSelection(Store store,
- com.google.common.collect.ImmutableList<StoreFile> selected,
+ com.google.common.collect.ImmutableList<StoreFile> selected,
  CompactionRequest request,
  User user)
-Called after the StoreFiles to be compacted have been selected from the available
+Called after the StoreFiles to be compacted have been selected from the available
  candidates.
 
 Parameters:
@@ -1100,7 +1100,7 @@
 postCompact
 public void postCompact(Store store,
-StoreFile resultFile,
+StoreFile resultFile,
 CompactionRequest request,
 User user)
  throws IOException
@@ -1186,7 +1186,7 @@
 postFlush
 public void postFlush(Store store,
-  StoreFile storeFile)
+ StoreFile storeFile)
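The host methods above are what HBase itself calls around compactions and flushes; user code normally hooks into them through a RegionObserver coprocessor. A minimal sketch follows, assuming the BaseRegionObserver base class and the three-argument postCompact overload present in this code line; the logging is illustrative only.

import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.StoreFile;

/** Logs the result file produced by each compaction on the regions it is loaded on. */
public class CompactionLoggingObserver extends BaseRegionObserver {
  private static final Log LOG = LogFactory.getLog(CompactionLoggingObserver.class);

  @Override
  public void postCompact(ObserverContext<RegionCoprocessorEnvironment> ctx,
      Store store, StoreFile resultFile) throws IOException {
    // Reached via RegionCoprocessorHost.postCompact(...) shown in the diff above.
    LOG.info("Compaction of " + store + " produced " + resultFile);
  }
}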

[21/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/procedure2/OnePhaseProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/OnePhaseProcedure.html 
b/devapidocs/org/apache/hadoop/hbase/procedure2/OnePhaseProcedure.html
index bad5545..5cf3dc5 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/OnePhaseProcedure.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/OnePhaseProcedure.html
@@ -180,7 +180,7 @@ extends Procedure
-[inherited-method links from org.apache.hadoop.hbase.procedure2.Procedure: abort ... wasExecuted]
+[same inherited-method links, regenerated to also list getProcedureMetrics]
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/procedure2/Procedure.LockState.html
--
diff --git 

[26/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.html
index 909f75b..d39e8f3 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.html
@@ -360,7 +360,7 @@ extends Procedure
-[inherited-method links from org.apache.hadoop.hbase.procedure2.Procedure: addStackIndex ... wasExecuted]
+[same inherited-method links, regenerated to also list getProcedureMetrics]
 
 
 



[45/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/TagUtil.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/TagUtil.html 
b/devapidocs/org/apache/hadoop/hbase/TagUtil.html
index e626561..b0b9f8e 100644
--- a/devapidocs/org/apache/hadoop/hbase/TagUtil.html
+++ b/devapidocs/org/apache/hadoop/hbase/TagUtil.html
@@ -312,7 +312,7 @@ extends Object
 
 
 EMPTY_TAGS_ITR
-static final Iterator<Tag> EMPTY_TAGS_ITR
+static final Iterator<Tag> EMPTY_TAGS_ITR
 Iterator returned when no Tags. Used by CellUtil too.
 
 
@@ -576,7 +576,7 @@ extends Object
 
 
 checkForTagsLength
-public static void checkForTagsLength(int tagsLength)
+public static void checkForTagsLength(int tagsLength)
 Check the length of tags. If it is invalid, throw IllegalArgumentException.
 
 Parameters:
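A minimal sketch of calling the length check described above; the length value is made up, and the only behaviour relied on is the documented IllegalArgumentException for invalid lengths.

import org.apache.hadoop.hbase.TagUtil;

public class TagLengthCheckSketch {
  public static void main(String[] args) {
    int tagsLength = 8;  // made-up length, for illustration only
    // Throws IllegalArgumentException when the length is invalid.
    TagUtil.checkForTagsLength(tagsLength);
    System.out.println("tags length " + tagsLength + " accepted");
  }
}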

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html
 
b/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html
index cf706c8..3191f99 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.FileableStoreFile.html
@@ -120,7 +120,7 @@ var activeTableTab = "activeTableTab";
 
 private static class HFileArchiver.FileableStoreFile
 extends HFileArchiver.File
-HFileArchiver.File adapter for a StoreFile living on a FileSystem.
+HFileArchiver.File adapter for a StoreFile living on a FileSystem.
 
 
@@ -141,7 +141,7 @@ extends Field and Description
 
 
-(package private) StoreFile
+(package private) StoreFile
 file
 
 
@@ -167,7 +167,7 @@ extends 
 FileableStoreFile(org.apache.hadoop.fs.FileSystemfs,
- StoreFilestore)
+ StoreFilestore)
 
 
 
@@ -249,7 +249,7 @@ extends 
 
 file
-StoreFile file
+StoreFile file
 
 
 
@@ -267,7 +267,7 @@ extends 
 FileableStoreFile
 publicFileableStoreFile(org.apache.hadoop.fs.FileSystemfs,
- StoreFilestore)
+ StoreFilestore)
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.StoreToFile.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.StoreToFile.html 
b/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.StoreToFile.html
index e99e39f..454bfc0 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.StoreToFile.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.StoreToFile.html
@@ -100,7 +100,7 @@ var activeTableTab = "activeTableTab";
 http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
 
 
-org.apache.hadoop.hbase.backup.HFileArchiver.FileConverterStoreFile
+org.apache.hadoop.hbase.backup.HFileArchiver.FileConverterStoreFile
 
 
 org.apache.hadoop.hbase.backup.HFileArchiver.StoreToFile
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 All Implemented Interfaces:
-com.google.common.base.FunctionStoreFile,HFileArchiver.File
+com.google.common.base.FunctionStoreFile,HFileArchiver.File
 
 
 Enclosing class:
@@ -123,8 +123,8 @@ var activeTableTab = "activeTableTab";
 
 
 private static class HFileArchiver.StoreToFile
-extends HFileArchiver.FileConverter<StoreFile>
-Convert the StoreFile into something we can manage in the archive
+extends HFileArchiver.FileConverter<StoreFile>
+Convert the StoreFile into something we can manage in the archive
  methods
 
 
@@ -178,7 +178,7 @@ extends 
 HFileArchiver.File
-apply(StoreFile input)
+apply(StoreFile input)
 
 
 
@@ -232,7 +232,7 @@ extends 
 
 apply
-public HFileArchiver.File apply(StoreFile input)
+public HFileArchiver.File apply(StoreFile input)
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.html 
b/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.html
index 65ef694..133f404 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/HFileArchiver.html
@@ -112,7 +112,7 @@ var 

[18/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.html 
b/devapidocs/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.html
index 2d3f6cf..6eb9d08 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.html
@@ -380,7 +380,7 @@ extends Procedure
-[inherited-method links from org.apache.hadoop.hbase.procedure2.Procedure: acquireLock ... wasExecuted]
+[same inherited-method links, regenerated to also list getProcedureMetrics]
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/procedure2/TwoPhaseProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/TwoPhaseProcedure.html 
b/devapidocs/org/apache/hadoop/hbase/procedure2/TwoPhaseProcedure.html
index 5331e73..8559f5a 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/TwoPhaseProcedure.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/TwoPhaseProcedure.html
@@ -180,7 +180,7 @@ extends Procedure
-[inherited-method links from org.apache.hadoop.hbase.procedure2.Procedure: abort ... removeStackIndex; the archived message is cut off partway through this list]
 

[24/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
index fe8187b..b6e0403 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
@@ -211,9 +211,9 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
+org.apache.hadoop.hbase.master.procedure.DisableTableProcedure.MarkRegionOfflineOpResult
 org.apache.hadoop.hbase.master.procedure.TableProcedureInterface.TableOperationType
 org.apache.hadoop.hbase.master.procedure.ServerProcedureInterface.ServerOperationType
-org.apache.hadoop.hbase.master.procedure.DisableTableProcedure.MarkRegionOfflineOpResult
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/metrics/MetricsInfoImpl.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/metrics/MetricsInfoImpl.html 
b/devapidocs/org/apache/hadoop/hbase/metrics/MetricsInfoImpl.html
index 4e1853c..cdea00d 100644
--- a/devapidocs/org/apache/hadoop/hbase/metrics/MetricsInfoImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/metrics/MetricsInfoImpl.html
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 PrevClass
-NextClass
+NextClass
 
 
 Frames
@@ -357,7 +357,7 @@ implements org.apache.hadoop.metrics2.MetricsInfo
 
 
 PrevClass
-NextClass
+NextClass
 
 
 Frames

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/metrics/OperationMetrics.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/metrics/OperationMetrics.html 
b/devapidocs/org/apache/hadoop/hbase/metrics/OperationMetrics.html
new file mode 100644
index 000..e714fcc
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/metrics/OperationMetrics.html
@@ -0,0 +1,415 @@
+OperationMetrics (Apache HBase 2.0.0-SNAPSHOT API)
+[standard Javadoc page scripts and navigation header omitted]
+
+org.apache.hadoop.hbase.metrics
+Class OperationMetrics
+
+
+
+java.lang.Object
+
+
+org.apache.hadoop.hbase.metrics.OperationMetrics
+
+
+
+
+
+
+
+
+public class OperationMetrics
+extends Object
+Container class for commonly collected metrics for most operations. Instantiate this class to
+ collect submitted count, failed count and time histogram for an operation.
+
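A sketch of the usage this description suggests. The (MetricRegistry, prefix) constructor and the getSubmittedCounter/getFailedCounter/getTimeHisto accessors are assumptions made for the sketch and are not shown in this fragment.

import org.apache.hadoop.hbase.metrics.Counter;
import org.apache.hadoop.hbase.metrics.Histogram;
import org.apache.hadoop.hbase.metrics.MetricRegistry;
import org.apache.hadoop.hbase.metrics.OperationMetrics;

public class OperationMetricsSketch {
  // Wraps an operation with submitted/failed counts and a latency histogram.
  static void runTracked(MetricRegistry registry, Runnable operation) {
    OperationMetrics metrics = new OperationMetrics(registry, "assign"); // assumed (registry, prefix) constructor
    Counter submitted = metrics.getSubmittedCounter();  // assumed accessor
    Counter failed = metrics.getFailedCounter();        // assumed accessor
    Histogram timeHisto = metrics.getTimeHisto();       // assumed accessor

    long start = System.nanoTime();
    submitted.increment();
    try {
      operation.run();
    } catch (RuntimeException e) {
      failed.increment();
      throw e;
    } finally {
      timeHisto.update((System.nanoTime() - start) / 1_000_000L); // record millis
    }
  }
}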
+
+
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields
+
+Modifier and Type
+Field and Description
+
+
+private static String
+FAILED_COUNT
+
+
+private Counter
+failedCounter
+
+
+private static String
+SUBMITTED_COUNT
+
+
+private Counter
+submittedCounter
+
+
+private static String

[13/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/HStoreFile.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HStoreFile.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HStoreFile.html
new file mode 100644
index 000..5c5be22
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HStoreFile.html
@@ -0,0 +1,1424 @@
+HStoreFile (Apache HBase 2.0.0-SNAPSHOT API)
+[standard Javadoc page scripts and navigation header omitted]
+
+org.apache.hadoop.hbase.regionserver
+Class HStoreFile
+
+
+
+java.lang.Object
+
+
+org.apache.hadoop.hbase.regionserver.HStoreFile
+
+
+
+
+
+
+
+All Implemented Interfaces:
+StoreFile
+
+
+
+@InterfaceAudience.Private
+public class HStoreFile
+extends Object
+implements StoreFile
+A Store data file.  Stores usually have one or more of these files.  They
+ are produced by flushing the memstore to disk.  To
+ create, instantiate a writer using StoreFileWriter.Builder
+ and append data. Be sure to add any metadata before calling close on the
+ Writer (Use the appendMetadata convenience methods). On close, a StoreFile
+ is sitting in the Filesystem.  To refer to it, create a StoreFile instance
+ passing filesystem and path.  To read, call initReader().
+ StoreFiles may also reference store files in another Store.
+
+ The reason for this weird pattern where you use a different instance for the
+ writer and a reader is that we write once but read a lot more.
+
+
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields
+
+Modifier and Type
+Field and Description
+
+
+private CacheConfig
+cacheConf
+
+
+private BloomType
+cfBloomType
+Bloom filter type specified in column family 
configuration.
+
+
+
+private boolean
+compactedAway
+
+
+private Comparator<Cell>
+comparator
+
+
+private static boolean
+DEFAULT_STORE_FILE_READER_NO_READAHEAD
+
+
+private boolean
+excludeFromMinorCompaction
+
+
+private StoreFileInfo
+fileInfo
+
+
+private Cell
+firstKey
+
+
+private org.apache.hadoop.fs.FileSystem
+fs
+
+
+private Cell
+lastKey
+
+
+private static org.apache.commons.logging.Log
+LOG
+
+
+private AtomicBoolean
+majorCompaction
+
+
+private long
+maxMemstoreTS
+
+
+private Map<byte[],byte[]>
+metadataMap
+Map of the metadata entries in the corresponding 
HFile.
+
+
+
+private boolean
+noReadahead
+
+
+private boolean
+primaryReplica
+
+
+private StoreFileReader
+reader
+
+
+private AtomicInteger
+refCount
+
+
+private long
+sequenceid
+
+
+
+
+
+
+Fields inherited from interface org.apache.hadoop.hbase.regionserver.StoreFile
+BLOOM_FILTER_TYPE_KEY,
 BULKLOAD_TASK_KEY,
 BULKLOAD_TIME_KEY,
 DELETE_FAMILY_COUNT,
 EARLIEST_PUT_TS,
 EXCLUDE_FROM_MINOR_COMPACTION_KEY,
 LAST_BLOOM_KEY,
 MAJOR_COMPACTION_KEY,
 MAX_SEQ_ID_KEY,
 MOB_CELLS_COUNT,
 SKIP_RESET_SEQ_ID,
 

[29/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.html
index d38038b..3e2bda7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.html
@@ -384,7 +384,7 @@ extends Procedure
-[inherited-method links from org.apache.hadoop.hbase.procedure2.Procedure: addStackIndex ... wasExecuted]
+[same inherited-method links, regenerated to also list getProcedureMetrics]
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.html 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.html
index 26235ba..1bda523 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.html
@@ -416,7 +416,7 @@ extends Procedure
-[inherited-method links from org.apache.hadoop.hbase.procedure2.Procedure: addStackIndex ... wasExecuted]
+[same inherited-method links, regenerated to also list getProcedureMetrics; the archived message is cut off partway through this list]
 

[47/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 200d9bd..fde0305 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
 en-us
 2007 - 2017 The Apache Software Foundation
 
-  File: 2223,
- Errors: 14486,
+  File: 2227,
+ Errors: 14468,
  Warnings: 0,
  Infos: 0
   
@@ -1516,6 +1516,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.StoreFileComparators.java;>org/apache/hadoop/hbase/regionserver/StoreFileComparators.java
+
+
+  0
+
+
+  0
+
+
+  1
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.rest.filter.RestCsrfPreventionFilter.java;>org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
 
 
@@ -1698,6 +1712,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.procedure2.ProcedureMetrics.java;>org/apache/hadoop/hbase/procedure2/ProcedureMetrics.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.java;>org/apache/hadoop/hbase/rest/protobuf/generated/VersionMessage.java
 
 
@@ -4283,7 +4311,7 @@ under the License.
   0
 
 
-  14
+  2
 
   
   
@@ -6481,7 +6509,7 @@ under the License.
   0
 
 
-  8
+  9
 
   
   
@@ -7503,7 +7531,7 @@ under the License.
   0
 
 
-  2
+  3
 
   
   
@@ -10065,7 +10093,7 @@ under the License.
   0
 
 
-  1
+  2
 
   
   
@@ -11703,7 +11731,7 @@ under the License.
   0
 
 
-  12
+  4
 
   
   
@@ -15749,7 +15777,7 @@ under the License.
   0
 
 
-  8
+  7
 
   
   
@@ -15819,7 +15847,7 @@ under the License.
   0
 
 
-  11
+  10
 
   
   
@@ -17359,7 +17387,7 @@ under the License.
   0
 
 
-  25
+  26
 
   
   
@@ -18689,7 +18717,7 @@ under the License.
   0
 
 
-  21
+  19
 
   
   
@@ -20509,7 +20537,7 @@ under the License.
   0
 
 
-  0
+  1
 
   
   
@@ -20747,7 +20775,7 @@ under the License.
   0
 
 
-  7
+  5
 
   
   
@@ -23183,7 +23211,7 @@ under the License.
   0
 
 
-  6
+  5
 
   
   
@@ -23846,6 +23874,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.metrics.OperationMetrics.java;>org/apache/hadoop/hbase/metrics/OperationMetrics.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+

[51/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/6ad4f21a
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/6ad4f21a
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/6ad4f21a

Branch: refs/heads/asf-site
Commit: 6ad4f21aa220d0655c3f6f44237c62998260ec6a
Parents: ba217cb
Author: jenkins 
Authored: Tue Jun 6 14:59:41 2017 +
Committer: jenkins 
Committed: Tue Jun 6 14:59:41 2017 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf|  4445 +--
 apache_hbase_reference_guide.pdfmarks   | 4 +-
 .../hbase/quotas/SpaceViolationPolicy.html  | 4 +-
 book.html   | 8 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 25890 -
 checkstyle.rss  |88 +-
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html | 5 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 4 +-
 devapidocs/allclasses-frame.html|17 +-
 devapidocs/allclasses-noframe.html  |17 +-
 devapidocs/constant-values.html |   105 +-
 devapidocs/deprecated-list.html |30 +-
 devapidocs/index-all.html   |   531 +-
 devapidocs/org/apache/hadoop/hbase/TagUtil.html | 4 +-
 .../backup/HFileArchiver.FileableStoreFile.html |10 +-
 .../hbase/backup/HFileArchiver.StoreToFile.html |12 +-
 .../hadoop/hbase/backup/HFileArchiver.html  |14 +-
 .../class-use/FailedArchiveException.html   | 2 +-
 .../backup/class-use/HFileArchiver.File.html| 4 +-
 .../class-use/HFileArchiver.FileConverter.html  | 2 +-
 .../hadoop/hbase/backup/package-summary.html| 6 +-
 .../hadoop/hbase/backup/package-tree.html   | 2 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |   120 +-
 .../hadoop/hbase/class-use/CellComparator.html  | 9 +-
 .../hbase/class-use/HDFSBlocksDistribution.html | 6 +-
 .../hadoop/hbase/class-use/HRegionInfo.html | 6 +-
 .../apache/hadoop/hbase/class-use/KeyValue.html |18 +-
 .../hadoop/hbase/class-use/TableName.html   | 2 +-
 .../InterfaceAudience.LimitedPrivate.html   | 6 +-
 .../class-use/InterfaceAudience.Private.html|   168 +-
 .../class-use/InterfaceStability.Evolving.html  | 6 +
 .../hbase/classification/package-tree.html  | 6 +-
 .../hbase/client/class-use/Connection.html  | 4 +-
 .../hadoop/hbase/client/class-use/Table.html| 4 +-
 .../hadoop/hbase/client/package-tree.html   |26 +-
 .../RegionObserver.MutationType.html| 4 +-
 .../hbase/coprocessor/RegionObserver.html   |58 +-
 .../coprocessor/class-use/ObserverContext.html  |22 +-
 .../class-use/RegionCoprocessorEnvironment.html |22 +-
 .../example/ZooKeeperScanPolicyObserver.html| 4 +-
 .../hadoop/hbase/executor/package-tree.html | 2 +-
 .../hadoop/hbase/filter/package-tree.html   | 8 +-
 .../hbase/io/hfile/class-use/CacheConfig.html   |70 +-
 .../hbase/io/hfile/class-use/HFile.Writer.html  | 4 +-
 .../hadoop/hbase/io/hfile/package-tree.html | 6 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   | 4 +-
 .../hadoop/hbase/mapreduce/package-tree.html| 4 +-
 .../org/apache/hadoop/hbase/master/HMaster.html |   298 +-
 .../hadoop/hbase/master/MasterServices.html |   163 +-
 .../hbase/master/MetricsAssignmentManager.html  |   160 +-
 .../master/MetricsAssignmentManagerSource.html  |   200 +-
 .../MetricsAssignmentManagerSourceImpl.html |   189 +-
 .../hadoop/hbase/master/MetricsMaster.html  |   109 +-
 .../hbase/master/MetricsMasterSource.html   |   116 +-
 .../hbase/master/MetricsMasterSourceImpl.html   |58 +-
 .../master/assignment/AssignProcedure.html  |97 +-
 ...signmentManager.RegionInTransitionChore.html | 2 +-
 .../master/assignment/AssignmentManager.html|44 +-
 .../assignment/GCMergedRegionsProcedure.html| 2 +-
 .../master/assignment/GCRegionProcedure.html| 2 +-
 .../assignment/MergeTableRegionsProcedure.html  |   119 +-
 .../master/assignment/MoveRegionProcedure.html  | 2 +-
 .../assignment/RegionTransitionProcedure.html   | 2 +-
 ...tTableRegionProcedure.StoreFileSplitter.html |18 +-
 .../assignment/SplitTableRegionProcedure.html   |   149 +-
 .../master/assignment/UnassignProcedure.html|91 +-
 

[41/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html 
b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
index 28d0b5d..851a198 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
@@ -823,7 +823,7 @@ implements getMasterFileSystem()
 
 
-(package private) MetricsMaster
+MetricsMaster
 getMasterMetrics()
 
 
@@ -2148,7 +2148,13 @@ implements 
 
 getMasterMetrics
-MetricsMaster getMasterMetrics()
+public MetricsMaster getMasterMetrics()
+
+Specified by:
+getMasterMetrics in interface MasterServices
+Returns:
+Master's instance of MetricsMaster
+
 
 
 
@@ -2157,7 +2163,7 @@ implements 
 
 initializeZKBasedSystemTrackers
-void initializeZKBasedSystemTrackers()
+void initializeZKBasedSystemTrackers()
                                throws IOException,
                                       InterruptedException,
                                       org.apache.zookeeper.KeeperException,
@@ -2178,7 +2184,7 @@ implements 
 
 finishActiveMasterInitialization
-private void finishActiveMasterInitialization(MonitoredTask status)
+private void finishActiveMasterInitialization(MonitoredTask status)
                                        throws IOException,
                                               InterruptedException,
                                               org.apache.zookeeper.KeeperException,
@@ -2211,7 +2217,7 @@ implements 
 
 updateConfigurationForSpaceQuotaObserver
-public void updateConfigurationForSpaceQuotaObserver(org.apache.hadoop.conf.Configuration conf)
+public void updateConfigurationForSpaceQuotaObserver(org.apache.hadoop.conf.Configuration conf)
 Adds the MasterSpaceQuotaObserver to the list of configured Master observers to
  automatically remove space quotas for a table when that table is deleted.
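[Editor's aside] Registering a master observer comes down to appending its class name to the hbase.coprocessor.master.classes setting; the helper below is only an illustrative sketch of that idea using the standard Hadoop Configuration API, not the HMaster code from this patch (the class and method names are made up for the example).

    import org.apache.hadoop.conf.Configuration;

    // Illustrative helper: append an observer class to the master coprocessor list,
    // keeping whatever classes are already configured and avoiding duplicates.
    public final class MasterObserverConfigSketch {
      static final String MASTER_COPROCESSOR_KEY = "hbase.coprocessor.master.classes";

      static void appendMasterObserver(Configuration conf, String observerClass) {
        String current = conf.get(MASTER_COPROCESSOR_KEY, "");
        if (current.isEmpty()) {
          conf.set(MASTER_COPROCESSOR_KEY, observerClass);
        } else if (!current.contains(observerClass)) {
          conf.set(MASTER_COPROCESSOR_KEY, current + "," + observerClass);
        }
      }
    }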
 
@@ -,7 +2228,7 @@ implements 
 
 initMobCleaner
-privatevoidinitMobCleaner()
+privatevoidinitMobCleaner()
 
 
 
@@ -2231,7 +2237,7 @@ implements 
 
 createMetaBootstrap
-MasterMetaBootstrap createMetaBootstrap(HMaster master,
+MasterMetaBootstrap createMetaBootstrap(HMaster master,
                                         MonitoredTask status)
 Create a MasterMetaBootstrap instance.
 
@@ -2242,7 +2248,7 @@ implements 
 
 createServerManager
-ServerManager createServerManager(MasterServices master)
+ServerManager createServerManager(MasterServices master)
                            throws IOException
 Create a ServerManager instance.
 
@@ -2257,7 +2263,7 @@ implements 
 
 waitForRegionServers
-privatevoidwaitForRegionServers(MonitoredTaskstatus)
+privatevoidwaitForRegionServers(MonitoredTaskstatus)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException,
   http://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true;
 title="class or interface in java.lang">InterruptedException
 
@@ -2273,7 +2279,7 @@ implements 
 
 initClusterSchemaService
-voidinitClusterSchemaService()
+voidinitClusterSchemaService()
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException,
   http://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true;
 title="class or interface in java.lang">InterruptedException
 
@@ -2289,7 +2295,7 @@ implements 
 
 initQuotaManager
-voidinitQuotaManager()
+voidinitQuotaManager()
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Throws:
@@ -2303,7 +2309,7 @@ implements 
 
 createQuotaSnapshotNotifier
-SpaceQuotaSnapshotNotifiercreateQuotaSnapshotNotifier()
+SpaceQuotaSnapshotNotifiercreateQuotaSnapshotNotifier()
 
 
 
@@ -2312,7 +2318,7 @@ implements 
 
 isCatalogJanitorEnabled
-booleanisCatalogJanitorEnabled()
+booleanisCatalogJanitorEnabled()
 
 
 
@@ -2321,7 +2327,7 @@ implements 
 
 isCleanerChoreEnabled
-booleanisCleanerChoreEnabled()

[43/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html
index 9a88032..8eac401 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html
@@ -237,7 +237,7 @@ the order they are declared.
 
 
 values
-public static RegionObserver.MutationType[] values()
+public static RegionObserver.MutationType[] values()
 Returns an array containing the constants of this enum type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -257,7 +257,7 @@ for (RegionObserver.MutationType c : RegionObserver.MutationType.values())
 
 
 valueOf
-public static RegionObserver.MutationType valueOf(String name)
+public static RegionObserver.MutationType valueOf(String name)
 Returns the enum constant of this type with the specified name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 
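[Editor's aside] The contract text above is truncated in this archive; the iteration example it refers to is the standard enum idiom. A self-contained rendering, assuming the hbase-server artifact of this release is on the classpath:

    import org.apache.hadoop.hbase.coprocessor.RegionObserver;

    // Standard Enum idiom described by the generated Javadoc: iterate the constants
    // and round-trip one through valueOf(String) using its exact declared name.
    public class MutationTypeValuesExample {
      public static void main(String[] args) {
        for (RegionObserver.MutationType c : RegionObserver.MutationType.values()) {
          RegionObserver.MutationType same = RegionObserver.MutationType.valueOf(c.name());
          System.out.println(c + " == " + same);
        }
      }
    }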

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.html 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.html
index 72c0e19..5e8fb1f 100644
--- a/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.html
+++ b/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.html
@@ -293,7 +293,7 @@ extends default void
 postCompact(ObserverContextRegionCoprocessorEnvironmentc,
Storestore,
-   StoreFileresultFile)
+   StoreFileresultFile)
 Deprecated.
 Use postCompact(ObserverContext,
 Store, StoreFile, CompactionRequest)
  instead
@@ -304,7 +304,7 @@ extends default void
 postCompact(ObserverContextRegionCoprocessorEnvironmentc,
Storestore,
-   StoreFileresultFile,
+   StoreFileresultFile,
CompactionRequestrequest)
 Called after compaction has completed and the new store 
file has been moved in to place.
 
@@ -313,7 +313,7 @@ extends default void
 postCompactSelection(ObserverContextRegionCoprocessorEnvironmentc,
 Storestore,
-com.google.common.collect.ImmutableListStoreFileselected)
+com.google.common.collect.ImmutableListStoreFileselected)
 Deprecated.
 use postCompactSelection(ObserverContext,
 Store, ImmutableList,
  CompactionRequest) instead.
@@ -324,9 +324,9 @@ extends default void
 postCompactSelection(ObserverContextRegionCoprocessorEnvironmentc,
 Storestore,
-com.google.common.collect.ImmutableListStoreFileselected,
+com.google.common.collect.ImmutableListStoreFileselected,
 CompactionRequestrequest)
-Called after the StoreFiles to compact 
have been selected from the available
+Called after the StoreFiles to compact 
have been selected from the available
  candidates.
 
 
@@ -369,7 +369,7 @@ extends default void
 postFlush(ObserverContextRegionCoprocessorEnvironmentc,
  Storestore,
- StoreFileresultFile)
+ StoreFileresultFile)
 Called after a Store's memstore is flushed to disk.
 
 
@@ -663,7 +663,7 @@ extends InternalScannerscanner,
   ScanTypescanType,
   CompactionRequestrequest)
-Called prior to writing the StoreFiles selected for 
compaction into a new
+Called prior to writing the StoreFiles selected for 
compaction into a new
  StoreFile.
 
 
@@ -707,7 +707,7 @@ extends InternalScanners,
  CompactionRequestrequest,
  longreadPoint)
-Called prior to writing the StoreFiles selected for 
compaction into a new
+Called prior to writing the StoreFiles selected for 
compaction into a new
  StoreFile and prior to creating the scanner used to read the 
input files.
 
 
@@ -715,7 +715,7 @@ extends default void
 preCompactSelection(ObserverContextRegionCoprocessorEnvironmentc,
Storestore,
-   http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListStoreFilecandidates)
+   http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class 

[44/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
index 765682c..c2e7be2 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
@@ -745,9 +745,10 @@
   MemStoreLABmemStoreLAB)
 
 
-(package private) byte[]
-StoreFile.getFileSplitPoint(CellComparator comparator)
-Gets the approximate mid-point of this file that is optimal for use in splitting it.
+(package private) static Optional<byte[]>
+StoreUtils.getFileSplitPoint(StoreFile file,
+ CellComparator comparator)
+Gets the approximate mid-point of the given file that is optimal for use in splitting it.
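[Editor's aside] A sketch of consuming the new Optional<byte[]> result. The method is package-private, so a real caller lives in org.apache.hadoop.hbase.regionserver; the helper class below is illustrative only and not part of the patch.

    package org.apache.hadoop.hbase.regionserver; // getFileSplitPoint is package-private

    import java.io.IOException;
    import java.util.Optional;
    import org.apache.hadoop.hbase.CellComparator;

    // Illustrative caller: unwrap the Optional split point, falling back to null
    // to mimic the old nullable byte[] contract of StoreFile.getFileSplitPoint.
    final class SplitPointSketch {
      static byte[] midKeyOrNull(StoreFile file, CellComparator comparator) throws IOException {
        Optional<byte[]> splitPoint = StoreUtils.getFileSplitPoint(file, comparator);
        return splitPoint.orElse(null); // empty => no useful mid-point, do not split on it
      }
    }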
 
 
 
@@ -808,7 +809,7 @@
 
 
 DefaultStoreFileManager(CellComparatorkvComparator,
-   http://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true;
 title="class or interface in java.util">ComparatorStoreFilestoreFileComparator,
+   http://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true;
 title="class or interface in java.util">ComparatorStoreFilestoreFileComparator,
org.apache.hadoop.conf.Configurationconf,
CompactionConfigurationcomConf)
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.html
index 7c193e1..48fa5fa 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.html
@@ -299,10 +299,14 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HDFSBlocksDistribution
-Region.getHDFSBlocksDistribution()
+HStoreFile.getHDFSBlockDistribution()
 
 
 HDFSBlocksDistribution
+Region.getHDFSBlocksDistribution()
+
+
+HDFSBlocksDistribution
 HRegion.getHDFSBlocksDistribution()
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
index 11845f7..04cfc72 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
@@ -897,7 +897,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  HRegionInforegionInfo,
  org.apache.hadoop.fs.PathtableDir,
  byte[]family,
- http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionStoreFilecompactedFiles)
+ http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionStoreFilecompactedFiles)
 Remove the store files, either by archiving them or 
outright deletion
 
 
@@ -5174,7 +5174,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 org.apache.hadoop.fs.Path
 HRegionFileSystem.mergeStoreFile(HRegionInfomergedRegion,
   http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringfamilyName,
-  StoreFilef,
+  StoreFilef,
   org.apache.hadoop.fs.PathmergedDir)
 Write out a merge reference under the given merges 
directory.
 
@@ -5353,7 +5353,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 org.apache.hadoop.fs.Path
 HRegionFileSystem.splitStoreFile(HRegionInfohri,
   http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringfamilyName,
-  StoreFilef,
+  StoreFilef,
   byte[]splitRow,
   booleantop,
   RegionSplitPolicysplitPolicy)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
--
diff --git 

[38/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/MetricsMaster.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/MetricsMaster.html 
b/devapidocs/org/apache/hadoop/hbase/master/MetricsMaster.html
index 099dbd7..01e3ca9 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/MetricsMaster.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/MetricsMaster.html
@@ -18,8 +18,8 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
+var methods = 
{"i0":9,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10};
+var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
@@ -111,7 +111,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceStability.Evolving
  @InterfaceAudience.Private
-public class MetricsMaster
+public class MetricsMaster
 extends Object
 This class is for maintaining the various master statistics
  and publishing them through the metrics interfaces.
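[Editor's aside] A hedged sketch of how the class is used, based only on the members shown on this page (the MetricsMasterWrapper constructor, incrementRequests, getMetricsSource); in the real code path HMaster constructs and owns the instance, so the wiring below is illustrative.

    import org.apache.hadoop.hbase.master.MetricsMaster;
    import org.apache.hadoop.hbase.master.MetricsMasterSource;
    import org.apache.hadoop.hbase.master.MetricsMasterWrapper;

    // Illustrative only; packages and wiring assumed from the Javadoc above.
    final class MetricsMasterSketch {
      static MetricsMasterSource recordRequests(MetricsMasterWrapper wrapper, long requests) {
        MetricsMaster metrics = new MetricsMaster(wrapper); // constructor shown on this page
        metrics.incrementRequests(requests);                // "How much to add to requests"
        return metrics.getMetricsSource();                  // source that publishes the stats
      }
    }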
@@ -152,6 +152,10 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 private MetricsMasterSource
 masterSource
 
+
+private ProcedureMetrics
+serverCrashProcMetrics
+
 
 
 
@@ -179,52 +183,62 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 Method Summary
 
-All MethodsInstance MethodsConcrete Methods
+All MethodsStatic MethodsInstance MethodsConcrete Methods
 
 Modifier and Type
 Method and Description
 
 
+static ProcedureMetrics
+convertToProcedureMetrics(OperationMetrics metrics)
+This is a utility function that converts OperationMetrics to ProcedureMetrics.
+
+
+
 MetricsMasterProcSource
 getMetricsProcSource()
 
-
+
 MetricsMasterQuotaSource
 getMetricsQuotaSource()
 
-
+
 MetricsMasterSource
 getMetricsSource()
 
-
+
+ProcedureMetrics
+getServerCrashProcMetrics()
+
+
 void
 incrementQuotaObserverTime(longexecutionTime)
 Sets the execution time of a period of the 
QuotaObserverChore.
 
 
-
+
 void
 incrementRequests(longinc)
 
-
+
 void
 setNumNamespacesInSpaceQuotaViolation(longnumNamespacesInViolation)
 Sets the number of namespaces in violation of a space 
quota.
 
 
-
+
 void
 setNumRegionSizeReports(longnumRegionReports)
 Sets the number of region size reports the master currently 
has in memory.
 
 
-
+
 void
 setNumSpaceQuotas(longnumSpaceQuotas)
 Sets the number of space quotas defined.
 
 
-
+
 void
 setNumTableInSpaceQuotaViolation(longnumTablesInViolation)
 Sets the number of table in violation of a space 
quota.
@@ -258,7 +272,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 LOG
-private static finalorg.apache.commons.logging.Log LOG
+private static finalorg.apache.commons.logging.Log LOG
 
 
 
@@ -267,7 +281,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 masterSource
-privateMetricsMasterSource masterSource
+privateMetricsMasterSource masterSource
 
 
 
@@ -276,16 +290,25 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 masterProcSource
-privateMetricsMasterProcSource masterProcSource
+privateMetricsMasterProcSource masterProcSource
 
 
 
 
 
-
+
 
 masterQuotaSource
-privateMetricsMasterQuotaSource masterQuotaSource
+privateMetricsMasterQuotaSource masterQuotaSource
+
+
+
+
+
+
+
+serverCrashProcMetrics
+privateProcedureMetrics serverCrashProcMetrics
 
 
 
@@ -302,7 +325,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 MetricsMaster
-publicMetricsMaster(MetricsMasterWrappermasterWrapper)
+publicMetricsMaster(MetricsMasterWrappermasterWrapper)
 
 
 
@@ -319,7 +342,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getMetricsSource
-publicMetricsMasterSourcegetMetricsSource()
+publicMetricsMasterSourcegetMetricsSource()
 
 
 
@@ -328,7 +351,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getMetricsProcSource
-publicMetricsMasterProcSourcegetMetricsProcSource()
+publicMetricsMasterProcSourcegetMetricsProcSource()
 
 
 
@@ -337,7 +360,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getMetricsQuotaSource
-publicMetricsMasterQuotaSourcegetMetricsQuotaSource()
+publicMetricsMasterQuotaSourcegetMetricsQuotaSource()
 
 
 
@@ -346,7 +369,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 incrementRequests
-public void incrementRequests(long inc)
+public void incrementRequests(long inc)
 
 Parameters:
 inc - How much to add to requests.
@@ 

[32/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.LocalityCostFunction.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.LocalityCostFunction.html
 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.LocalityCostFunction.html
index bd76fe4..eb78df6 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.LocalityCostFunction.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.LocalityCostFunction.html
@@ -118,10 +118,10 @@ var activeTableTab = "activeTableTab";
 
 
 
-static class StochasticLoadBalancer.LocalityCostFunction
+static class StochasticLoadBalancer.LocalityCostFunction
 extends StochasticLoadBalancer.CostFunction
 Compute a cost of a potential cluster configuration based upon where
- StoreFiles are located.
+ StoreFiles are located.
 
 
 
@@ -235,7 +235,7 @@ extends 
 
 LOCALITY_COST_KEY
-private static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String LOCALITY_COST_KEY
+private static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String LOCALITY_COST_KEY
 
 See Also:
 Constant
 Field Values
@@ -248,7 +248,7 @@ extends 
 
 DEFAULT_LOCALITY_COST
-private static finalfloat DEFAULT_LOCALITY_COST
+private static finalfloat DEFAULT_LOCALITY_COST
 
 See Also:
 Constant
 Field Values
@@ -261,7 +261,7 @@ extends 
 
 services
-privateMasterServices services
+privateMasterServices services
 
 
 
@@ -278,7 +278,7 @@ extends 
 
 LocalityCostFunction
-LocalityCostFunction(org.apache.hadoop.conf.Configurationconf,
+LocalityCostFunction(org.apache.hadoop.conf.Configurationconf,
  MasterServicessrv)
 
 
@@ -296,7 +296,7 @@ extends 
 
 setServices
-voidsetServices(MasterServicessrvc)
+voidsetServices(MasterServicessrvc)
 
 
 
@@ -305,7 +305,7 @@ extends 
 
 cost
-doublecost()
+doublecost()
 
 Specified by:
 costin
 classStochasticLoadBalancer.CostFunction

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.MemstoreSizeCostFunction.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.MemstoreSizeCostFunction.html
 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.MemstoreSizeCostFunction.html
index c138f2c..fb835d0 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.MemstoreSizeCostFunction.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.MemstoreSizeCostFunction.html
@@ -128,7 +128,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static class StochasticLoadBalancer.MemstoreSizeCostFunction
+static class StochasticLoadBalancer.MemstoreSizeCostFunction
 extends StochasticLoadBalancer.CostFromRegionLoadAsRateFunction
 Compute the cost of total memstore size.  The more unbalanced the higher the
 computed cost will be.  This uses a rolling average of regionload.
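[Editor's aside] As a rough intuition for "more unbalanced means higher cost", here is a toy illustration only; it is not the StochasticLoadBalancer's actual implementation, which also folds in the rolling average of region load mentioned above.

    // Toy cost: 0 when every server carries the same memstore size, approaching 1
    // as all of it concentrates on a single server.
    final class MemstoreImbalanceToy {
      static double cost(double[] memstorePerServer) {
        int n = memstorePerServer.length;
        if (n < 2) return 0;
        double total = 0;
        for (double v : memstorePerServer) total += v;
        if (total == 0) return 0;
        double mean = total / n;
        double deviation = 0;
        for (double v : memstorePerServer) deviation += Math.abs(v - mean);
        double worstCase = 2.0 * mean * (n - 1); // everything on one server
        return deviation / worstCase;
      }
    }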
@@ -250,7 +250,7 @@ extends 
 
 MEMSTORE_SIZE_COST_KEY
-private static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String MEMSTORE_SIZE_COST_KEY
+private static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String MEMSTORE_SIZE_COST_KEY
 
 See Also:
 Constant
 Field Values
@@ -263,7 +263,7 @@ extends 
 
 DEFAULT_MEMSTORE_SIZE_COST
-private static finalfloat DEFAULT_MEMSTORE_SIZE_COST
+private static finalfloat DEFAULT_MEMSTORE_SIZE_COST
 
 See Also:
 Constant
 Field Values
@@ -284,7 +284,7 @@ extends 
 
 MemstoreSizeCostFunction
-MemstoreSizeCostFunction(org.apache.hadoop.conf.Configurationconf)
+MemstoreSizeCostFunction(org.apache.hadoop.conf.Configurationconf)
 
 
 
@@ -301,7 +301,7 @@ extends 
 
 getCostFromRl
-protecteddoublegetCostFromRl(BalancerRegionLoadrl)
+protecteddoublegetCostFromRl(BalancerRegionLoadrl)
 
 Specified by:
 getCostFromRlin
 classStochasticLoadBalancer.CostFromRegionLoadFunction

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.MoveCostFunction.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.MoveCostFunction.html
 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.MoveCostFunction.html
index d126bea..6a00fea 100644
--- 

[16/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.html
index a0e8d7f..a69481a 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-class DefaultStoreFileManager
+class DefaultStoreFileManager
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements StoreFileManager
 Default implementation of StoreFileManager. Not 
thread-safe.
@@ -145,7 +145,7 @@ implements comConf
 
 
-private List<StoreFile>
+private List<StoreFile>
 compactedfiles
 List of compacted files inside this store that need to be excluded in reads
  because further new reads will be using only the newly created files out of compaction.
@@ -160,11 +160,11 @@ implements LOG
 
 
-private http://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true;
 title="class or interface in java.util">ComparatorStoreFile
+private http://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true;
 title="class or interface in java.util">ComparatorStoreFile
 storeFileComparator
 
 
-private 
com.google.common.collect.ImmutableListStoreFile
+private 
com.google.common.collect.ImmutableListStoreFile
 storefiles
 List of store files inside this store.
 
@@ -185,7 +185,7 @@ implements 
 DefaultStoreFileManager(CellComparatorkvComparator,
-   http://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true;
 title="class or interface in java.util">ComparatorStoreFilestoreFileComparator,
+   http://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true;
 title="class or interface in java.util">ComparatorStoreFilestoreFileComparator,
org.apache.hadoop.conf.Configurationconf,
CompactionConfigurationcomConf)
 
@@ -206,31 +206,31 @@ implements 
 void
-addCompactionResults(http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionStoreFilenewCompactedfiles,
-http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionStoreFileresults)
+addCompactionResults(http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionStoreFilenewCompactedfiles,
+http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionStoreFileresults)
 Adds only the new compaction results into the 
structure.
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionStoreFile
+http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionStoreFile
 clearCompactedFiles()
 Clears all the compacted files and returns them.
 
 
 
-com.google.common.collect.ImmutableCollectionStoreFile
+com.google.common.collect.ImmutableCollectionStoreFile
 clearFiles()
 Clears all the files currently in use and returns 
them.
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true;
 title="class or interface in java.util">IteratorStoreFile
+http://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true;
 title="class or interface in java.util">IteratorStoreFile
 getCandidateFilesForRowKeyBefore(KeyValuetargetKey)
 Gets initial, full list of candidate store files to check 
for row-key-before.
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionStoreFile
+http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionStoreFile
 getCompactedfiles()
 List of compacted files inside this store that needs to be 
excluded in reads
  because further new reads will be using only the newly created files out of 
compaction.
@@ -241,7 +241,7 @@ implements getCompactionPressure()
 
 

[37/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignProcedure.html 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignProcedure.html
index 546ed64..a8b9527 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignProcedure.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/assignment/AssignProcedure.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -124,7 +124,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class AssignProcedure
+public class AssignProcedure
 extends RegionTransitionProcedure
 Procedure that describes the assignment of a single region.
  There can only be one RegionTransitionProcedure per region running at a time
@@ -272,71 +272,78 @@ extends RegionStates.RegionStateNoderegionNode)
 
 
+protected ProcedureMetrics
+getProcedureMetrics(MasterProcedureEnv env)
+Override this method to provide procedure specific counters for submitted count, failed
+ count and time histogram.
+
+
+
 ServerName
 getServer(MasterProcedureEnvenv)
 Used by ServerCrashProcedure to see if this Assign/Unassign 
needs processing.
 
 
-
+
 TableProcedureInterface.TableOperationType
 getTableOperationType()
 Given an operation type we can take decisions about what to 
do with pending operations.
 
 
-
+
 private void
 handleFailure(MasterProcedureEnvenv,
  RegionStates.RegionStateNoderegionNode)
 Called when dispatch or subsequent OPEN request fail.
 
 
-
+
 private boolean
 incrementAndCheckMaxAttempts(MasterProcedureEnvenv,
 RegionStates.RegionStateNoderegionNode)
 
-
+
 protected boolean
 isRollbackSupported(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RegionTransitionStatestate)
 
-
+
 RemoteProcedureDispatcher.RemoteOperation
 remoteCallBuild(MasterProcedureEnvenv,
ServerNameserverName)
 
-
+
 protected void
 remoteCallFailed(MasterProcedureEnvenv,
 RegionStates.RegionStateNoderegionNode,
 http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in 
java.io">IOExceptionexception)
 
-
+
 protected void
 reportTransition(MasterProcedureEnvenv,
 RegionStates.RegionStateNoderegionNode,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCodecode,
 longopenSeqNum)
 
-
+
 void
 serializeStateData(http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true;
 title="class or interface in java.io">OutputStreamstream)
 The user-level code of the procedure may have some state to
  persist (e.g.
 
 
-
+
 protected boolean
 startTransition(MasterProcedureEnvenv,
RegionStates.RegionStateNoderegionNode)
 
-
+
 void
 toStringClassDetails(http://docs.oracle.com/javase/8/docs/api/java/lang/StringBuilder.html?is-external=true;
 title="class or interface in java.lang">StringBuildersb)
 Extend the toString() information with the procedure details
  e.g.
 
 
-
+
 protected boolean
 updateTransition(MasterProcedureEnvenv,
 RegionStates.RegionStateNoderegionNode)
@@ -385,7 +392,7 @@ extends 
 
 LOG
-private static finalorg.apache.commons.logging.Log LOG
+private static finalorg.apache.commons.logging.Log LOG
 
 
 
@@ -394,7 +401,7 @@ extends 
 
 forceNewPlan
-privateboolean forceNewPlan
+privateboolean forceNewPlan
 
 
 
@@ -403,7 +410,7 @@ extends 
 
 targetServer
-protected volatileServerName targetServer
+protected volatileServerName targetServer
 Gets set as desired target on move, merge, etc., when we 
want to go to a particular server.
  We may not be able to respect this request but will try. When it is NOT set, 
then we ask
  the balancer to assign. This value is used below in startTransition to set 
regionLocation if
@@ -425,7 +432,7 @@ extends 
 
 AssignProcedure
-publicAssignProcedure()
+publicAssignProcedure()
 
 
 
@@ -434,7 +441,7 @@ extends 
 
 AssignProcedure
-publicAssignProcedure(HRegionInforegionInfo)
+publicAssignProcedure(HRegionInforegionInfo)
 
 
 
@@ -443,7 +450,7 @@ extends 
 
 AssignProcedure
-publicAssignProcedure(HRegionInforegionInfo,
+publicAssignProcedure(HRegionInforegionInfo,
booleanforceNewPlan)
 
 
@@ -453,7 +460,7 @@ extends 
 
 

[40/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html 
b/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
index fa6..33173d3 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/MasterServices.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -285,108 +285,112 @@ extends getMasterFileSystem()
 
 
+MetricsMaster
+getMasterMetrics()
+
+
 ProcedureExecutorMasterProcedureEnv
 getMasterProcedureExecutor()
 
-
+
 MasterProcedureManagerHost
 getMasterProcedureManagerHost()
 
-
+
 MasterQuotaManager
 getMasterQuotaManager()
 
-
+
 MasterWalManager
 getMasterWalManager()
 
-
+
 RegionNormalizer
 getRegionNormalizer()
 
-
+
 ReplicationPeerConfig
 getReplicationPeerConfig(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringpeerId)
 Returns the configured ReplicationPeerConfig for the 
specified peer
 
 
-
+
 ServerManager
 getServerManager()
 
-
+
 SnapshotManager
 getSnapshotManager()
 
-
+
 TableDescriptors
 getTableDescriptors()
 
-
+
 TableStateManager
 getTableStateManager()
 
-
+
 boolean
 isActiveMaster()
 
-
+
 boolean
 isInitialized()
 
-
+
 boolean
 isInMaintenanceMode()
 
-
+
 boolean
 isServerCrashProcessingEnabled()
 
-
+
 boolean
 isSplitOrMergeEnabled(MasterSwitchTypeswitchType)
 
-
+
 boolean
 isStopping()
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName
 listDrainingRegionServers()
 List region servers marked as draining to not get 
additional regions assigned to them.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListLockInfo
 listLocks()
 List locks
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListProcedureInfo
 listProcedures()
 List procedures
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListReplicationPeerDescription
 listReplicationPeers(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringregex)
 Return a list of replication peers.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHTableDescriptor
 listTableDescriptorsByNamespace(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 Get list of table descriptors by namespace
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableName
 listTableNamesByNamespace(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 Get list of table names by namespace
 
 
-
+
 long
 mergeRegions(HRegionInfo[]regionsToMerge,
 booleanforcible,
@@ -395,7 +399,7 @@ extends Merge regions in a table.
 
 
-
+
 long
 modifyColumn(TableNametableName,
 HColumnDescriptordescriptor,
@@ -404,7 +408,7 @@ extends Modify the column descriptor of an existing column in an 
existing table
 
 
-
+
 long
 modifyTable(TableNametableName,
HTableDescriptordescriptor,
@@ -413,25 +417,25 @@ extends Modify the descriptor of an existing table
 
 
-
+
 boolean
 registerService(com.google.protobuf.Serviceinstance)
 Registers a new protocol buffer Service 
subclass as a 

[27/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.html 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.html
index ca56dfc..ca03f9b 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.html
@@ -408,7 +408,7 @@ extends Procedure
-addStackIndex,
 afterReplay,
 beforeReplay,
 compareTo,
 doAcquireLock,
 doExecute,
 doReleaseLock,
 doRollback, elapsedTime,
 getChildrenLatch,
 getException,
 getLastUpdate,
 getNonceKey,
 getOwner,
 getParentProcId,
 getProcId,
 getProcIdHashCode, getResult,
 getRootProcedureId,
 getRootProcId,
 getStackIndexes,
 getState,
 getSubmittedTime,
 getTimeout,
 getTimeoutTimestamp,
 hasChildren,
 hasException,
 hasLock,
 hasOwner,
 hasParent,
 hasTimeout,
 haveSameParent,
 holdLock,
 incChildrenLatch, 
isFailed,
 isFinished,
 isInitializing,
 isRunnable,
 isSuccess,
 isWaiting,
 removeStackIndex,
 setAbortFailure,
 setChildrenLatch,
 setFailure,
 setFailure,
 setLastUpdate,
 setNonceKey,
 setOwner,
 setOwner,
 setParentProcId, setProcId,
 setResult,
 setRootProcId,
 setStackIndexes,
 setState,
 setSubmittedTime,
 setTimeout,
 setTimeoutFailure,
 shouldWaitClientAck,
 toString,
 toStringClass,
 toStringDetails,
 toStringSimpleSB,
 updateMetricsOnFinish,
 updateMetricsOnSubmit,
 updateTimestamp,
 wasExecuted
+addStackIndex,
 afterReplay,
 beforeReplay,
 compareTo,
 doAcquireLock,
 doExecute,
 doReleaseLock,
 doRollback, elapsedTime,
 getChildrenLatch,
 getException,
 getLastUpdate,
 getNonceKey,
 getOwner,
 getParentProcId,
 getProcedureMetrics,
 getProcId, getProcIdHashCode,
 getResult,
 getRootProcedureId,
 getRootProcId,
 getStackIndexes,
 getState,
 getSubmittedTime,
 getTimeout, getTimeoutTimestamp,
 hasChildren,
 hasException,
 hasLock,
 hasOwner,
 hasParent,
 hasTimeout,
 haveSameParent,
 holdLock,
 incChildrenLatch,
 isFailed,
 isFinished,
 isInitializing,
 isRunnable,
 isSuccess,
 isWaiting,
 removeStackIndex,
 setAbortFailure,
 setChildrenLatch,
 setFailure,
 setFailure,
 setLastUpdate,
 setNonceKey,
 setOwner,
 setOwner,
 setParentProcId,
 setProcId,
 setResult,
 setRootProcId,
 setStackIndexes,
 setState,
 setSubmittedTime,
 setTimeout,
 setTimeoutFailure,
 shouldWaitClientAck,
 toString,
 toStringClass,
 toStringDetails,
 toStringSimpleSB,
 updateMetricsOnFinish,
 updateMetricsOnSubmit,
 updateTimestamp,
 wasExecuted
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.html
index addfeeb..e029a4f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.html
@@ -409,7 +409,7 @@ extends Procedure
-addStackIndex,
 afterReplay,
 beforeReplay,
 compareTo,
 completionCleanup,
 doAcquireLock,
 doExecute,
 doReleaseLock, doRollback,
 elapsedTime,
 getChildrenLatch,
 getException,
 getLastUpdate,
 getNonceKey,
 getOwner,
 getParentProcId,
 getProcId, getProcIdHashCode,
 getResult,
 getRootProcedureId,
 getRootProcId,
 getStackIndexes,
 getState,
 getSubmittedTime,
 getTimeout,
 getTimeoutTimestamp,
 hasChildren,
 hasException,
 hasLock,
 hasOwner,
 hasParent,
 hasTimeout,
 haveSameParent,
 holdLock,
 incChildrenLatch,
 isFailed,
 isFinished,
 isInitializing,
 isRunnable,
 isSuccess,
 isWaiting,
 removeStackIndex,
 setAbortFailure,
 setChildrenLatch,
 setFailure,
 setFailure,
 setLastUpdate,
 setNonceKey,
 

[07/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileScanner.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileScanner.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileScanner.html
index d28a9f8..97dbeb2 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileScanner.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileScanner.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.LimitedPrivate(value="Coprocessor")
-public class StoreFileScanner
+public class StoreFileScanner
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements KeyValueScanner
 KeyValueScanner adaptor over the Reader.  It also provides 
hooks into
@@ -287,7 +287,7 @@ implements 
 static http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListStoreFileScanner
-getScannersForCompaction(http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionStoreFilefiles,
+getScannersForCompaction(http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionStoreFilefiles,
 booleancanUseDropBehind,
 longreadPt)
 Get scanners for compaction.
@@ -295,7 +295,7 @@ implements 
 static http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListStoreFileScanner
-getScannersForStoreFiles(http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionStoreFilefiles,
+getScannersForStoreFiles(http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionStoreFilefiles,
 booleancacheBlocks,
 booleanusePread,
 booleanisCompaction,
@@ -306,7 +306,7 @@ implements 
 static http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListStoreFileScanner
-getScannersForStoreFiles(http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionStoreFilefiles,
+getScannersForStoreFiles(http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionStoreFilefiles,
 booleancacheBlocks,
 booleanusePread,
 booleanisCompaction,
@@ -319,7 +319,7 @@ implements 
 static http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListStoreFileScanner
-getScannersForStoreFiles(http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionStoreFilefiles,
+getScannersForStoreFiles(http://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in java.util">CollectionStoreFilefiles,
 booleancacheBlocks,
 booleanusePread,
 longreadPt)
@@ -460,7 +460,7 @@ implements 
 
 reader
-private finalStoreFileReader reader
+private finalStoreFileReader reader
 
 
 
@@ -469,7 +469,7 @@ implements 
 
 hfs
-private finalHFileScanner hfs
+private finalHFileScanner hfs
 
 
 
@@ -478,7 +478,7 @@ implements 
 
 cur
-privateCell cur
+privateCell cur
 
 
 
@@ -487,7 +487,7 @@ implements 
 
 closed
-privateboolean closed
+privateboolean closed
 
 
 
@@ -496,7 +496,7 @@ implements 
 
 realSeekDone
-privateboolean realSeekDone
+privateboolean realSeekDone
 
 
 
@@ -505,7 +505,7 @@ implements 
 
 delayedReseek
-privateboolean delayedReseek
+privateboolean delayedReseek
 
 
 
@@ -514,7 +514,7 @@ implements 
 
 delayedSeekKV
-privateCell delayedSeekKV
+privateCell delayedSeekKV
 
 
 
@@ -523,7 +523,7 @@ implements 
 
 enforceMVCC
-private finalboolean enforceMVCC
+private finalboolean enforceMVCC
 
 
 
@@ -532,7 +532,7 @@ implements 
 
 hasMVCCInfo
-private finalboolean hasMVCCInfo
+private finalboolean hasMVCCInfo
 
 
 
@@ -541,7 +541,7 @@ implements 
 
 stopSkippingKVsIfNextRow
-privateboolean stopSkippingKVsIfNextRow
+privateboolean stopSkippingKVsIfNextRow
 
 
 
@@ -550,7 +550,7 @@ implements 
 
 seekCount
-private statichttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in java.util.concurrent.atomic">LongAdder seekCount
+private 

[03/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/BloomType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/BloomType.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/BloomType.html
index 39e9322..c4cff08 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/BloomType.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/BloomType.html
@@ -289,7 +289,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private BloomType
-StoreFile.cfBloomType
+HStoreFile.cfBloomType
 Bloom filter type specified in column family 
configuration.
 
 
@@ -341,50 +341,48 @@ the order they are declared.
 
 
 
-StoreFile(org.apache.hadoop.fs.FileSystemfs,
- org.apache.hadoop.fs.Pathp,
- org.apache.hadoop.conf.Configurationconf,
- CacheConfigcacheConf,
- BloomTypecfBloomType)
+HStoreFile(org.apache.hadoop.fs.FileSystemfs,
+  org.apache.hadoop.fs.Pathp,
+  org.apache.hadoop.conf.Configurationconf,
+  CacheConfigcacheConf,
+  BloomTypecfBloomType)
 Deprecated.
 Now we will specific 
whether the StoreFile is for primary replica when
- constructing, so please use
- StoreFile.StoreFile(FileSystem,
 Path, Configuration, CacheConfig, BloomType, boolean)
- directly.
+ constructing, so please use HStoreFile.HStoreFile(FileSystem,
 Path, Configuration,
+ CacheConfig, BloomType, boolean) directly.
 
 
 
 
-StoreFile(org.apache.hadoop.fs.FileSystemfs,
- org.apache.hadoop.fs.Pathp,
- org.apache.hadoop.conf.Configurationconf,
- CacheConfigcacheConf,
- BloomTypecfBloomType,
- booleanprimaryReplica)
+HStoreFile(org.apache.hadoop.fs.FileSystemfs,
+  org.apache.hadoop.fs.Pathp,
+  org.apache.hadoop.conf.Configurationconf,
+  CacheConfigcacheConf,
+  BloomTypecfBloomType,
+  booleanprimaryReplica)
 Constructor, loads a reader and its indices, etc.
 
 
 
-StoreFile(org.apache.hadoop.fs.FileSystemfs,
- StoreFileInfofileInfo,
- org.apache.hadoop.conf.Configurationconf,
- CacheConfigcacheConf,
- BloomTypecfBloomType)
+HStoreFile(org.apache.hadoop.fs.FileSystemfs,
+  StoreFileInfofileInfo,
+  org.apache.hadoop.conf.Configurationconf,
+  CacheConfigcacheConf,
+  BloomTypecfBloomType)
 Deprecated.
 Now we will specific 
whether the StoreFile is for primary replica when
- constructing, so please use
- StoreFile.StoreFile(FileSystem,
 StoreFileInfo, Configuration, CacheConfig, BloomType, boolean)
- directly.
+ constructing, so please use HStoreFile.HStoreFile(FileSystem,
 StoreFileInfo,
+ Configuration, CacheConfig, BloomType, boolean) 
directly.
 
 
 
 
-StoreFile(org.apache.hadoop.fs.FileSystemfs,
- StoreFileInfofileInfo,
- org.apache.hadoop.conf.Configurationconf,
- CacheConfigcacheConf,
- BloomTypecfBloomType,
- booleanprimaryReplica)
+HStoreFile(org.apache.hadoop.fs.FileSystemfs,
+  StoreFileInfofileInfo,
+  org.apache.hadoop.conf.Configurationconf,
+  CacheConfigcacheConf,
+  BloomTypecfBloomType,
+  booleanprimaryReplica)
 Constructor, loads a reader and its indices, etc.
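[Editor's aside] A hedged sketch of calling the non-deprecated constructor the deprecation notes point to. The class is audience-private, so this is for illustration only, and BloomType.NONE is just a placeholder choice, not a recommendation from the patch.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.io.hfile.CacheConfig;
    import org.apache.hadoop.hbase.regionserver.BloomType;
    import org.apache.hadoop.hbase.regionserver.HStoreFile;

    // Illustrative only: the extra boolean states whether the file belongs to the
    // primary region replica, which the deprecated constructors could not express.
    final class HStoreFileSketch {
      static HStoreFile openPrimaryReplicaFile(FileSystem fs, Path p, Configuration conf)
          throws java.io.IOException {
        CacheConfig cacheConf = new CacheConfig(conf);
        return new HStoreFile(fs, p, conf, cacheConf, BloomType.NONE, /* primaryReplica */ true);
      }
    }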
 
 
@@ -482,7 +480,7 @@ the order they are declared.
  intmaxKeys,
  HFile.Writerwriter)
 Creates a new general (Row or RowCol) Bloom filter at the 
time of
- StoreFile writing.
+ StoreFile writing.
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HRegionFileSystem.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HRegionFileSystem.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HRegionFileSystem.html
index 242e2f7..5889dd7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HRegionFileSystem.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/HRegionFileSystem.html
@@ -135,7 +135,7 @@
 private Pairorg.apache.hadoop.fs.Path,org.apache.hadoop.fs.Path
 SplitTableRegionProcedure.splitStoreFile(HRegionFileSystemregionFs,
   byte[]family,
-  StoreFilesf)
+  StoreFilesf)
 
 
 private Pairhttp://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer,http://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer
@@ -155,7 +155,7 @@
 
 StoreFileSplitter(HRegionFileSystemregionFs,
   

[39/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/MetricsAssignmentManagerSource.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/MetricsAssignmentManagerSource.html 
b/devapidocs/org/apache/hadoop/hbase/master/MetricsAssignmentManagerSource.html
index 7c563e8..b3dcf62 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/MetricsAssignmentManagerSource.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/MetricsAssignmentManagerSource.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6};
+var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
Regenerated Javadoc for MetricsAssignmentManagerSource. The declaration is unchanged
(public interface MetricsAssignmentManagerSource extends BaseSource); the substantive changes are:

Field summary
  ASSIGN_TIME_NAME       renamed to ASSIGN_METRIC_PREFIX
  BULK_ASSIGN_TIME_NAME  renamed to MERGE_METRIC_PREFIX
  UNASSIGN_TIME_NAME     renamed to UNASSIGN_METRIC_PREFIX
  SPLIT_METRIC_PREFIX    added

Method summary
  Added, each returning OperationMetrics:
    getAssignMetrics(), getMergeMetrics(), getSplitMetrics(), getUnassignMetrics()
  incrementOperationCounter(): description changed from "Increment the count of assignment
    operation (assign/unassign)." to "TODO: Remove."
  setRIT(int ritCount), setRITCountOverThreshold(int ritCountOverThreshold),
    setRITOldestAge(long age) and updateRitDuration(long duration) are unchanged apart from
    renumbered table rows.
  Removed: updateAssignTime(long time) and updateUnassignTime(long time), which recorded the
    time taken by the last assign/unassign operation.

Constant detail
  The anchors for METRICS_NAME, METRICS_CONTEXT, METRICS_JMX_CONTEXT, METRICS_DESCRIPTION and
  RIT_COUNT_NAME were refreshed; the constants themselves are unchanged.
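The new accessors replace the per-call update methods: callers now fetch one OperationMetrics
handle per operation type and record into it. A minimal sketch of such a caller, assuming
OperationMetrics exposes a submitted counter and a time histogram (those member names are an
assumption for illustration, not taken from this diff):

    // Hypothetical helper; getAssignMetrics() is the accessor added in this change,
    // while getSubmittedCounter()/getTimeHisto() on OperationMetrics are assumed.
    void reportAssign(MetricsAssignmentManagerSource source, long elapsedMillis) {
      OperationMetrics assign = source.getAssignMetrics();
      assign.getSubmittedCounter().increment();
      assign.getTimeHisto().update(elapsedMillis);
    }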

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.html 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.html
index 2561a8f..ec5a7a9 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.html
The only change is in the "Methods inherited from class org.apache.hadoop.hbase.procedure2.Procedure"
block: the regenerated list for MoveRegionProcedure now includes getProcedureMetrics (listed between
getParentProcId and getProcId). The remaining inherited entries (addStackIndex through wasExecuted)
are carried over unchanged.
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.html
 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.html
index 1afc74d..0cdf521 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.html
The corresponding inherited-from-Procedure method list for RegionTransitionProcedure is regenerated
as well; only the pre-change side of the hunk (addStackIndex through serializeStateData) is visible
before the message is truncated.
 

[22/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.html
 
b/devapidocs/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.html
index 031c781..fbb1265 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.html
The declaration of the class is unchanged: @InterfaceAudience.Private public class
PartitionedMobCompactor extends MobCompactor, "An implementation of MobCompactor that compacts the
mob files in partitions." The remaining hunks only re-link the StoreFile type in method signatures,
consistent with StoreFile becoming an interface (see the StoreFile.html diff later in this batch):
closeStoreFileReaders(List<StoreFile> storeFiles), compactDelFilesInBatch(request, List<StoreFile>
delFiles), compactMobFilePartition(request, partition, List<StoreFile> delFiles, Connection
connection, Table table), the per-batch compaction helper taking List<StoreFile> filesToCompact,
createScanner(List<StoreFile> filesToCompact, ScanType scanType), getFileInfo(List<StoreFile>
storeFiles) and getListOfDelFilesForPartition(partition, ...). The message is truncated mid-hunk.

[14/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
index 324ef65..e8620bd 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
The declaration of the class is unchanged: @InterfaceAudience.Private public class HStore extends
Object implements Store, "A Store holds a column family in a Region. It's a memstore and a set of
zero or more StoreFiles." The remaining hunks only re-link the StoreFile type in field and method
signatures, again because StoreFile is now an interface: filesCompacting (List<StoreFile>),
addToCompactingFiles(Collection<StoreFile> filesToAdd), bulkLoadHFile(StoreFile sf),
clearCompactedfiles(List<StoreFile> filesToRemove), close() returning ImmutableCollection<StoreFile>,
commitFile(path, logCacheFlushId, status) returning StoreFile, both compact(CompactionContext,
ThroughputController[, User]) overloads returning List<StoreFile>,
completeCompaction(Collection<StoreFile> compactedFiles), createStoreFileAndReader(...),
getScanners(List<StoreFile> files, ...) and the Collection<StoreFile> getters. The message is
truncated mid-hunk.

[34/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
index 428084c..453f9b7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.html
The generated method-index arrays gain one entry (i31), reflecting one new method. The class
declaration is unchanged: @InterfaceAudience.Private public class SplitTableRegionProcedure extends
AbstractStateMachineRegionProcedure<MasterProcedureProtos.SplitTableRegionState>, "The procedure to
split a region in a table. Takes lock on the parent region." In the method summary one method is
added:

  protected ProcedureMetrics getProcedureMetrics(MasterProcedureEnv env)
      Override this method to provide procedure specific counters for submitted count, failed
      count and time histogram.

The existing rows (getRegionReplication, getSplitRow, getState, getStateId, getTableOperationType,
isRollbackSupported, isTraceEnabled, openParentRegion, postRollBackSplitRegion, postSplitRegion,
prepareSplitRegion, preSplitRegion, preSplitRegionAfterPONR, preSplitRegionBeforePONR,
rollbackState, serializeStateData, splitStoreFile, which now takes the StoreFile interface, and
splitStoreFiles) are only renumbered. The message is truncated mid-hunk.
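Since the new hook returns the metrics object for this specific procedure type, a subclass override
is typically a one-liner that looks the metrics up from its environment. A rough sketch under the
assumption that the assignment manager's metrics wrapper exposes a per-operation getter (the env
accessors below are illustrative, not taken from this diff):

    // Hypothetical override; only the getProcedureMetrics(MasterProcedureEnv) signature is
    // confirmed by this page, the accessor chain on env is assumed.
    @Override
    protected ProcedureMetrics getProcedureMetrics(MasterProcedureEnv env) {
      return env.getAssignmentManager().getAssignmentManagerMetrics().getSplitProcMetrics();
    }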

[04/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.html
index 38db35a..0075c67 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.html
The generated method-index arrays are renumbered (one additional method overall). The remaining
hunks only re-link the StoreFile type in field and method signatures: fileEnds and fileStarts
(HashMap<StoreFile,byte[]>), addCompactionResults(Collection<StoreFile> compactedFiles,
Collection<StoreFile> results), clearCompactedFiles() and clearFiles() returning
ImmutableCollection<StoreFile>, endOf(StoreFile sf), ensureEdgeStripeMetadata(ArrayList<StoreFile>
stripe, boolean isFirst), ensureLevel0Metadata(StoreFile sf) and
findExpiredFiles(ImmutableList<StoreFile> stripe, long maxTs, List<StoreFile> filesCompacting,
Collection<StoreFile> expiredStoreFiles). The message is truncated mid-hunk.

[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileWriter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileWriter.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileWriter.html
index 56bc0bb..cfb5db8 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileWriter.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileWriter.html
The generated method-index arrays now include a static method and the method-summary tabs gain a
"Static Methods" view. The class declaration is unchanged: @InterfaceAudience.Private public class
StoreFileWriter extends Object implements CellSink, ShipperListener, "A StoreFile writer. Use this
to read/write HBase Store Files." New in the method summary is the package-private static helper
getUniqueFile(FileSystem fs, Path dir); hasGeneralBloom() and trackTimestamps(Cell cell) ("Record
the earliest Put timestamp.") are only renumbered. The field, constructor and method detail
sections (LOG, generalBloomFilterWriter, deleteFamilyBloomFilterWriter, bloomType, earliestPutTs,
deleteFamilyCnt, bloomContext, deleteFamilyBloomContext, timeRangeTrackerSet, timeRangeTracker,
writer, both StoreFileWriter constructors, and appendMetadata(long maxSequenceId, boolean
majorCompaction), which "Writes meta data.") only had their anchors refreshed. The message is
truncated at the next hunk.
 

[23/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/mob/MobFile.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/mob/MobFile.html 
b/devapidocs/org/apache/hadoop/hbase/mob/MobFile.html
index c347d5b..3790bd8 100644
--- a/devapidocs/org/apache/hadoop/hbase/mob/MobFile.html
+++ b/devapidocs/org/apache/hadoop/hbase/mob/MobFile.html
Only generated anchors changed on this page; the documented API is intact: the
@InterfaceAudience.Private class MobFile ("The mob file.") with its private StoreFile sf field, the
protected MobFile() and MobFile(StoreFile sf) constructors, getScanner() ("Internal use only. This
is used by the sweeper."), readCell(Cell search, boolean cacheMobBlocks) and readCell(Cell search,
boolean cacheMobBlocks, long readPt) ("Reads a cell from the mob file."), getFileName(), open()
("Opens the underlying reader. It's not thread-safe. Use MobFileCache.openFile() instead."),
close() ("Closes the underlying reader, but does not evict blocks belonging to this file. It's not
thread-safe. Use MobFileCache.closeFile() instead."), and the static factory
create(FileSystem fs, Path path, Configuration conf, CacheConfig cacheConf).
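Read access therefore goes create, open, readCell, close, with the cache-aware MobFileCache
wrappers preferred outside internal code. A small sketch of the direct path documented above (the
surrounding variables are assumptions for illustration):

    // Hypothetical direct usage; the methods are the ones documented on this page.
    MobFile mobFile = MobFile.create(fs, mobFilePath, conf, cacheConf);
    mobFile.open();   // not thread-safe; MobFileCache.openFile() is the recommended entry point
    try {
      Cell found = mobFile.readCell(searchCell, /*cacheMobBlocks=*/false);
      // ... use the returned cell ...
    } finally {
      mobFile.close();  // likewise prefer MobFileCache.closeFile()
    }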

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/mob/MobUtils.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/mob/MobUtils.html 
b/devapidocs/org/apache/hadoop/hbase/mob/MobUtils.html
index e5162d9..4007600 100644
--- a/devapidocs/org/apache/hadoop/hbase/mob/MobUtils.html
+++ b/devapidocs/org/apache/hadoop/hbase/mob/MobUtils.html
Only the anchor on the declaration of the final utility class MobUtils (@InterfaceAudience.Private
public final class MobUtils) changed; the message is truncated here.

[30/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineNamespaceProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineNamespaceProcedure.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineNamespaceProcedure.html
index 6f8ec43..cb51813 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineNamespaceProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineNamespaceProcedure.html
As with the other procedures in this batch, only the "Methods inherited from class
org.apache.hadoop.hbase.procedure2.Procedure" block changed: the regenerated list for
AbstractStateMachineNamespaceProcedure now includes getProcedureMetrics (between getParentProcId
and getProcId); the other inherited entries (addStackIndex through wasExecuted, including hasLock
and holdLock) are carried over unchanged.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineRegionProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineRegionProcedure.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineRegionProcedure.html
index 90cdfad..3059dac 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineRegionProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineRegionProcedure.html
The same block is regenerated for AbstractStateMachineRegionProcedure; the visible part of the new
list runs from addStackIndex to getOwner before the message is truncated.
 

[11/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFile.Comparators.GetPathName.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFile.Comparators.GetPathName.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFile.Comparators.GetPathName.html
deleted file mode 100644
index aef20ec..000
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFile.Comparators.GetPathName.html
+++ /dev/null
The deleted page documented the now-removed private static nested class
StoreFile.Comparators.GetPathName, which extended Object, implemented
com.google.common.base.Function<StoreFile,String>, and exposed a private GetPathName() constructor
plus a single method String apply(StoreFile sf). The rest of the file was standard Javadoc
navigation chrome, the inherited java.lang.Object method list, and the Apache copyright footer; the
message is truncated in the constructor-detail section.

[01/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site ba217cb74 -> 6ad4f21aa


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.html
index 846a89c..c0c4e4b 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.html
The page title changed from "Uses of Class org.apache.hadoop.hbase.regionserver.StoreFile (Apache
HBase 2.0.0-SNAPSHOT API)" to "Uses of Interface org.apache.hadoop.hbase.regionserver.StoreFile
(Apache HBase 2.0.0-SNAPSHOT API)"; a second hunk follows but the message is truncated.
 

[05/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.CompactionOrFlushMergeCopy.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.CompactionOrFlushMergeCopy.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.CompactionOrFlushMergeCopy.html
index 243dfdf..8449f3f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.CompactionOrFlushMergeCopy.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.CompactionOrFlushMergeCopy.html
Only generated anchors changed for the private helper class
StripeStoreFileManager.CompactionOrFlushMergeCopy ("Non-static helper class for merging compaction
or flush results. Since we want to merge them atomically (more or less), it operates on lazy
copies..."). The re-linked members are the StoreFile-typed fields (compactedFiles, l0Results,
level0Files, results and stripeFiles, alongside isFlush and stripeEndRows) and the methods
deleteResults(Collection<StoreFile> compactedFiles), getLevel0Copy(), getStripeCopy(int index) and
mergeResults(Collection<StoreFile> compactedFiles, Collection<StoreFile> results). The message is
truncated mid-hunk.

[10/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFile.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFile.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFile.html
index 644c7f8..2043d0d 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFile.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFile.html
This is the central change of the batch: StoreFile is now an interface. The generated method-index
arrays shrink accordingly and the summary tabs switch from "Static/Instance/Concrete Methods" to
"Instance/Abstract Methods"; the navigation links and the page heading change from "Class StoreFile"
to "Interface StoreFile", with HStoreFile listed as the known implementing class. The declaration
changes from

  @InterfaceAudience.LimitedPrivate(value="Coprocessor")
  public class StoreFile extends Object

to an @InterfaceAudience.LimitedPrivate, @InterfaceStability.Evolving interface described simply as
"An interface to describe a store data file." The removed class-level Javadoc had documented the
usage pattern: a store data file is produced by flushing the memstore to disk; to create one,
instantiate a writer using StoreFileWriter.Builder and append data, adding any metadata before
calling close on the writer (via the appendMetadata convenience methods); on close the StoreFile
sits in the filesystem, and to read it you create a StoreFile instance passing filesystem and path
and call initReader(). StoreFiles may also reference store files in another Store, and the
write-once/read-many split is the reason separate writer and reader instances are used. The nested
StoreFile.Comparators summary is gone, and the field summary now lists only the public metadata
keys (BULKLOAD_TIME_KEY, DELETE_FAMILY_COUNT, EARLIEST_PUT_TS, EXCLUDE_FROM_MINOR_COMPACTION_KEY,
LAST_BLOOM_KEY, MAJOR_COMPACTION_KEY, MAX_SEQ_ID_KEY, ...); the former implementation fields
(cacheConf, cfBloomType, compactedAway, comparator, DEFAULT_STORE_FILE_READER_NO_READAHEAD,
excludeFromMinorCompaction, fileInfo, firstKey, fs, lastKey, LOG, majorCompaction, maxMemstoreTS,
...) are no longer part of the type. The message is truncated mid-hunk.
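The removed Javadoc still describes the intended read path for the new interface's main
implementation. A rough sketch of that path, assuming HStoreFile exposes a constructor over an
existing file and that the reader accessor keeps its familiar name (both are assumptions here, not
confirmed by this page):

    // Hypothetical read-side sketch of "create a StoreFile instance passing filesystem and path,
    // then call initReader()"; the constructor arguments and getReader() are assumed.
    StoreFile sf = new HStoreFile(fs, path, conf, cacheConf, BloomType.NONE, true);
    sf.initReader();                          // open the underlying reader once
    StoreFileReader reader = sf.getReader();
    // ... perform the many reads through the reader, then release it when done ...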

[02/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.Comparators.GetPathName.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.Comparators.GetPathName.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.Comparators.GetPathName.html
deleted file mode 100644
index aa7d722..000
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.Comparators.GetPathName.html
+++ /dev/null
The deleted page contained only the statement "No usage of
org.apache.hadoop.hbase.regionserver.StoreFile.Comparators.GetPathName" wrapped in standard Javadoc
navigation chrome, plus the Apache copyright footer.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.Comparators.GetSeqId.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.Comparators.GetSeqId.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.Comparators.GetSeqId.html
deleted file mode 100644
index 5763081..000
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.Comparators.GetSeqId.html
+++ /dev/null
@@ -1,125 +0,0 @@
-[Deleted generated page, condensed: "Uses of Class
- org.apache.hadoop.hbase.regionserver.StoreFile.Comparators.GetSeqId (Apache HBase
- 2.0.0-SNAPSHOT API)". The page reported "No usage of
- org.apache.hadoop.hbase.regionserver.StoreFile.Comparators.GetSeqId" and otherwise held
- only the standard Javadoc navigation chrome and copyright footer.]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.Comparators.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.Comparators.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.Comparators.html
deleted file mode 100644
index 0f9ce30..000
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/StoreFile.Comparators.html
+++ /dev/null
@@ -1,125 +0,0 @@

[25/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
index dac1be7..c63a61c 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
@@ -329,6 +329,22 @@
 SplitTableRegionProcedure.getParentRegionServerName(MasterProcedureEnv env)

+protected ProcedureMetrics
+UnassignProcedure.getProcedureMetrics(MasterProcedureEnv env)
+
+protected ProcedureMetrics
+SplitTableRegionProcedure.getProcedureMetrics(MasterProcedureEnv env)
+
+protected ProcedureMetrics
+MergeTableRegionsProcedure.getProcedureMetrics(MasterProcedureEnv env)
+
+protected ProcedureMetrics
+AssignProcedure.getProcedureMetrics(MasterProcedureEnv env)
+
 (package private) static
 org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse
 Util.getRegionInfoResponse(MasterProcedureEnv env,
  ServerName regionLocation,
@@ -1222,82 +1238,86 @@
 ProcedureSyncWait.getMasterQuotaManager(MasterProcedureEnv env)

+protected ProcedureMetrics
+ServerCrashProcedure.getProcedureMetrics(MasterProcedureEnv env)
+
 [The rest of this hunk and the following hunk (@@ -1306,554 +1326,554 @@) only renumber the
 row anchors of the existing method-summary entries (getRegionInfoList, getRegionLoad,
 getServerName, getTableNamespaceManager, getTimeout, handleRIT, hasLock, holdLock,
 insertIntoNSTable, isRegionsOnTheSameServer, isYieldBeforeExecuteFromState,
 MoveRegionsToSameRS, moveTempDirectoryToHBaseRoot, postAdd, postCloneSnapshot, ...); the
 entries themselves are unchanged.]

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html
index cc0de48..30a49d0 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html
 [Condensed: hunks @@ -315,7 @@, @@ -527,7 @@, @@ -568,7 @@, @@ -606,7 @@, @@ -1326,7 @@,
 @@ -1363,7 @@ and @@ -1512,7 @@ touch the generated rows for
 commitStoreFiles(Map<byte[], List<StoreFile>> storeFiles),
 mergeStoreFile(HRegionInfo mergedRegion, String familyName, StoreFile f, Path mergedDir),
 removeStoreFiles(String familyName, Collection<StoreFile> storeFiles) and
 splitStoreFile(HRegionInfo hri, String familyName, StoreFile f, byte[] splitRow, boolean top,
 RegionSplitPolicy splitPolicy). The visible text of each row is unchanged; only the embedded
 StoreFile links differ.]

[33/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.html 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.html
index 7d41091..00d2450 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.html
@@ -18,7 +18,7 @@
-[JavaScript method-index array with 12 entries]
+[JavaScript method-index array with 13 entries, one for the added method]
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -124,7 +124,7 @@ var activeTableTab = "activeTableTab";
 @InterfaceAudience.Private
 public class UnassignProcedure
 extends RegionTransitionProcedure
 Procedure that describe the unassignment of a single region.
  There can only be one RegionTransitionProcedure per region running at the time,
@@ -272,59 +272,66 @@ extends RegionStates.RegionStateNode regionNode)

+protected ProcedureMetrics
+getProcedureMetrics(MasterProcedureEnv env)
+Override this method to provide procedure specific counters for submitted count, failed
+ count and time histogram.
+
 [The remaining changes renumber the row and detail anchors of the existing members
 (getServer, getTableOperationType, isRollbackSupported, remoteCallBuild, remoteCallFailed,
 reportTransition, serializeStateData, startTransition, toStringClassDetails, updateTransition,
 LOG, hostingServer, destinationServer, serverCrashed, force and the UnassignProcedure
 constructors); their signatures and descriptions are unchanged.]
 
 

[20/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/procedure2/Procedure.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/Procedure.html 
b/devapidocs/org/apache/hadoop/hbase/procedure2/Procedure.html
index 6de66d4..a2db36b 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/Procedure.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/Procedure.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-[JavaScript method-index array with 80 entries]
+[JavaScript method-index array with 81 entries, one for the added method]
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 @InterfaceAudience.Private
  @InterfaceStability.Evolving
 public abstract class Procedure<TEnvironment>
 extends java.lang.Object
 implements Comparable<Procedure<TEnvironment>>
 Base Procedure class responsible for Procedure Metadata;
@@ -414,286 +414,293 @@ implements Comparable<Procedure<TEnvironment>>
 getParentProcId()

+protected ProcedureMetrics
+getProcedureMetrics(TEnvironment env)
+Override this method to provide procedure specific counters for submitted count, failed
+ count and time histogram.
+
 [The remaining changes in this hunk renumber the row anchors of the existing method-summary
 entries (getProcId, getProcIdHashCode, getResult, getRootProcedureId, getRootProcId,
 getStackIndexes, getState, getSubmittedTime, getTimeout, getTimeoutTimestamp, hasChildren,
 hasException, hasLock, hasOwner, hasParent, hasTimeout, haveSameParent, holdLock,
 incChildrenLatch, isFailed, isFinished, isInitializing, isRunnable, isSuccess, isWaiting,
 isYieldAfterExecutionStep, ...); the entries themselves are unchanged.]
 

[08/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/6ad4f21a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileComparators.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileComparators.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileComparators.html
new file mode 100644
index 000..270f408
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileComparators.html
@@ -0,0 +1,334 @@
+[New generated page, condensed: "StoreFileComparators (Apache HBase 2.0.0-SNAPSHOT API)",
+ package org.apache.hadoop.hbase.regionserver, final class StoreFileComparators extends
+ java.lang.Object, annotated @InterfaceAudience.Private, described as "Useful comparators for
+ comparing StoreFiles."
+
+ Nested classes (all private static): StoreFileComparators.GetBulkTime,
+ StoreFileComparators.GetFileSize, StoreFileComparators.GetMaxTimestamp,
+ StoreFileComparators.GetPathName, StoreFileComparators.GetSeqId.
+
+ Fields: static Comparator<StoreFile> SEQ_ID ("Comparator that compares based on the Sequence
+ Ids of the StoreFiles") and static Comparator<StoreFile> SEQ_ID_MAX_TIMESTAMP ("Comparator
+ for time-aware compaction").
+
+ Constructor: StoreFileComparators(). Methods: only those inherited from java.lang.Object.]

hbase git commit: HBASE-18150: Do not call FSUtils.setVersion() and FSUtils.checkVersion() when using checkRootDir() to check hbase.wal.dir

2017-06-06 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1 8252fe4fb -> 69d3e332f


HBASE-18150: Do not call FSUtils.setVersion() and FSUtils.checkVersion() when 
using checkRootDir() to check hbase.wal.dir

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/69d3e332
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/69d3e332
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/69d3e332

Branch: refs/heads/branch-1
Commit: 69d3e332f1e0cf1ac581977483f2c865586f8d2d
Parents: 8252fe4
Author: Xiang Li 
Authored: Sat Jun 3 00:45:41 2017 +0800
Committer: tedyu 
Committed: Tue Jun 6 05:44:29 2017 -0700

--
 .../hadoop/hbase/master/MasterFileSystem.java   | 41 +---
 1 file changed, 27 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/69d3e332/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
index 332a726..c1bd930 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
@@ -469,16 +469,23 @@ public class MasterFileSystem {
 } else {
   fs.mkdirs(rd);
 }
-// DFS leaves safe mode with 0 DNs when there are 0 blocks.
-// We used to handle this by checking the current DN count and waiting 
until
-// it is nonzero. With security, the check for datanode count doesn't 
work --
-// it is a privileged op. So instead we adopt the strategy of the 
jobtracker
-// and simply retry file creation during bootstrap indefinitely. As 
soon as
-// there is one datanode it will succeed. Permission problems should 
have
-// already been caught by mkdirs above.
-FSUtils.setVersion(fs, rd, c.getInt(HConstants.THREAD_WAKE_FREQUENCY,
-  10 * 1000), c.getInt(HConstants.VERSION_FILE_WRITE_ATTEMPTS,
-HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS));
+
+// HBASE-17437 updates createInitialFileSystemLayout() to re-use 
checkRootDir()
+// to check hbase.wal.dir after checking hbase.rootdir.
+// But FSUtils.setVersion() is supposed to be called only when 
checking hbase.rootdir,
+// while it is supposed to be bypassed when checking hbase.wal.dir.
+if (dirConfKey.equals(HConstants.HBASE_DIR)) {
+  // DFS leaves safe mode with 0 DNs when there are 0 blocks.
+  // We used to handle this by checking the current DN count and 
waiting until
+  // it is nonzero. With security, the check for datanode count 
doesn't work --
+  // it is a privileged op. So instead we adopt the strategy of the 
jobtracker
+  // and simply retry file creation during bootstrap indefinitely. As 
soon as
+  // there is one datanode it will succeed. Permission problems should 
have
+  // already been caught by mkdirs above.
+  FSUtils.setVersion(fs, rd,
+c.getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000),
+c.getInt(HConstants.VERSION_FILE_WRITE_ATTEMPTS, 
HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS));
+}
   } else {
 if (!fs.isDirectory(rd)) {
   throw new IllegalArgumentException(rd.toString() + " is not a 
directory");
@@ -493,10 +500,16 @@ public class MasterFileSystem {
   + "and restarting the master");
   fs.setPermission(rd, dirPerms);
 }
-// as above
-FSUtils.checkVersion(fs, rd, true, 
c.getInt(HConstants.THREAD_WAKE_FREQUENCY,
-  10 * 1000), c.getInt(HConstants.VERSION_FILE_WRITE_ATTEMPTS,
-HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS));
+
+// HBASE-17437 updates createInitialFileSystemLayout() to re-use 
checkRootDir()
+// to check hbase.wal.dir after checking hbase.rootdir.
+// But FSUtils.checkVersion() is supposed to be called only when 
checking hbase.rootdir,
+// while it is supposed to be bypassed when checking hbase.wal.dir.
+if (dirConfKey.equals(HConstants.HBASE_DIR)) {
+  FSUtils.checkVersion(fs, rd, true,
+c.getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000),
+c.getInt(HConstants.VERSION_FILE_WRITE_ATTEMPTS, 
HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS));
+}
   }
 } catch (DeserializationException de) {
   LOG.fatal("Please fix invalid configuration for " + dirConfKey, de);



[4/4] hbase git commit: HBASE-18038 Rename StoreFile to HStoreFile and add a StoreFile interface for CP

2017-06-06 Thread zhangduo
HBASE-18038 Rename StoreFile to HStoreFile and add a StoreFile interface for CP


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ee0f148c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ee0f148c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ee0f148c

Branch: refs/heads/master
Commit: ee0f148c730e0ae1cb616406166487fba78a2298
Parents: 8bfa8aa
Author: zhangduo 
Authored: Tue Jun 6 16:35:19 2017 +0800
Committer: zhangduo 
Committed: Tue Jun 6 20:36:38 2017 +0800

--
 .../assignment/MergeTableRegionsProcedure.java  |9 +-
 .../assignment/SplitTableRegionProcedure.java   |   13 +-
 .../master/balancer/StochasticLoadBalancer.java |3 +-
 .../apache/hadoop/hbase/mob/CachedMobFile.java  |3 +-
 .../org/apache/hadoop/hbase/mob/MobFile.java|3 +-
 .../org/apache/hadoop/hbase/mob/MobUtils.java   |5 +-
 .../compactions/PartitionedMobCompactor.java|   10 +-
 .../hbase/regionserver/CompactionTool.java  |   38 +-
 .../regionserver/DateTieredStoreEngine.java |2 +-
 .../hbase/regionserver/DefaultStoreEngine.java  |2 +-
 .../regionserver/DefaultStoreFileManager.java   |8 +-
 .../hadoop/hbase/regionserver/HMobStore.java|2 +-
 .../hadoop/hbase/regionserver/HRegion.java  |6 +-
 .../hadoop/hbase/regionserver/HStore.java   |   30 +-
 .../hadoop/hbase/regionserver/HStoreFile.java   |  560 +
 .../hadoop/hbase/regionserver/StoreFile.java|  736 ++--
 .../regionserver/StoreFileComparators.java  |   96 ++
 .../hbase/regionserver/StoreFileInfo.java   |1 -
 .../hbase/regionserver/StoreFileScanner.java|   20 +-
 .../hbase/regionserver/StoreFileWriter.java |   21 +-
 .../hadoop/hbase/regionserver/StoreScanner.java |4 +-
 .../hadoop/hbase/regionserver/StoreUtils.java   |   94 +-
 .../regionserver/StripeStoreFileManager.java|   20 +-
 .../compactions/CompactionRequest.java  |   45 +-
 .../regionserver/compactions/Compactor.java |4 +-
 .../compactions/DateTieredCompactionPolicy.java |   48 +-
 .../compactions/DateTieredCompactor.java|4 +-
 .../compactions/RatioBasedCompactionPolicy.java |7 +-
 .../compactions/SortedCompactionPolicy.java |   54 +-
 .../hadoop/hbase/snapshot/SnapshotManifest.java |   13 +-
 .../hadoop/hbase/util/BloomFilterFactory.java   |2 +-
 .../hbase/coprocessor/SimpleRegionObserver.java |2 +-
 .../hbase/mapreduce/TestHFileOutputFormat2.java |5 +-
 .../apache/hadoop/hbase/mob/TestMobFile.java|6 +-
 .../hbase/mob/compactions/TestMobCompactor.java |3 +-
 .../TestPartitionedMobCompactor.java|   40 +-
 .../hbase/namespace/TestNamespaceAuditor.java   |2 +
 .../AbstractTestDateTieredCompactionPolicy.java |9 +-
 .../regionserver/DataBlockEncodingTool.java |2 +-
 .../EncodedSeekPerformanceTest.java |4 +-
 .../hbase/regionserver/MockStoreFile.java   |   59 +-
 .../regionserver/TestCacheOnWriteInSchema.java  |2 +-
 .../TestCompactionArchiveConcurrentClose.java   |   16 +-
 .../TestCompactionArchiveIOException.java   |   11 +-
 .../regionserver/TestCompoundBloomFilter.java   |   10 +-
 .../regionserver/TestEncryptionKeyRotation.java |8 +-
 .../TestEncryptionRandomKeying.java |8 +-
 .../hbase/regionserver/TestFSErrorsExposed.java |8 +-
 .../hbase/regionserver/TestHMobStore.java   |   29 +-
 .../hadoop/hbase/regionserver/TestHRegion.java  |1 -
 .../regionserver/TestHRegionReplayEvents.java   |   14 +-
 .../regionserver/TestHRegionServerBulkLoad.java |8 +-
 .../hbase/regionserver/TestHStoreFile.java  | 1106 ++
 .../regionserver/TestMobStoreCompaction.java|   15 +-
 .../hbase/regionserver/TestRegionReplicas.java  |2 +-
 .../regionserver/TestReversibleScanners.java|   14 +-
 .../TestSplitTransactionOnCluster.java  |3 +-
 .../hadoop/hbase/regionserver/TestStore.java|5 +-
 .../hbase/regionserver/TestStoreFile.java   | 1106 --
 .../regionserver/TestStripeStoreEngine.java |2 +
 .../TestStripeStoreFileManager.java |   13 +-
 .../compactions/MockStoreFileGenerator.java |9 +-
 .../compactions/PerfTestCompactionPolicies.java |   21 +-
 .../TestCompactedHFilesDischarger.java  |   13 +-
 .../compactions/TestDateTieredCompactor.java|3 +-
 .../compactions/TestStripeCompactionPolicy.java |8 +-
 .../visibility/TestVisibilityLabels.java|4 +-
 .../apache/hadoop/hbase/util/HFileTestUtil.java |   13 +-
 68 files changed, 2331 insertions(+), 2116 deletions(-)
--
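
The new coprocessor-facing StoreFile interface itself is not included in this part of the
series. As a rough, hedged sketch of its shape, here are the read-only accessors that the
comparators and compaction code in this patch visibly rely on (the name StoreFileSketch and
the comments are illustrative; the real interface has more methods and may differ in detail):

    // Hedged sketch, not the actual org.apache.hadoop.hbase.regionserver.StoreFile source.
    import java.util.OptionalLong;
    import org.apache.hadoop.fs.Path;

    interface StoreFileSketch {
      Path getPath();                       // HFile location on the filesystem
      long getMaxSequenceId();              // highest sequence id contained in the file
      OptionalLong getBulkLoadTimestamp();  // present only for bulk-loaded files
      OptionalLong getMaximumTimestamp();   // newest cell timestamp, if known
    }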



[2/4] hbase git commit: HBASE-18038 Rename StoreFile to HStoreFile and add a StoreFile interface for CP

2017-06-06 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/ee0f148c/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
new file mode 100644
index 000..7070a80
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
@@ -0,0 +1,1106 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Map;
+import java.util.OptionalLong;
+import java.util.TreeSet;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HBaseTestCase;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.io.HFileLink;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.io.hfile.BlockCache;
+import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.io.hfile.CacheStats;
+import org.apache.hadoop.hbase.io.hfile.HFileContext;
+import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
+import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
+import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl;
+import org.apache.hadoop.hbase.io.hfile.HFileScanner;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.BloomFilterFactory;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ChecksumType;
+import org.apache.hadoop.hbase.util.FSUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.mockito.Mockito;
+
+import com.google.common.base.Joiner;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+/**
+ * Test HStoreFile
+ */
+@Category({RegionServerTests.class, SmallTests.class})
+public class TestHStoreFile extends HBaseTestCase {
+  private static final Log LOG = LogFactory.getLog(TestHStoreFile.class);
+  private static final HBaseTestingUtility TEST_UTIL = new 
HBaseTestingUtility();
+  private CacheConfig cacheConf =  new 
CacheConfig(TEST_UTIL.getConfiguration());
+  private static String ROOT_DIR = 
TEST_UTIL.getDataTestDir("TestStoreFile").toString();
+  private static final ChecksumType CKTYPE = ChecksumType.CRC32C;
+  private static final int CKBYTES = 512;
+  private static String TEST_FAMILY = "cf";
+
+  @Before
+  public void setUp() throws Exception {
+super.setUp();
+  }
+
+  @After
+  public void tearDown() throws Exception {
+super.tearDown();
+  }
+
+  /**
+   * Write a file and then assert that we can read from top and bottom halves
+   * using two HalfMapFiles.
+   * @throws Exception
+   */
+  @Test
+  public void testBasicHalfMapFile() throws Exception {
+final HRegionInfo hri =
+new HRegionInfo(TableName.valueOf("testBasicHalfMapFileTb"));
+HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
+  conf, fs, new Path(testDir, 

[3/4] hbase git commit: HBASE-18038 Rename StoreFile to HStoreFile and add a StoreFile interface for CP

2017-06-06 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/ee0f148c/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileComparators.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileComparators.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileComparators.java
new file mode 100644
index 000..961e338
--- /dev/null
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileComparators.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import com.google.common.base.Function;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Ordering;
+
+import java.util.Comparator;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
+/**
+ * Useful comparators for comparing StoreFiles.
+ */
+@InterfaceAudience.Private
+final class StoreFileComparators {
+  /**
+   * Comparator that compares based on the Sequence Ids of the StoreFiles. Bulk loads that did
+   * not request a seq ID are given a seq id of -1; thus, they are placed before all non-bulk
+   * loads, and bulk loads with sequence Id. Among these files, the size is used to determine the
+   * ordering, then bulkLoadTime. If there are ties, the path name is used as a tie-breaker.
+   */
+  public static final Comparator<StoreFile> SEQ_ID =
+      Ordering.compound(ImmutableList.of(Ordering.natural().onResultOf(new GetSeqId()),
+        Ordering.natural().onResultOf(new GetFileSize()).reverse(),
+        Ordering.natural().onResultOf(new GetBulkTime()),
+        Ordering.natural().onResultOf(new GetPathName())));
+
+  /**
+   * Comparator for time-aware compaction. SeqId is still the first ordering criterion to maintain
+   * MVCC.
+   */
+  public static final Comparator<StoreFile> SEQ_ID_MAX_TIMESTAMP =
+      Ordering.compound(ImmutableList.of(Ordering.natural().onResultOf(new GetSeqId()),
+        Ordering.natural().onResultOf(new GetMaxTimestamp()),
+        Ordering.natural().onResultOf(new GetFileSize()).reverse(),
+        Ordering.natural().onResultOf(new GetBulkTime()),
+        Ordering.natural().onResultOf(new GetPathName())));
+
+  private static class GetSeqId implements Function<StoreFile, Long> {
+    @Override
+    public Long apply(StoreFile sf) {
+      return sf.getMaxSequenceId();
+    }
+  }
+
+  private static class GetFileSize implements Function<StoreFile, Long> {
+    @Override
+    public Long apply(StoreFile sf) {
+      if (sf.getReader() != null) {
+        return sf.getReader().length();
+      } else {
+        // the reader may be null for the compacted files and if the archiving
+        // had failed.
+        return -1L;
+      }
+    }
+  }
+
+  private static class GetBulkTime implements Function<StoreFile, Long> {
+    @Override
+    public Long apply(StoreFile sf) {
+      return sf.getBulkLoadTimestamp().orElse(Long.MAX_VALUE);
+    }
+  }
+
+  private static class GetPathName implements Function<StoreFile, String> {
+    @Override
+    public String apply(StoreFile sf) {
+      return sf.getPath().getName();
+    }
+  }
+
+  private static class GetMaxTimestamp implements Function<StoreFile, Long> {
+    @Override
+    public Long apply(StoreFile sf) {
+      return sf.getMaximumTimestamp().orElse(Long.MAX_VALUE);
+    }
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hbase/blob/ee0f148c/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
index 0e99c74..c656183 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
@@ -283,7 +283,6 @@ public class StoreFileInfo {
*/
   public HDFSBlocksDistribution computeHDFSBlocksDistribution(final 

[1/4] hbase git commit: HBASE-18038 Rename StoreFile to HStoreFile and add a StoreFile interface for CP

2017-06-06 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master 8bfa8aaac -> ee0f148c7


http://git-wip-us.apache.org/repos/asf/hbase/blob/ee0f148c/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
deleted file mode 100644
index d1444c9..000
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
+++ /dev/null
@@ -1,1106 +0,0 @@
-/**
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.regionserver;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeSet;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HBaseTestCase;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.io.HFileLink;
-import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-import org.apache.hadoop.hbase.io.hfile.BlockCache;
-import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-import org.apache.hadoop.hbase.io.hfile.CacheStats;
-import org.apache.hadoop.hbase.io.hfile.HFileContext;
-import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
-import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
-import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl;
-import org.apache.hadoop.hbase.io.hfile.HFileScanner;
-import org.apache.hadoop.hbase.testclassification.RegionServerTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.util.BloomFilterFactory;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.ChecksumType;
-import org.apache.hadoop.hbase.util.FSUtils;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.mockito.Mockito;
-
-import com.google.common.base.Joiner;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
-
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
-/**
- * Test HStoreFile
- */
-@Category({RegionServerTests.class, SmallTests.class})
-public class TestStoreFile extends HBaseTestCase {
-  private static final Log LOG = LogFactory.getLog(TestStoreFile.class);
-  private static final HBaseTestingUtility TEST_UTIL = new 
HBaseTestingUtility();
-  private CacheConfig cacheConf =  new 
CacheConfig(TEST_UTIL.getConfiguration());
-  private static String ROOT_DIR = 
TEST_UTIL.getDataTestDir("TestStoreFile").toString();
-  private static final ChecksumType CKTYPE = ChecksumType.CRC32C;
-  private static final int CKBYTES = 512;
-  private static String TEST_FAMILY = "cf";
-
-  @Before
-  public void setUp() throws Exception {
-super.setUp();
-  }
-
-  @After
-  public void tearDown() throws Exception {
-super.tearDown();
-  }
-
-  /**
-   * Write a file and then assert that we can read from top and bottom halves
-   * using two HalfMapFiles.
-   * @throws Exception
-   */
-  @Test
-  public void testBasicHalfMapFile() throws Exception {
-final HRegionInfo hri =
-new HRegionInfo(TableName.valueOf("testBasicHalfMapFileTb"));
-HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
-  

hbase git commit: HBASE-18030 Per Cell TTL tags may get duplicated with increments/Append causing tags length overflow.

2017-06-06 Thread anoopsamjohn
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 2fc26cc24 -> b9d8f3b85


HBASE-18030 Per Cell TTL tags may get duplicated with increments/Append causing 
tags length overflow.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b9d8f3b8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b9d8f3b8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b9d8f3b8

Branch: refs/heads/branch-1.2
Commit: b9d8f3b8504b0126917b6693381762ce15800248
Parents: 2fc26cc
Author: anoopsamjohn 
Authored: Tue Jun 6 12:28:17 2017 +0530
Committer: anoopsamjohn 
Committed: Tue Jun 6 12:28:17 2017 +0530

--
 .../org/apache/hadoop/hbase/regionserver/HRegion.java | 14 +-
 1 file changed, 13 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b9d8f3b8/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index c6c611d..7006dbc 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -7748,7 +7748,19 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 // If we are making the array in here, given we are the last thing 
checked, we'll be only thing
 // in the array so set its size to '1' (I saw this being done in earlier 
version of
 // tag-handling).
-if (tags == null) tags = new ArrayList<Tag>(1);
+if (tags == null) {
+  tags = new ArrayList<Tag>(1);
+} else {
+  // Remove existing TTL tags if any
+  Iterator<Tag> tagsItr = tags.iterator();
+  while (tagsItr.hasNext()) {
+Tag tag = tagsItr.next();
+if (tag.getType() == TagType.TTL_TAG_TYPE) {
+  tagsItr.remove();
+  break;
+}
+  }
+}
 tags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttl)));
 return tags;
   }
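
The same change viewed as a hedged, standalone helper (old branch-1 Tag API, mirroring the
hunk above; the class and method names are illustrative only): at most one TTL tag survives
each time tags are carried forward, instead of one extra tag per Increment/Append.

    import java.util.ArrayList;
    import java.util.Iterator;
    import java.util.List;
    import org.apache.hadoop.hbase.Tag;
    import org.apache.hadoop.hbase.TagType;
    import org.apache.hadoop.hbase.util.Bytes;

    final class TtlTagSketch {
      // Replace any existing TTL tag with the new one so repeated carry-forwards cannot
      // accumulate duplicates and overflow the cell's tags length.
      static List<Tag> withSingleTtlTag(List<Tag> tags, long ttlMillis) {
        if (tags == null) {
          tags = new ArrayList<Tag>(1);
        } else {
          Iterator<Tag> it = tags.iterator();
          while (it.hasNext()) {
            if (it.next().getType() == TagType.TTL_TAG_TYPE) {
              it.remove();
              break;
            }
          }
        }
        tags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttlMillis)));
        return tags;
      }
    }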



hbase git commit: HBASE-18030 Per Cell TTL tags may get duplicated with increments/Append causing tags length overflow.

2017-06-06 Thread anoopsamjohn
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 9ba21e3d8 -> 041f14341


HBASE-18030 Per Cell TTL tags may get duplicated with increments/Append causing 
tags length overflow.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/041f1434
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/041f1434
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/041f1434

Branch: refs/heads/branch-1.3
Commit: 041f14341492acd465f1c6d25e5c9f4c1438e7d9
Parents: 9ba21e3
Author: anoopsamjohn 
Authored: Tue Jun 6 12:27:07 2017 +0530
Committer: anoopsamjohn 
Committed: Tue Jun 6 12:27:07 2017 +0530

--
 .../org/apache/hadoop/hbase/regionserver/HRegion.java | 14 +-
 1 file changed, 13 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/041f1434/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 11fc181..a603910 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -7959,7 +7959,19 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 // If we are making the array in here, given we are the last thing 
checked, we'll be only thing
 // in the array so set its size to '1' (I saw this being done in earlier 
version of
 // tag-handling).
-if (tags == null) tags = new ArrayList<Tag>(1);
+if (tags == null) {
+  tags = new ArrayList<Tag>(1);
+} else {
+  // Remove existing TTL tags if any
+  Iterator<Tag> tagsItr = tags.iterator();
+  while (tagsItr.hasNext()) {
+Tag tag = tagsItr.next();
+if (tag.getType() == TagType.TTL_TAG_TYPE) {
+  tagsItr.remove();
+  break;
+}
+  }
+}
 tags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttl)));
 return tags;
   }



hbase git commit: HBASE-18030 Per Cell TTL tags may get duplicated with increments/Append causing tags length overflow.

2017-06-06 Thread anoopsamjohn
Repository: hbase
Updated Branches:
  refs/heads/branch-1 395d9a08e -> 8252fe4fb


HBASE-18030 Per Cell TTL tags may get duplicated with increments/Append causing 
tags length overflow.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8252fe4f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8252fe4f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8252fe4f

Branch: refs/heads/branch-1
Commit: 8252fe4fbb184ab968c43e4238fca1a8722899df
Parents: 395d9a0
Author: anoopsamjohn 
Authored: Tue Jun 6 12:26:21 2017 +0530
Committer: anoopsamjohn 
Committed: Tue Jun 6 12:26:21 2017 +0530

--
 .../org/apache/hadoop/hbase/regionserver/HRegion.java | 14 +-
 1 file changed, 13 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8252fe4f/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index ff96822..cea7c6f 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -8155,7 +8155,19 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 // If we are making the array in here, given we are the last thing 
checked, we'll be only thing
 // in the array so set its size to '1' (I saw this being done in earlier 
version of
 // tag-handling).
-if (tags == null) tags = new ArrayList<Tag>(1);
+if (tags == null) {
+  tags = new ArrayList<Tag>(1);
+} else {
+  // Remove existing TTL tags if any
+  Iterator<Tag> tagsItr = tags.iterator();
+  while (tagsItr.hasNext()) {
+Tag tag = tagsItr.next();
+if (tag.getType() == TagType.TTL_TAG_TYPE) {
+  tagsItr.remove();
+  break;
+}
+  }
+}
 tags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttl)));
 return tags;
   }



hbase git commit: HBASE-18030 Per Cell TTL tags may get duplicated with increments/Append causing tags length overflow.

2017-06-06 Thread anoopsamjohn
Repository: hbase
Updated Branches:
  refs/heads/master 59448cddd -> 8bfa8aaac


HBASE-18030 Per Cell TTL tags may get duplicated with increments/Append causing 
tags length overflow.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8bfa8aaa
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8bfa8aaa
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8bfa8aaa

Branch: refs/heads/master
Commit: 8bfa8aaaca4560855cb672a0e8232d3849d93c85
Parents: 59448cd
Author: anoopsamjohn 
Authored: Tue Jun 6 12:25:15 2017 +0530
Committer: anoopsamjohn 
Committed: Tue Jun 6 12:25:15 2017 +0530

--
 .../java/org/apache/hadoop/hbase/TagUtil.java   | 10 
 .../org/apache/hadoop/hbase/TestTagUtil.java| 49 
 2 files changed, 59 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8bfa8aaa/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java
index 936d8c2..4682035 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java
@@ -277,6 +277,16 @@ public final class TagUtil {
 // tag-handling).
 if (tags == null) {
   tags = new ArrayList<>(1);
+} else {
+  // Remove existing TTL tags if any
+  Iterator<Tag> tagsItr = tags.iterator();
+  while (tagsItr.hasNext()) {
+Tag tag = tagsItr.next();
+if (tag.getType() == TagType.TTL_TAG_TYPE) {
+  tagsItr.remove();
+  break;
+}
+  }
 }
 tags.add(new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttl)));
 return tags;

http://git-wip-us.apache.org/repos/asf/hbase/blob/8bfa8aaa/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTagUtil.java
--
diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTagUtil.java 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTagUtil.java
new file mode 100644
index 000..d7894f4
--- /dev/null
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTagUtil.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.List;
+
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({ MiscTests.class, SmallTests.class })
+public class TestTagUtil {
+
+  @Test
+  public void testCarryForwardTTLTag() throws Exception {
+// No tags so far and the TTL tag must get added to the Tags list
+long ttl = 10 * 1000;
+List<Tag> tags = TagUtil.carryForwardTTLTag(null, ttl);
+assertEquals(1, tags.size());
+Tag ttlTag = tags.get(0);
+assertEquals(TagType.TTL_TAG_TYPE, ttlTag.getType());
+assertEquals(ttl, TagUtil.getValueAsLong(ttlTag));
+// Already having a TTL tag in the list. So the call must remove the old 
tag
+long ttl2 = 30 * 1000;
+tags = TagUtil.carryForwardTTLTag(tags, ttl2);
+assertEquals(1, tags.size());
+ttlTag = tags.get(0);
+assertEquals(TagType.TTL_TAG_TYPE, ttlTag.getType());
+assertEquals(ttl2, TagUtil.getValueAsLong(ttlTag));
+  }
+}