[1/6] hbase git commit: HBASE-17678 FilterList with MUST_PASS_ONE may lead to redundant cells returned - addendum

2017-10-09 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/HBASE-18410 [created] 5d0ee5f0e


HBASE-17678 FilterList with MUST_PASS_ONE may lead to redundant cells returned - addendum

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c3ac4e45
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c3ac4e45
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c3ac4e45

Branch: refs/heads/HBASE-18410
Commit: c3ac4e45f2ff54fa1fa2221ae37c3e50303f0500
Parents: 03eb620
Author: huzheng 
Authored: Wed Jun 7 14:49:29 2017 +0800
Committer: Sean Busbey 
Committed: Mon Oct 9 23:03:21 2017 -0500

--
 .../java/org/apache/hadoop/hbase/filter/FilterList.java | 12 ++--
 1 file changed, 10 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c3ac4e45/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 87dae1c..7969db7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
@@ -144,7 +145,7 @@ final public class FilterList extends FilterBase {
 
   public void initPrevListForMustPassOne(int size) {
     if (operator == Operator.MUST_PASS_ONE) {
-      if (this.prevCellList == null) {
+      if (this.prevFilterRCList == null) {
        prevFilterRCList = new ArrayList<>(Collections.nCopies(size, null));
       }
       if (this.prevCellList == null) {
@@ -403,7 +404,14 @@ final public class FilterList extends FilterBase {
         ReturnCode localRC = filter.filterKeyValue(c);
         // Update previous cell and return code we encountered.
         prevFilterRCList.set(i, localRC);
-        prevCellList.set(i, c);
+        if (c == null || localRC == ReturnCode.INCLUDE || localRC == ReturnCode.SKIP) {
+          // If previous return code is INCLUDE or SKIP, we should always pass the next cell to the
+          // corresponding sub-filter(need not test shouldPassCurrentCellToFilter() method), So we
+          // need not save current cell to prevCellList for saving heap memory.
+          prevCellList.set(i, null);
+        } else {
+          prevCellList.set(i, KeyValueUtil.toNewKeyCell(c));
+        }
 
         if (localRC != ReturnCode.SEEK_NEXT_USING_HINT) {
           seenNonHintReturnCode = true;
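To make the addendum's intent concrete, here is a minimal standalone sketch of the caching rule it introduces. The names (Rc, PrevState, the String key) are illustrative stand-ins, not HBase's API; the real code stores a trimmed key-only copy via KeyValueUtil.toNewKeyCell(c), so the cached reference cannot pin a whole block of cell data in memory.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

// Hypothetical, simplified stand-ins for HBase's ReturnCode and Cell.
enum Rc { INCLUDE, SKIP, NEXT_COL, NEXT_ROW }

final class PrevState {
  private final List<Rc> prevRc;
  private final List<String> prevKey; // key-only copy, never the full cell

  PrevState(int size) {
    prevRc = new ArrayList<>(Collections.nCopies(size, (Rc) null));
    prevKey = new ArrayList<>(Collections.nCopies(size, (String) null));
  }

  void record(int i, Rc rc, String cellKey) {
    prevRc.set(i, rc);
    if (rc == Rc.INCLUDE || rc == Rc.SKIP) {
      // The next cell is always handed to this sub-filter, so the previous
      // cell is never consulted again: drop it to save heap.
      prevKey.set(i, null);
    } else {
      // NEXT_COL / NEXT_ROW: keep only the key part for the later
      // "should this sub-filter see the current cell?" check.
      prevKey.set(i, cellKey);
    }
  }
}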



[2/6] hbase git commit: HBASE-18410 disable the HBASE-18957 test until we can fix it on the feature branch.

2017-10-09 Thread busbey
HBASE-18410 disable the HBASE-18957 test until we can fix it on the feature branch.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0354b2d2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0354b2d2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0354b2d2

Branch: refs/heads/HBASE-18410
Commit: 0354b2d2feb37f665ae9886faed816e6a9f2daf5
Parents: c3b3fd7
Author: Sean Busbey 
Authored: Mon Oct 9 15:24:00 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 23:03:21 2017 -0500

--
 .../java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java   | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0354b2d2/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
index dd2399f..590b26e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
@@ -58,6 +58,7 @@ public class TestFilterListOnMini {
     TEST_UTIL.shutdownMiniCluster();
   }
 
+  @Ignore("HBASE-18410 Should not merge without this test running.")
   @Test
   public void testFiltersWithOR() throws Exception {
     TableName tn = TableName.valueOf(name.getMethodName());
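For context, JUnit 4's @Ignore skips the annotated test while keeping it visible in reports, and the message travels with the skip. A minimal sketch (the class name and issue key here are hypothetical):

import org.junit.Ignore;
import org.junit.Test;

public class ExampleIgnoreTest {
  @Ignore("HBASE-XXXXX tracking issue explains why this is disabled.")
  @Test
  public void disabledUntilFixed() {
    // JUnit reports this test as skipped and surfaces the reason,
    // which keeps the temporary exclusion auditable.
  }
}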



[5/6] hbase git commit: HBASE-18160 Fix incorrect logic in FilterList.filterKeyValue

2017-10-09 Thread busbey
HBASE-18160 Fix incorrect logic in FilterList.filterKeyValue

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/fd0e0e91
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/fd0e0e91
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/fd0e0e91

Branch: refs/heads/HBASE-18410
Commit: fd0e0e91b7d875fe2c22ebc3ba704cc7f84aa6c7
Parents: 5119d60
Author: huzheng 
Authored: Thu Jun 8 15:58:42 2017 +0800
Committer: Sean Busbey 
Committed: Mon Oct 9 23:03:22 2017 -0500

--
 .../apache/hadoop/hbase/filter/FilterList.java  | 542 ---
 .../hadoop/hbase/filter/TestFilterList.java | 146 +++--
 2 files changed, 469 insertions(+), 219 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/fd0e0e91/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index cd51648..961c991 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -90,62 +90,53 @@ final public class FilterList extends FilterBase {
   private Cell transformedCell = null;
 
   /**
-   * Constructor that takes a set of {@link Filter}s. The default operator
-   * MUST_PASS_ALL is assumed.
+   * Constructor that takes a set of {@link Filter}s and an operator.
+   * @param operator Operator to process filter set with.
+   * @param rowFilters Set of row filters.
+   */
+  public FilterList(final Operator operator, final List<Filter> rowFilters) {
+    reversed = checkAndGetReversed(rowFilters, reversed);
+    this.filters = new ArrayList<>(rowFilters);
+    this.operator = operator;
+    initPrevListForMustPassOne(rowFilters.size());
+  }
+
+  /**
+   * Constructor that takes a set of {@link Filter}s. The default operator MUST_PASS_ALL is assumed.
    * All filters are cloned to internal list.
    * @param rowFilters list of filters
    */
   public FilterList(final List<Filter> rowFilters) {
-    reversed = getReversed(rowFilters, reversed);
-    this.filters = new ArrayList<>(rowFilters);
-    initPrevListForMustPassOne(rowFilters.size());
+    this(Operator.MUST_PASS_ALL, rowFilters);
   }
 
   /**
-   * Constructor that takes a var arg number of {@link Filter}s. The fefault operator
-   * MUST_PASS_ALL is assumed.
+   * Constructor that takes a var arg number of {@link Filter}s. The default operator MUST_PASS_ALL
+   * is assumed.
    * @param rowFilters
    */
   public FilterList(final Filter... rowFilters) {
-    this(Arrays.asList(rowFilters));
+    this(Operator.MUST_PASS_ALL, Arrays.asList(rowFilters));
   }
 
   /**
    * Constructor that takes an operator.
-   *
    * @param operator Operator to process filter set with.
    */
   public FilterList(final Operator operator) {
-    this.operator = operator;
-    this.filters = new ArrayList<>();
-    initPrevListForMustPassOne(filters.size());
-  }
-
-  /**
-   * Constructor that takes a set of {@link Filter}s and an operator.
-   *
-   * @param operator Operator to process filter set with.
-   * @param rowFilters Set of row filters.
-   */
-  public FilterList(final Operator operator, final List<Filter> rowFilters) {
-    this(rowFilters);
-    this.operator = operator;
-    initPrevListForMustPassOne(rowFilters.size());
+    this(operator, new ArrayList<>());
   }
 
   /**
    * Constructor that takes a var arg number of {@link Filter}s and an operator.
-   *
    * @param operator Operator to process filter set with.
    * @param rowFilters Filters to use
    */
   public FilterList(final Operator operator, final Filter... rowFilters) {
-    this(rowFilters);
-    this.operator = operator;
-    initPrevListForMustPassOne(rowFilters.length);
+    this(operator, Arrays.asList(rowFilters));
   }
 
-  public void initPrevListForMustPassOne(int size) {
+  private void initPrevListForMustPassOne(int size) {
     if (operator == Operator.MUST_PASS_ONE) {
       if (this.prevFilterRCList == null) {
         prevFilterRCList = new ArrayList<>(Collections.nCopies(size, null));
@@ -156,10 +147,8 @@ final public class FilterList extends FilterBase {
     }
   }
 
-
   /**
    * Get the operator.
-   *
    * @return operator
    */
   public Operator getOperator() {
@@ -168,7 +157,6 @@ final public class FilterList extends FilterBase {
 
   /**
    * Get the filters.
-   *
    * @return filters
    */
   public List<Filter> getFilters() {
@@ -183,33 +171,22 @@ final public class FilterList extends FilterBase {
     return filters.isEmpty();
   }
 
-  private 
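The refactor above collapses the overloaded constructors into delegations to one canonical constructor, so invariants such as the reversed check, the defensive copy, and the prev-list initialization are established in exactly one place. A minimal sketch of the pattern, with hypothetical names:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Hypothetical stand-in showing the "one canonical constructor" pattern.
final class RuleSet {
  enum Mode { ALL, ONE }

  private final Mode mode;
  private final List<String> rules;

  // Canonical constructor: every invariant is established exactly once.
  RuleSet(Mode mode, List<String> rules) {
    this.mode = mode;
    this.rules = new ArrayList<>(rules); // defensive copy, as in FilterList
  }

  // Every other overload delegates instead of duplicating the setup logic.
  RuleSet(List<String> rules) {
    this(Mode.ALL, rules);
  }

  RuleSet(String... rules) {
    this(Mode.ALL, Arrays.asList(rules));
  }

  RuleSet(Mode mode) {
    this(mode, new ArrayList<>());
  }
}

The payoff is visible in the diff itself: the earlier per-constructor calls to initPrevListForMustPassOne disappear, and the method can become private.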

[3/6] hbase git commit: HBASE-17678 FilterList with MUST_PASS_ONE may lead to redundant cells returned

2017-10-09 Thread busbey
HBASE-17678 FilterList with MUST_PASS_ONE may lead to redundant cells returned

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/03eb620d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/03eb620d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/03eb620d

Branch: refs/heads/HBASE-18410
Commit: 03eb620d282fd804810254de94c1af7c67cf2574
Parents: 0354b2d
Author: huzheng 
Authored: Sat May 27 16:58:00 2017 +0800
Committer: Sean Busbey 
Committed: Mon Oct 9 23:03:21 2017 -0500

--
 .../apache/hadoop/hbase/filter/FilterList.java  |  76 +++-
 .../hadoop/hbase/filter/TestFilterList.java | 117 +++
 2 files changed, 191 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/03eb620d/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 9c4da41..87dae1c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -66,6 +66,14 @@ final public class FilterList extends FilterBase {
   private final List<Filter> filters;
   private Filter seekHintFilter = null;
 
+  /**
+   * Save previous return code and previous cell for every filter in filter list. For MUST_PASS_ONE,
+   * we use the previous return code to decide whether we should pass current cell encountered to
+   * the filter. For MUST_PASS_ALL, the two list are meaningless.
+   */
+  private List<ReturnCode> prevFilterRCList = null;
+  private List<Cell> prevCellList = null;
+
   /** Reference Cell used by {@link #transformCell(Cell)} for validation purpose. */
   private Cell referenceCell = null;
 
@@ -87,6 +95,7 @@ final public class FilterList extends FilterBase {
   public FilterList(final List<Filter> rowFilters) {
     reversed = getReversed(rowFilters, reversed);
     this.filters = new ArrayList<>(rowFilters);
+    initPrevListForMustPassOne(rowFilters.size());
   }
 
   /**
@@ -106,6 +115,7 @@ final public class FilterList extends FilterBase {
   public FilterList(final Operator operator) {
     this.operator = operator;
     this.filters = new ArrayList<>();
+    initPrevListForMustPassOne(filters.size());
   }
 
   /**
@@ -117,6 +127,7 @@ final public class FilterList extends FilterBase {
   public FilterList(final Operator operator, final List<Filter> rowFilters) {
     this(rowFilters);
     this.operator = operator;
+    initPrevListForMustPassOne(rowFilters.size());
   }
 
   /**
@@ -128,8 +139,21 @@ final public class FilterList extends FilterBase {
   public FilterList(final Operator operator, final Filter... rowFilters) {
     this(rowFilters);
     this.operator = operator;
+    initPrevListForMustPassOne(rowFilters.length);
+  }
+
+  public void initPrevListForMustPassOne(int size) {
+    if (operator == Operator.MUST_PASS_ONE) {
+      if (this.prevCellList == null) {
+        prevFilterRCList = new ArrayList<>(Collections.nCopies(size, null));
+      }
+      if (this.prevCellList == null) {
+        prevCellList = new ArrayList<>(Collections.nCopies(size, null));
+      }
+    }
   }
 
+
   /**
    * Get the operator.
    *
@@ -184,6 +208,10 @@ final public class FilterList extends FilterBase {
   public void addFilter(List<Filter> filters) {
     checkReversed(filters, isReversed());
     this.filters.addAll(filters);
+    if (operator == Operator.MUST_PASS_ONE) {
+      this.prevFilterRCList.addAll(Collections.nCopies(filters.size(), null));
+      this.prevCellList.addAll(Collections.nCopies(filters.size(), null));
+    }
   }
 
   /**
@@ -200,6 +228,10 @@ final public class FilterList extends FilterBase {
     int listize = filters.size();
     for (int i = 0; i < listize; i++) {
       filters.get(i).reset();
+      if (operator == Operator.MUST_PASS_ONE) {
+        prevFilterRCList.set(i, null);
+        prevCellList.set(i, null);
+      }
     }
     seekHintFilter = null;
   }
@@ -282,6 +314,41 @@ final public class FilterList extends FilterBase {
     return this.transformedCell;
   }
 
+  /**
+   * For MUST_PASS_ONE, we cannot make sure that when filter-A in filter list return NEXT_COL then
+   * the next cell passing to filterList will be the first cell in next column, because if filter-B
+   * in filter list return SKIP, then the filter list will return SKIP. In this case, we should pass
+   * the cell following the previous cell, and it's possible that the next cell has the same column
+   * as the previous cell even if filter-A has NEXT_COL returned 
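The quoted javadoc is cut off by the archive, but its point can be sketched standalone: under MUST_PASS_ONE, a sub-filter that answered NEXT_COL or NEXT_ROW must not be shown further cells from that column or row, yet the list as a whole may still receive such cells because a sibling filter answered SKIP. A simplified, hypothetical model of that gate (HBase's real check compares rows and columns with CellComparator and handles more return codes):

// Simplified model: a cell is (row, column); return codes as in the patch.
enum Code { INCLUDE, SKIP, NEXT_COL, NEXT_ROW }

final class Gate {
  /**
   * Decide whether a sub-filter that previously answered prevCode for
   * (prevRow, prevCol) still needs to see the current cell.
   */
  static boolean shouldPass(Code prevCode, String prevRow, String prevCol,
                            String curRow, String curCol) {
    if (prevCode == null) {
      return true; // no history yet: always pass the first cell
    }
    switch (prevCode) {
      case INCLUDE:
      case SKIP:
        return true; // these codes place no constraint on the next cell
      case NEXT_COL:
        // withhold cells still in the same row+column the filter rejected
        return !(prevRow.equals(curRow) && prevCol.equals(curCol));
      case NEXT_ROW:
        // withhold cells still in the same row
        return !prevRow.equals(curRow);
      default:
        return true;
    }
  }
}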

[6/6] hbase git commit: HBASE-18904 Missing break in NEXT_ROW case of FilterList#mergeReturnCodeForOrOperator()

2017-10-09 Thread busbey
HBASE-18904 Missing break in NEXT_ROW case of FilterList#mergeReturnCodeForOrOperator()

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5d0ee5f0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5d0ee5f0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5d0ee5f0

Branch: refs/heads/HBASE-18410
Commit: 5d0ee5f0e0e160cec64362e65979e41a72223382
Parents: fd0e0e9
Author: Biju Nair 
Authored: Fri Sep 29 16:55:54 2017 -0400
Committer: Sean Busbey 
Committed: Mon Oct 9 23:03:22 2017 -0500

--
 .../src/main/java/org/apache/hadoop/hbase/filter/FilterList.java   | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5d0ee5f0/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 961c991..b94429f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -566,6 +566,7 @@ final public class FilterList extends FilterBase {
       if (isInReturnCodes(rc, ReturnCode.NEXT_ROW)) {
         return ReturnCode.NEXT_ROW;
       }
+      break;
     case SEEK_NEXT_USING_HINT:
       if (isInReturnCodes(rc, ReturnCode.INCLUDE, ReturnCode.INCLUDE_AND_NEXT_COL,
           ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW)) {
@@ -577,6 +578,7 @@ final public class FilterList extends FilterBase {
       if (isInReturnCodes(rc, ReturnCode.SEEK_NEXT_USING_HINT)) {
         return ReturnCode.SEEK_NEXT_USING_HINT;
       }
+      break;
     }
     throw new IllegalStateException(
         "Received code is not valid. rc: " + rc + ", localRC: " + localRC);

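The two added break statements guard against Java's switch fall-through: without them, a case whose inner condition fails silently executes the next case's body instead of reaching the IllegalStateException. A self-contained demonstration of the bug class, unrelated to HBase's types:

public class FallThroughDemo {
  static String classify(int code, boolean matches) {
    switch (code) {
      case 1:
        if (matches) {
          return "row";
        }
        // Without this break, a non-matching case 1 would fall straight
        // into case 2 and run its check by accident.
        break;
      case 2:
        if (matches) {
          return "hint";
        }
        break;
    }
    return "invalid";
  }

  public static void main(String[] args) {
    System.out.println(classify(1, false)); // prints "invalid", not "hint"
  }
}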


[4/6] hbase git commit: HBASE-15410 Utilize the max seek value when all Filters in MUST_PASS_ALL FilterList return SEEK_NEXT_USING_HINT

2017-10-09 Thread busbey
HBASE-15410 Utilize the max seek value when all Filters in MUST_PASS_ALL FilterList return SEEK_NEXT_USING_HINT


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5119d607
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5119d607
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5119d607

Branch: refs/heads/HBASE-18410
Commit: 5119d607613d531c395a5b9059951e208649c85f
Parents: c3ac4e4
Author: tedyu 
Authored: Thu Sep 7 04:07:09 2017 -0700
Committer: Sean Busbey 
Committed: Mon Oct 9 23:03:22 2017 -0500

--
 .../apache/hadoop/hbase/filter/FilterList.java  | 31 
 .../hadoop/hbase/filter/TestFilterList.java |  6 ++--
 2 files changed, 28 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5119d607/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 7969db7..cd51648 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -22,7 +22,9 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
@@ -65,7 +67,7 @@ final public class FilterList extends FilterBase {
   private static final int MAX_LOG_FILTERS = 5;
   private Operator operator = Operator.MUST_PASS_ALL;
   private final List<Filter> filters;
-  private Filter seekHintFilter = null;
+  private Set<Filter> seekHintFilter = new HashSet<>();
 
   /**
    * Save previous return code and previous cell for every filter in filter list. For MUST_PASS_ONE,
@@ -234,7 +236,7 @@ final public class FilterList extends FilterBase {
         prevCellList.set(i, null);
       }
     }
-    seekHintFilter = null;
+    seekHintFilter.clear();
   }
 
   @Override
@@ -358,6 +360,7 @@ final public class FilterList extends FilterBase {
       return ReturnCode.INCLUDE;
     }
     this.referenceCell = c;
+    seekHintFilter.clear();
 
     // Accumulates successive transformation of every filter that includes the Cell:
     Cell transformed = c;
@@ -389,10 +392,12 @@ final public class FilterList extends FilterBase {
           transformed = filter.transformCell(transformed);
           continue;
         case SEEK_NEXT_USING_HINT:
-          seekHintFilter = filter;
-          return code;
+          seekHintFilter.add(filter);
+          continue;
         default:
-          return code;
+          if (seekHintFilter.isEmpty()) {
+            return code;
+          }
       }
     } else if (operator == Operator.MUST_PASS_ONE) {
       Cell prevCell = this.prevCellList.get(i);
@@ -442,6 +447,10 @@ final public class FilterList extends FilterBase {
       }
     }
 
+    if (!seekHintFilter.isEmpty()) {
+      return ReturnCode.SEEK_NEXT_USING_HINT;
+    }
+
     // Save the transformed Cell for transform():
     this.transformedCell = transformed;
 
@@ -565,7 +574,17 @@ final public class FilterList extends FilterBase {
     }
     Cell keyHint = null;
     if (operator == Operator.MUST_PASS_ALL) {
-      if (seekHintFilter != null) keyHint = seekHintFilter.getNextCellHint(currentCell);
+      for (Filter filter : seekHintFilter) {
+        if (filter.filterAllRemaining()) continue;
+        Cell curKeyHint = filter.getNextCellHint(currentCell);
+        if (keyHint == null) {
+          keyHint = curKeyHint;
+          continue;
+        }
+        if (CellComparator.COMPARATOR.compare(keyHint, curKeyHint) < 0) {
+          keyHint = curKeyHint;
+        }
+      }
       return keyHint;
     }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/5119d607/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
index 90f95a5..f20a9ba 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
@@ -502,7 +502,7 @@ public class TestFilterList {
     FilterList filterList = new FilterList(Operator.MUST_PASS_ONE,
         Arrays.asList(new Filter [] { filterMinHint, filterMaxHint } ));
     assertEquals(0, 
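The reasoning behind the patch: under MUST_PASS_ALL every filter must accept a cell, so when several filters return SEEK_NEXT_USING_HINT the scanner can jump straight to the largest of their hints, since any smaller position would be rejected by the filter that asked to seek past it. A generic sketch of that merge (illustrative, not the HBase Cell API):

import java.util.Comparator;
import java.util.List;

final class MaxHint {
  /**
   * Return the farthest (maximum) seek hint, mirroring the loop the patch
   * adds to FilterList#getNextCellHint for MUST_PASS_ALL.
   */
  static <T> T merge(List<T> hints, Comparator<T> cmp) {
    T best = null;
    for (T hint : hints) {
      if (hint == null) {
        continue; // a filter with no hint contributes nothing
      }
      if (best == null || cmp.compare(best, hint) < 0) {
        best = hint; // keep the larger hint: AND semantics allow the jump
      }
    }
    return best;
  }
}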

[2/2] hbase git commit: HBASE-18957 add test that confirms 2 FamilyFilters in a FilterList using MUST_PASS_ONE operator will return results that match either of the FamilyFilters

2017-10-09 Thread busbey
HBASE-18957 add test that confirms 2 FamilyFilters in a FilterList using MUST_PASS_ONE operator will return results that match either of the FamilyFilters

Amending-Author: Sean Busbey 

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/eeb9cf02
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/eeb9cf02
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/eeb9cf02

Branch: refs/heads/branch-1.1
Commit: eeb9cf0202ea55fcb51604cfb6c3763378e76c25
Parents: c01c55d
Author: Peter Somogyi 
Authored: Fri Oct 6 09:26:42 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 22:48:06 2017 -0500

--
 .../hbase/filter/TestFilterListOnMini.java  | 88 
 1 file changed, 88 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/eeb9cf02/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
new file mode 100644
index 0000000..dd2399f
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
@@ -0,0 +1,88 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.filter;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.testclassification.FilterTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
+
+/**
+ * Tests filter Lists in ways that rely on a MiniCluster.
+ * Where possible, favor tests in TestFilterList and TestFilterFromRegionSide instead.
+ */
+@Category({MediumTests.class, FilterTests.class})
+public class TestFilterListOnMini {
+
+  private static final Log LOG = LogFactory.getLog(TestFilterListOnMini.class);
+  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+
+  @Rule
+  public TestName name = new TestName();
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    TEST_UTIL.startMiniCluster(1);
+  }
+
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+    TEST_UTIL.shutdownMiniCluster();
+  }
+
+  @Test
+  public void testFiltersWithOR() throws Exception {
+    TableName tn = TableName.valueOf(name.getMethodName());
+    Table table = TEST_UTIL.createTable(tn, new String[] { "cf1", "cf2" });
+    byte[] CF1 = Bytes.toBytes("cf1");
+    byte[] CF2 = Bytes.toBytes("cf2");
+    Put put1 = new Put(Bytes.toBytes("0"));
+    put1.addColumn(CF1, Bytes.toBytes("col_a"), Bytes.toBytes(0));
+    table.put(put1);
+    Put put2 = new Put(Bytes.toBytes("0"));
+    put2.addColumn(CF2, Bytes.toBytes("col_b"), Bytes.toBytes(0));
+    table.put(put2);
+    FamilyFilter filterCF1 =
+        new FamilyFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator(CF1));
+    FamilyFilter filterCF2 =
+        new FamilyFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator(CF2));
+    FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
+    filterList.addFilter(filterCF1);
+    filterList.addFilter(filterCF2);
+    Scan scan = new Scan();
+    scan.setFilter(filterList);
+    ResultScanner scanner = table.getScanner(scan);
+    LOG.info("Filter list: " + filterList);
+    for (Result rr = scanner.next(); rr != null; rr = scanner.next()) 
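The archive truncates the listing at the scan loop. The shape of such a loop is standard; here is a hedged, self-contained helper showing how a ResultScanner is typically drained and counted (illustrative, not necessarily the committed assertion):

import java.io.IOException;

import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;

final class ScannerCounting {
  /** Count all cells a scanner returns; closing is left to the caller. */
  static int countCells(ResultScanner scanner) throws IOException {
    int cells = 0;
    for (Result rr = scanner.next(); rr != null; rr = scanner.next()) {
      cells += rr.size(); // each Result holds one row's matching cells
    }
    return cells;
  }
}

With one cell in cf1 and one in cf2 for row "0", a MUST_PASS_ONE (OR) list of the two FamilyFilters should surface both cells, which is the regression this test pins down.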

[1/2] hbase git commit: HBASE-18957 add test that confirms 2 FamilyFilters in a FilterList using MUST_PASS_ONE operator will return results that match either of the FamilyFilters

2017-10-09 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-1.1 c01c55d5f -> eeb9cf020
  refs/heads/branch-1.2 ffed972fd -> bd63eb73c


HBASE-18957 add test that confirms 2 FamilyFilters in a FilterList using MUST_PASS_ONE operator will return results that match either of the FamilyFilters

Amending-Author: Sean Busbey 

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bd63eb73
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bd63eb73
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bd63eb73

Branch: refs/heads/branch-1.2
Commit: bd63eb73cd80b3b06111d4ade7556516614958c8
Parents: ffed972
Author: Peter Somogyi 
Authored: Fri Oct 6 09:26:42 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 22:40:36 2017 -0500

--
 .../hbase/filter/TestFilterListOnMini.java  | 88 
 1 file changed, 88 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bd63eb73/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
new file mode 100644
index 0000000..dd2399f
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
@@ -0,0 +1,88 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.filter;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.testclassification.FilterTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
+
+/**
+ * Tests filter Lists in ways that rely on a MiniCluster.
+ * Where possible, favor tests in TestFilterList and TestFilterFromRegionSide instead.
+ */
+@Category({MediumTests.class, FilterTests.class})
+public class TestFilterListOnMini {
+
+  private static final Log LOG = LogFactory.getLog(TestFilterListOnMini.class);
+  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+
+  @Rule
+  public TestName name = new TestName();
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    TEST_UTIL.startMiniCluster(1);
+  }
+
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+    TEST_UTIL.shutdownMiniCluster();
+  }
+
+  @Test
+  public void testFiltersWithOR() throws Exception {
+    TableName tn = TableName.valueOf(name.getMethodName());
+    Table table = TEST_UTIL.createTable(tn, new String[] { "cf1", "cf2" });
+    byte[] CF1 = Bytes.toBytes("cf1");
+    byte[] CF2 = Bytes.toBytes("cf2");
+    Put put1 = new Put(Bytes.toBytes("0"));
+    put1.addColumn(CF1, Bytes.toBytes("col_a"), Bytes.toBytes(0));
+    table.put(put1);
+    Put put2 = new Put(Bytes.toBytes("0"));
+    put2.addColumn(CF2, Bytes.toBytes("col_b"), Bytes.toBytes(0));
+    table.put(put2);
+    FamilyFilter filterCF1 =
+        new FamilyFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator(CF1));
+    FamilyFilter filterCF2 =
+        new FamilyFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator(CF2));
+    FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
+    filterList.addFilter(filterCF1);
+    filterList.addFilter(filterCF2);
+    Scan scan = new Scan();
+    scan.setFilter(filterList);
+    ResultScanner scanner = 

hbase git commit: HBASE-18957 add test that confirms 2 FamilyFilters in a FilterList using MUST_PASS_ONE operator will return results that match either of the FamilyFilters

2017-10-09 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 5b7bcdfc4 -> 0d8f9683b


HBASE-18957 add test that confirms 2 FamilyFilters in a FilterList using MUST_PASS_ONE operator will return results that match either of the FamilyFilters

Amending-Author: Sean Busbey 

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0d8f9683
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0d8f9683
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0d8f9683

Branch: refs/heads/branch-1.3
Commit: 0d8f9683b1c0ac7fe1fe350d576c8c72a83791ff
Parents: 5b7bcdf
Author: Peter Somogyi 
Authored: Fri Oct 6 09:26:42 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 20:58:11 2017 -0500

--
 .../hbase/filter/TestFilterListOnMini.java  | 88 
 1 file changed, 88 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0d8f9683/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
new file mode 100644
index 0000000..dd2399f
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
@@ -0,0 +1,88 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.filter;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.testclassification.FilterTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
+
+/**
+ * Tests filter Lists in ways that rely on a MiniCluster.
+ * Where possible, favor tests in TestFilterList and TestFilterFromRegionSide instead.
+ */
+@Category({MediumTests.class, FilterTests.class})
+public class TestFilterListOnMini {
+
+  private static final Log LOG = LogFactory.getLog(TestFilterListOnMini.class);
+  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+
+  @Rule
+  public TestName name = new TestName();
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    TEST_UTIL.startMiniCluster(1);
+  }
+
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+    TEST_UTIL.shutdownMiniCluster();
+  }
+
+  @Test
+  public void testFiltersWithOR() throws Exception {
+    TableName tn = TableName.valueOf(name.getMethodName());
+    Table table = TEST_UTIL.createTable(tn, new String[] { "cf1", "cf2" });
+    byte[] CF1 = Bytes.toBytes("cf1");
+    byte[] CF2 = Bytes.toBytes("cf2");
+    Put put1 = new Put(Bytes.toBytes("0"));
+    put1.addColumn(CF1, Bytes.toBytes("col_a"), Bytes.toBytes(0));
+    table.put(put1);
+    Put put2 = new Put(Bytes.toBytes("0"));
+    put2.addColumn(CF2, Bytes.toBytes("col_b"), Bytes.toBytes(0));
+    table.put(put2);
+    FamilyFilter filterCF1 =
+        new FamilyFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator(CF1));
+    FamilyFilter filterCF2 =
+        new FamilyFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator(CF2));
+    FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
+    filterList.addFilter(filterCF1);
+    filterList.addFilter(filterCF2);
+    Scan scan = new Scan();
+    scan.setFilter(filterList);
+    ResultScanner scanner = table.getScanner(scan);
+    LOG.info("Filter list: " 

[2/2] hbase git commit: Revert "HBASE-17678 FilterList with MUST_PASS_ONE may lead to redundant cells returned"

2017-10-09 Thread busbey
Revert "HBASE-17678 FilterList with MUST_PASS_ONE may lead to redundant cells returned"

This reverts commit 256fc63007aecb63028b71ad1383d896f11db701.

Backing out filterlist regression, see HBASE-18957. Work continuing branch for HBASE-18410.

Signed-off-by: Peter Somogyi 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/968eea84
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/968eea84
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/968eea84

Branch: refs/heads/branch-1.4
Commit: 968eea84ad71aa6b285361f106f7293aa5191e98
Parents: 171cb0f
Author: Sean Busbey 
Authored: Fri Oct 6 14:26:50 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 20:48:29 2017 -0500

--
 .../apache/hadoop/hbase/filter/FilterList.java  |  82 +
 .../org/apache/hadoop/hbase/KeyValueUtil.java   |  15 ---
 .../hadoop/hbase/filter/TestFilterList.java | 117 +--
 3 files changed, 3 insertions(+), 211 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/968eea84/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index ddbd4a7..be22e5d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -21,10 +21,8 @@ package org.apache.hadoop.hbase.filter;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.List;
 
-import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Cell;
@@ -71,14 +69,6 @@ final public class FilterList extends Filter {
   private List<Filter> filters = new ArrayList<Filter>();
   private Filter seekHintFilter = null;
 
-  /**
-   * Save previous return code and previous cell for every filter in filter list. For MUST_PASS_ONE,
-   * we use the previous return code to decide whether we should pass current cell encountered to
-   * the filter. For MUST_PASS_ALL, the two list are meaningless.
-   */
-  private List<ReturnCode> prevFilterRCList = null;
-  private List<Cell> prevCellList = null;
-
   /** Reference Cell used by {@link #transformCell(Cell)} for validation purpose. */
   private Cell referenceKV = null;
 
@@ -103,7 +93,6 @@ final public class FilterList extends Filter {
     } else {
       this.filters = new ArrayList<Filter>(rowFilters);
     }
-    initPrevListForMustPassOne(rowFilters.size());
   }
 
   /**
@@ -113,7 +102,6 @@ final public class FilterList extends Filter {
    */
   public FilterList(final Filter... rowFilters) {
     this.filters = new ArrayList<Filter>(Arrays.asList(rowFilters));
-    initPrevListForMustPassOne(filters.size());
   }
 
   /**
@@ -123,7 +111,6 @@ final public class FilterList extends Filter {
    */
   public FilterList(final Operator operator) {
     this.operator = operator;
-    initPrevListForMustPassOne(filters.size());
   }
 
   /**
@@ -135,7 +122,6 @@ final public class FilterList extends Filter {
   public FilterList(final Operator operator, final List<Filter> rowFilters) {
     this.filters = new ArrayList<Filter>(rowFilters);
     this.operator = operator;
-    initPrevListForMustPassOne(filters.size());
   }
 
   /**
@@ -147,18 +133,6 @@ final public class FilterList extends Filter {
   public FilterList(final Operator operator, final Filter... rowFilters) {
     this.filters = new ArrayList<Filter>(Arrays.asList(rowFilters));
     this.operator = operator;
-    initPrevListForMustPassOne(filters.size());
-  }
-
-  public void initPrevListForMustPassOne(int size) {
-    if (operator == Operator.MUST_PASS_ONE) {
-      if (this.prevFilterRCList == null) {
-        prevFilterRCList = new ArrayList<ReturnCode>(Collections.nCopies(size, (ReturnCode) null));
-      }
-      if (this.prevCellList == null) {
-        prevCellList = new ArrayList<Cell>(Collections.nCopies(size, (Cell) null));
-      }
-    }
   }
 
   /**
@@ -191,10 +165,6 @@ final public class FilterList extends Filter {
           + this.isReversed());
     }
     this.filters.add(filter);
-    if (operator == Operator.MUST_PASS_ONE) {
-      this.prevFilterRCList.add((ReturnCode) null);
-      this.prevCellList.add((Cell) null);
-    }
   }
 
   @Override
@@ -202,10 +172,6 @@ final public class FilterList extends Filter {
     int listize = filters.size();
     for (int i = 0; i < listize; i++) {
       

[1/2] hbase git commit: HBASE-18957 add test that confirms 2 FamilyFilters in a FilterList using MUST_PASS_ONE operator will return results that match either of the FamilyFilters

2017-10-09 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 ab8b0a366 -> 968eea84a


HBASE-18957 add test that confirms 2 FamilyFilters in a FilterList using MUST_PASS_ONE operator will return results that match either of the FamilyFilters

Amending-Author: Sean Busbey 

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/171cb0f1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/171cb0f1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/171cb0f1

Branch: refs/heads/branch-1.4
Commit: 171cb0f174872766ace2dc8a4ac8ce9941f86e56
Parents: ab8b0a3
Author: Peter Somogyi 
Authored: Fri Oct 6 09:26:42 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 20:48:14 2017 -0500

--
 .../hbase/filter/TestFilterListOnMini.java  | 88 
 1 file changed, 88 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/171cb0f1/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
new file mode 100644
index 0000000..dd2399f
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
@@ -0,0 +1,88 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.filter;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.testclassification.FilterTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
+
+/**
+ * Tests filter Lists in ways that rely on a MiniCluster.
+ * Where possible, favor tests in TestFilterList and TestFilterFromRegionSide instead.
+ */
+@Category({MediumTests.class, FilterTests.class})
+public class TestFilterListOnMini {
+
+  private static final Log LOG = LogFactory.getLog(TestFilterListOnMini.class);
+  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+
+  @Rule
+  public TestName name = new TestName();
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    TEST_UTIL.startMiniCluster(1);
+  }
+
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+    TEST_UTIL.shutdownMiniCluster();
+  }
+
+  @Test
+  public void testFiltersWithOR() throws Exception {
+    TableName tn = TableName.valueOf(name.getMethodName());
+    Table table = TEST_UTIL.createTable(tn, new String[] { "cf1", "cf2" });
+    byte[] CF1 = Bytes.toBytes("cf1");
+    byte[] CF2 = Bytes.toBytes("cf2");
+    Put put1 = new Put(Bytes.toBytes("0"));
+    put1.addColumn(CF1, Bytes.toBytes("col_a"), Bytes.toBytes(0));
+    table.put(put1);
+    Put put2 = new Put(Bytes.toBytes("0"));
+    put2.addColumn(CF2, Bytes.toBytes("col_b"), Bytes.toBytes(0));
+    table.put(put2);
+    FamilyFilter filterCF1 =
+        new FamilyFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator(CF1));
+    FamilyFilter filterCF2 =
+        new FamilyFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator(CF2));
+    FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
+    filterList.addFilter(filterCF1);
+    filterList.addFilter(filterCF2);
+    Scan scan = new Scan();
+    scan.setFilter(filterList);
+    ResultScanner scanner = table.getScanner(scan);
+    LOG.info("Filter list: " 

hbase git commit: HBASE-18949 Remove the CompactionRequest parameter in preCompactSelection

2017-10-09 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master 54da4405d -> c3b3fd788


HBASE-18949 Remove the CompactionRequest parameter in preCompactSelection

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c3b3fd78
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c3b3fd78
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c3b3fd78

Branch: refs/heads/master
Commit: c3b3fd7b8fd51f0dd1864a8cb618f88be16a
Parents: 54da440
Author: Peter Somogyi 
Authored: Thu Oct 5 09:59:28 2017 -0700
Committer: zhangduo 
Committed: Tue Oct 10 09:41:21 2017 +0800

--
 .../org/apache/hadoop/hbase/coprocessor/RegionObserver.java| 6 ++
 .../main/java/org/apache/hadoop/hbase/regionserver/HStore.java | 2 +-
 .../hadoop/hbase/regionserver/RegionCoprocessorHost.java   | 6 ++
 .../apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java  | 3 +--
 .../apache/hadoop/hbase/mob/compactions/TestMobCompactor.java  | 4 +---
 5 files changed, 7 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c3b3fd78/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
index cbd6e4b..a1e4f0e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
@@ -28,7 +28,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CompareOperator;
-import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
@@ -188,11 +187,10 @@ public interface RegionObserver {
    * @param store the store where compaction is being requested
    * @param candidates the store files currently available for compaction
    * @param tracker tracker used to track the life cycle of a compaction
-   * @param request the requested compaction
    */
   default void preCompactSelection(ObserverContext<RegionCoprocessorEnvironment> c, Store store,
-      List<StoreFile> candidates, CompactionLifeCycleTracker tracker,
-      CompactionRequest request) throws IOException {}
+      List<StoreFile> candidates, CompactionLifeCycleTracker tracker)
+      throws IOException {}
 
   /**
    * Called after the {@link StoreFile}s to compact have been selected from the available

http://git-wip-us.apache.org/repos/asf/hbase/blob/c3b3fd78/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index d8e82bb..d2009e3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -1666,7 +1666,7 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
           boolean override = false;
           //TODO: is it correct way to get CompactionRequest?
           override = getCoprocessorHost().preCompactSelection(this, candidatesForCoproc,
-            tracker, null, user);
+            tracker, user);
           if (override) {
             // Coprocessor is overriding normal file selection.
             compaction.forceSelect(new CompactionRequestImpl(candidatesForCoproc));

http://git-wip-us.apache.org/repos/asf/hbase/blob/c3b3fd78/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
index b76980d..035c8d1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
@@ -569,18 +569,16 @@ public class RegionCoprocessorHost
    * @param store The store where compaction is being requested
    * @param candidates The currently available store files
    * @param tracker used to track the life cycle of a compaction
-   * @param request the 
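With the CompactionRequest parameter removed, a coprocessor implements the slimmer hook shown in the RegionObserver diff above. A hedged sketch against the signatures quoted there (import paths are assumed from the diff context; the body is purely illustrative):

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;

public class CandidateLoggingObserver implements RegionObserver {
  @Override
  public void preCompactSelection(ObserverContext<RegionCoprocessorEnvironment> c,
      Store store, List<StoreFile> candidates, CompactionLifeCycleTracker tracker)
      throws IOException {
    // Illustrative only: observe (do not mutate) the candidate set.
    System.out.println("Compaction candidates for " + store + ": " + candidates.size());
  }
}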

[1/2] hbase git commit: HBASE-18867 update maven enforcer plugin.

2017-10-09 Thread mdrob
Repository: hbase
Updated Branches:
  refs/heads/branch-2 38e52bb29 -> ca62f769b
  refs/heads/master b727ab850 -> 54da4405d


HBASE-18867 update maven enforcer plugin.

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/54da4405
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/54da4405
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/54da4405

Branch: refs/heads/master
Commit: 54da4405d7c3f180df92d7ce70b0a1899eb69d19
Parents: b727ab8
Author: Sean Busbey 
Authored: Fri Sep 22 15:36:13 2017 -0500
Committer: Mike Drob 
Committed: Mon Oct 9 20:32:24 2017 -0500

--
 pom.xml | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/54da4405/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 26366ff..faca511 100755
--- a/pom.xml
+++ b/pom.xml
@@ -1519,8 +1519,7 @@
 ${hbase-surefire.argLine}
 0.7.5.201505241946
 1.0-beta-6
-
-1.4
+3.0.0-M1
 
 
${project.build.directory}/test-classes
 
-MM-dd'T'HH:mm:ss'Z'



[2/2] hbase git commit: HBASE-18867 update maven enforcer plugin.

2017-10-09 Thread mdrob
HBASE-18867 update maven enforcer plugin.

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ca62f769
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ca62f769
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ca62f769

Branch: refs/heads/branch-2
Commit: ca62f769b603514e97226d80d5306c5d067541a6
Parents: 38e52bb
Author: Sean Busbey 
Authored: Fri Sep 22 15:36:13 2017 -0500
Committer: Mike Drob 
Committed: Mon Oct 9 20:33:21 2017 -0500

--
 pom.xml | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ca62f769/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 15929f2..b4d7e0f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1480,8 +1480,7 @@
 ${hbase-surefire.argLine}
 0.7.5.201505241946
 1.0-beta-6
-
-1.4
+3.0.0-M1
 
 
${project.build.directory}/test-classes
 
-MM-dd'T'HH:mm:ss'Z'



[1/2] hbase git commit: HBASE-18923 TestTableResource flaky on branch-1

2017-10-09 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 aa5097194 -> c48155a4d
  refs/heads/branch-1.4 4358af214 -> ab8b0a366


HBASE-18923 TestTableResource flaky on branch-1

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c48155a4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c48155a4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c48155a4

Branch: refs/heads/branch-1
Commit: c48155a4d117d2d8f8f3f483763e616968b5a1a1
Parents: aa50971
Author: Pankaj Kumar 
Authored: Thu Oct 5 11:44:29 2017 +0530
Committer: Andrew Purtell 
Committed: Mon Oct 9 16:52:10 2017 -0700

--
 .../hadoop/hbase/rest/TestTableResource.java| 27 ++--
 1 file changed, 14 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c48155a4/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
--
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
index a7de2f1..3abcb4a 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.Waiter;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Durability;
@@ -57,7 +58,7 @@ import org.apache.hadoop.hbase.rest.model.TableModel;
 import org.apache.hadoop.hbase.rest.model.TableRegionModel;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -119,26 +120,26 @@ public class TestTableResource {
     table.close();
     // get the initial layout (should just be one region)
 
-    RegionLocator regionLocator = connection.getRegionLocator(TABLE);
+    final RegionLocator regionLocator = connection.getRegionLocator(TABLE);
     List<HRegionLocation> m = regionLocator.getAllRegionLocations();
     assertEquals(m.size(), 1);
     // tell the master to split the table
     admin.split(TABLE);
-    // give some time for the split to happen
 
-    long timeout = System.currentTimeMillis() + (15 * 1000);
-    while (System.currentTimeMillis() < timeout && m.size()!=2){
-      try {
-        Thread.sleep(250);
-      } catch (InterruptedException e) {
-        LOG.warn(StringUtils.stringifyException(e));
+    // give some time for the split to happen
+    long timeout = EnvironmentEdgeManager.currentTime() + (15 * 1000);
+    TEST_UTIL.waitFor(timeout, 250, new Waiter.Predicate<IOException>() {
+      @Override
+      public boolean evaluate() throws IOException {
+        List<HRegionLocation> regionLocations = regionLocator.getAllRegionLocations();
+        return regionLocations.size() == 2 && regionLocations.get(0).getServerName() != null
+            && regionLocations.get(1).getServerName() != null;
       }
-      // check again
-      m = regionLocator.getAllRegionLocations();
-    }
+    });
+    m = regionLocator.getAllRegionLocations();
 
     // should have two regions now
-    assertEquals(m.size(), 2);
+    assertEquals(2, m.size());
     regionMap = m;
     LOG.info("regions: " + regionMap);
     regionLocator.close();
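The fix replaces a hand-rolled sleep loop with HBaseTestingUtility.waitFor and a Waiter.Predicate, which polls until the condition holds or the deadline passes and fails the test with a clear timeout message instead of asserting against a stale snapshot. A hedged sketch of the pattern (the readiness check is a hypothetical stand-in):

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.Waiter;

final class WaitExample {
  // Illustrative: poll every 250 ms, up to 15 s, for a condition to hold.
  static void awaitCondition(HBaseTestingUtility util) throws Exception {
    util.waitFor(15_000, 250, new Waiter.Predicate<Exception>() {
      @Override
      public boolean evaluate() throws Exception {
        return systemUnderTestIsReady(); // hypothetical readiness probe
      }
    });
  }

  private static boolean systemUnderTestIsReady() {
    return true; // stand-in for a real check, e.g. counting region locations
  }
}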



[2/2] hbase git commit: HBASE-18923 TestTableResource flaky on branch-1

2017-10-09 Thread apurtell
HBASE-18923 TestTableResource flaky on branch-1

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ab8b0a36
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ab8b0a36
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ab8b0a36

Branch: refs/heads/branch-1.4
Commit: ab8b0a36624e294cc5d9690fde3fca796449b58c
Parents: 4358af2
Author: Pankaj Kumar 
Authored: Thu Oct 5 11:44:29 2017 +0530
Committer: Andrew Purtell 
Committed: Mon Oct 9 16:52:18 2017 -0700

--
 .../hadoop/hbase/rest/TestTableResource.java| 27 ++--
 1 file changed, 14 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ab8b0a36/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
--
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
index a7de2f1..3abcb4a 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.Waiter;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Durability;
@@ -57,7 +58,7 @@ import org.apache.hadoop.hbase.rest.model.TableModel;
 import org.apache.hadoop.hbase.rest.model.TableRegionModel;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -119,26 +120,26 @@ public class TestTableResource {
 table.close();
 // get the initial layout (should just be one region)
 
-RegionLocator regionLocator = connection.getRegionLocator(TABLE);
+final RegionLocator regionLocator = connection.getRegionLocator(TABLE);
 List m = regionLocator.getAllRegionLocations();
 assertEquals(m.size(), 1);
 // tell the master to split the table
 admin.split(TABLE);
-// give some time for the split to happen
 
-long timeout = System.currentTimeMillis() + (15 * 1000);
-while (System.currentTimeMillis() < timeout && m.size()!=2){
-  try {
-Thread.sleep(250);
-  } catch (InterruptedException e) {
-LOG.warn(StringUtils.stringifyException(e));
+// give some time for the split to happen
+long timeout = EnvironmentEdgeManager.currentTime() + (15 * 1000);
+TEST_UTIL.waitFor(timeout, 250, new Waiter.Predicate() {
+  @Override
+  public boolean evaluate() throws IOException {
+List regionLocations = regionLocator.getAllRegionLocations();
+return regionLocations.size() == 2 && regionLocations.get(0).getServerName() != null
+&& regionLocations.get(1).getServerName() != null;
   }
-  // check again
-  m = regionLocator.getAllRegionLocations();
-}
+});
+m = regionLocator.getAllRegionLocations();
 
 // should have two regions now
-assertEquals(m.size(), 2);
+assertEquals(2, m.size());
 regionMap = m;
 LOG.info("regions: " + regionMap);
 regionLocator.close();
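The change above swaps a hand-rolled sleep loop for the Waiter-based TEST_UTIL.waitFor, and System.currentTimeMillis() for EnvironmentEdgeManager.currentTime() so tests can substitute a controllable clock. A minimal, self-contained sketch of the poll-until-predicate idea in plain Java (an illustration of the pattern, not the HBase utility itself):

    import java.util.concurrent.TimeUnit;
    import java.util.function.BooleanSupplier;

    final class PollUntil {
      // Poll `condition` every intervalMs until it holds or timeoutMs elapses;
      // returns whether the condition became true before the deadline.
      static boolean await(long timeoutMs, long intervalMs, BooleanSupplier condition)
          throws InterruptedException {
        long deadline = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(timeoutMs);
        while (System.nanoTime() < deadline) {
          if (condition.getAsBoolean()) {
            return true;
          }
          Thread.sleep(intervalMs);
        }
        return condition.getAsBoolean(); // one final check at the deadline
      }
    }

Unlike the deleted while loop, which could fall through to the assertion with only one region located, the waitFor variant is expected to fail the test on timeout and additionally checks that both regions have been assigned a server name.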



[3/3] hbase git commit: Revert "HBASE-17678 FilterList with MUST_PASS_ONE may lead to redundant cells returned"

2017-10-09 Thread busbey
Revert "HBASE-17678 FilterList with MUST_PASS_ONE may lead to redundant cells 
returned"

This reverts commit 256fc63007aecb63028b71ad1383d896f11db701.

Backing out filterlist regression, see HBASE-18957. Work continuing branch for 
HBASE-18410.

Signed-off-by: Peter Somogyi 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/aa509719
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/aa509719
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/aa509719

Branch: refs/heads/branch-1
Commit: aa50971947b2eee9b5b9b4700e0b8a0fa93b377f
Parents: c7dc0da
Author: Sean Busbey 
Authored: Fri Oct 6 14:04:15 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 17:06:14 2017 -0500

--
 .../apache/hadoop/hbase/filter/FilterList.java  |  82 +
 .../org/apache/hadoop/hbase/KeyValueUtil.java   |  15 ---
 .../hadoop/hbase/filter/TestFilterList.java | 117 +--
 3 files changed, 3 insertions(+), 211 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/aa509719/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index ddbd4a7..be22e5d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -21,10 +21,8 @@ package org.apache.hadoop.hbase.filter;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.List;
 
-import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Cell;
@@ -71,14 +69,6 @@ final public class FilterList extends Filter {
   private List filters = new ArrayList();
   private Filter seekHintFilter = null;
 
-  /**
-   * Save previous return code and previous cell for every filter in filter list. For MUST_PASS_ONE,
-   * we use the previous return code to decide whether we should pass current cell encountered to
-   * the filter. For MUST_PASS_ALL, the two list are meaningless.
-   */
-  private List prevFilterRCList = null;
-  private List prevCellList = null;
-
  /** Reference Cell used by {@link #transformCell(Cell)} for validation purpose. */
   private Cell referenceKV = null;
 
@@ -103,7 +93,6 @@ final public class FilterList extends Filter {
 } else {
   this.filters = new ArrayList(rowFilters);
 }
-initPrevListForMustPassOne(rowFilters.size());
   }
 
   /**
@@ -113,7 +102,6 @@ final public class FilterList extends Filter {
*/
   public FilterList(final Filter... rowFilters) {
 this.filters = new ArrayList(Arrays.asList(rowFilters));
-initPrevListForMustPassOne(filters.size());
   }
 
   /**
@@ -123,7 +111,6 @@ final public class FilterList extends Filter {
*/
   public FilterList(final Operator operator) {
 this.operator = operator;
-initPrevListForMustPassOne(filters.size());
   }
 
   /**
@@ -135,7 +122,6 @@ final public class FilterList extends Filter {
   public FilterList(final Operator operator, final List rowFilters) {
 this.filters = new ArrayList(rowFilters);
 this.operator = operator;
-initPrevListForMustPassOne(filters.size());
   }
 
   /**
@@ -147,18 +133,6 @@ final public class FilterList extends Filter {
   public FilterList(final Operator operator, final Filter... rowFilters) {
 this.filters = new ArrayList(Arrays.asList(rowFilters));
 this.operator = operator;
-initPrevListForMustPassOne(filters.size());
-  }
-
-  public void initPrevListForMustPassOne(int size) {
-if (operator == Operator.MUST_PASS_ONE) {
-  if (this.prevFilterRCList == null) {
-prevFilterRCList = new ArrayList(Collections.nCopies(size, (ReturnCode) null));
-  }
-  if (this.prevCellList == null) {
-prevCellList = new ArrayList(Collections.nCopies(size, (Cell) null));
-  }
-}
   }
 
   /**
@@ -191,10 +165,6 @@ final public class FilterList extends Filter {
   + this.isReversed());
 }
 this.filters.add(filter);
-if (operator == Operator.MUST_PASS_ONE) {
-  this.prevFilterRCList.add((ReturnCode) null);
-  this.prevCellList.add((Cell) null);
-}
   }
 
   @Override
@@ -202,10 +172,6 @@ final public class FilterList extends Filter {
 int listize = filters.size();
 for (int i = 0; i < listize; i++) {
   

[1/3] hbase git commit: HBASE-18957 add test that confirms 2 FamilyFilters in a FilterList using MUST_PASS_ONE operator will return results that match either of the FamilyFilters

2017-10-09 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-1 8d99b0cfe -> aa5097194


HBASE-18957 add test that confirms 2 FamilyFilters in a FilterList using MUST_PASS_ONE operator will return results that match either of the FamilyFilters

Amending-Author: Sean Busbey 

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8d77c1e9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8d77c1e9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8d77c1e9

Branch: refs/heads/branch-1
Commit: 8d77c1e95480d5658bd4ebccb5cc56bdd387c886
Parents: 8d99b0c
Author: Peter Somogyi 
Authored: Fri Oct 6 09:26:42 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 17:05:39 2017 -0500

--
 .../hbase/filter/TestFilterListOnMini.java  | 88 
 1 file changed, 88 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8d77c1e9/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
new file mode 100644
index 000..dd2399f
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
@@ -0,0 +1,88 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.filter;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.testclassification.FilterTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
+
+/**
+ * Tests filter Lists in ways that rely on a MiniCluster.
+ * Where possible, favor tests in TestFilterList and TestFilterFromRegionSide instead.
+ */
+@Category({MediumTests.class, FilterTests.class})
+public class TestFilterListOnMini {
+
+  private static final Log LOG = LogFactory.getLog(TestFilterListOnMini.class);
+  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+
+  @Rule
+  public TestName name = new TestName();
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+TEST_UTIL.startMiniCluster(1);
+  }
+
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+TEST_UTIL.shutdownMiniCluster();
+  }
+
+  @Test
+  public void testFiltersWithOR() throws Exception {
+TableName tn = TableName.valueOf(name.getMethodName());
+Table table = TEST_UTIL.createTable(tn, new String[] { "cf1", "cf2" });
+byte[] CF1 = Bytes.toBytes("cf1");
+byte[] CF2 = Bytes.toBytes("cf2");
+Put put1 = new Put(Bytes.toBytes("0"));
+put1.addColumn(CF1, Bytes.toBytes("col_a"), Bytes.toBytes(0));
+table.put(put1);
+Put put2 = new Put(Bytes.toBytes("0"));
+put2.addColumn(CF2, Bytes.toBytes("col_b"), Bytes.toBytes(0));
+table.put(put2);
+FamilyFilter filterCF1 =
+new FamilyFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator(CF1));
+FamilyFilter filterCF2 =
+new FamilyFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator(CF2));
+FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
+filterList.addFilter(filterCF1);
+filterList.addFilter(filterCF2);
+Scan scan = new Scan();
+scan.setFilter(filterList);
+ResultScanner scanner = table.getScanner(scan);
+LOG.info("Filter list: " + 
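For context, MUST_PASS_ONE evaluates its sub-filters as a logical OR, so the scan built above should return row "0" with cells from both cf1 and cf2, each FamilyFilter accepting one of the two families. A hedged sketch of that verification loop (standard HBase client API; the assertions in the committed test may differ in detail):

    for (Result rr = scanner.next(); rr != null; rr = scanner.next()) {
      // Under MUST_PASS_ONE (OR) the row must surface cells from both
      // families, since each FamilyFilter matches exactly one of them.
      Assert.assertTrue(rr.containsColumn(CF1, Bytes.toBytes("col_a")));
      Assert.assertTrue(rr.containsColumn(CF2, Bytes.toBytes("col_b")));
    }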

[2/3] hbase git commit: Revert "HBASE-15410 Utilize the max seek value when all Filters in MUST_PASS_ALL FilterList return SEEK_NEXT_USING_HINT"

2017-10-09 Thread busbey
Revert "HBASE-15410 Utilize the max seek value when all Filters in 
MUST_PASS_ALL FilterList return SEEK_NEXT_USING_HINT"

This reverts commit ead106324474d2faff11b4e1779e575aec67b59c.

Backing out filterlist regression, see HBASE-18957. Work continuing branch for 
HBASE-18410.

Signed-off-by: Peter Somogyi 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c7dc0da8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c7dc0da8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c7dc0da8

Branch: refs/heads/branch-1
Commit: c7dc0da849e543cc5205f54eb69c5d75bd442f73
Parents: 8d77c1e
Author: Sean Busbey 
Authored: Fri Oct 6 14:04:00 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 17:06:09 2017 -0500

--
 .../apache/hadoop/hbase/filter/FilterList.java  | 32 
 .../hadoop/hbase/filter/TestFilterList.java |  4 +--
 2 files changed, 8 insertions(+), 28 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c7dc0da8/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 74736db..ddbd4a7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -22,15 +22,12 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
-import java.util.HashSet;
 import java.util.List;
-import java.util.Set;
 
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -72,7 +69,7 @@ final public class FilterList extends Filter {
   private static final int MAX_LOG_FILTERS = 5;
   private Operator operator = Operator.MUST_PASS_ALL;
   private List filters = new ArrayList();
-  private Set seekHintFilter = new HashSet<>();
+  private Filter seekHintFilter = null;
 
   /**
* Save previous return code and previous cell for every filter in filter list. For MUST_PASS_ONE,
@@ -210,7 +207,7 @@ final public class FilterList extends Filter {
 prevCellList.set(i, null);
   }
 }
-seekHintFilter.clear();
+seekHintFilter = null;
   }
 
   @Override
@@ -317,7 +314,6 @@ final public class FilterList extends Filter {
 justification="Intentional")
   public ReturnCode filterKeyValue(Cell v) throws IOException {
 this.referenceKV = v;
-seekHintFilter.clear();
 
 // Accumulates successive transformation of every filter that includes the Cell:
 Cell transformed = v;
@@ -349,12 +345,10 @@ final public class FilterList extends Filter {
   transformed = filter.transformCell(transformed);
   continue;
 case SEEK_NEXT_USING_HINT:
-  seekHintFilter.add(filter);
-  continue;
+  seekHintFilter = filter;
+  return code;
 default:
-  if (seekHintFilter.isEmpty()) {
-return code;
-  }
+  return code;
 }
   } else if (operator == Operator.MUST_PASS_ONE) {
 Cell prevCell = this.prevCellList.get(i);
@@ -404,10 +398,6 @@ final public class FilterList extends Filter {
   }
 }
 
-if (!seekHintFilter.isEmpty()) {
-  return ReturnCode.SEEK_NEXT_USING_HINT;
-}
-
 // Save the transformed Cell for transform():
 this.transformedKV = transformed;
 
@@ -532,17 +522,7 @@ final public class FilterList extends Filter {
   public Cell getNextCellHint(Cell currentKV) throws IOException {
 Cell keyHint = null;
 if (operator == Operator.MUST_PASS_ALL) {
-  for (Filter filter : seekHintFilter) {
-if (filter.filterAllRemaining()) continue;
-Cell curKeyHint = filter.getNextCellHint(currentKV);
-if (keyHint == null) {
-  keyHint = curKeyHint;
-  continue;
-}
-if (KeyValue.COMPARATOR.compare(keyHint, curKeyHint) < 0) {
-  keyHint = curKeyHint;
-}
-  }
+  if (seekHintFilter != null) keyHint = seekHintFilter.getNextCellHint(currentKV);
   return keyHint;
 }
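The removed `-` lines above are the core of the reverted HBASE-15410 optimization: under MUST_PASS_ALL, when several sub-filters all answer SEEK_NEXT_USING_HINT, it is safe to seek to the furthest of their hints, because a cell that sorts before any one filter's hint cannot pass that filter and therefore cannot pass the conjunction. A distilled, self-contained sketch of that merge rule (toy generic types, not the HBase code):

    import java.util.Arrays;
    import java.util.Comparator;
    import java.util.List;

    final class SeekHintMerge {
      // Keep the largest (furthest) hint offered by any sub-filter;
      // null entries model sub-filters that offered no hint.
      static <T> T maxHint(List<T> hints, Comparator<T> cmp) {
        T best = null;
        for (T hint : hints) {
          if (hint == null) continue;
          if (best == null || cmp.compare(best, hint) < 0) {
            best = hint;
          }
        }
        return best;
      }

      public static void main(String[] args) {
        // Toy stand-in for row keys: the merged seek target is the maximum.
        System.out.println(maxHint(Arrays.asList("row3", "row7", null, "row5"),
            Comparator.naturalOrder())); // prints row7
      }
    }

The revert returns to tracking a single seekHintFilter, so only one filter's hint is consulted even when several could have been combined.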
 


[5/6] hbase git commit: Revert "HBASE-17678 FilterList with MUST_PASS_ONE lead to redundancy cells returned - addendum"

2017-10-09 Thread busbey
Revert "HBASE-17678 FilterList with MUST_PASS_ONE lead to redundancy cells 
returned - addendum"

This reverts commit 347bef8d336377ae38bb6c357da9d033ccba155c.

Backing out filterlist regression, see HBASE-18957. Work continuing branch for 
HBASE-18410.

Signed-off-by: Peter Somogyi 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/852b5783
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/852b5783
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/852b5783

Branch: refs/heads/branch-2
Commit: 852b5783841aeb2a55b9ec6dd7ef37b4f2269b1c
Parents: 1d07c8e
Author: Sean Busbey 
Authored: Fri Oct 6 13:20:03 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 16:54:22 2017 -0500

--
 .../java/org/apache/hadoop/hbase/filter/FilterList.java | 12 ++--
 1 file changed, 2 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/852b5783/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 33d5b45..87dae1c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -27,7 +27,6 @@ import java.util.List;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -145,7 +144,7 @@ final public class FilterList extends FilterBase {
 
   public void initPrevListForMustPassOne(int size) {
 if (operator == Operator.MUST_PASS_ONE) {
-  if (this.prevFilterRCList == null) {
+  if (this.prevCellList == null) {
 prevFilterRCList = new ArrayList<>(Collections.nCopies(size, null));
   }
   if (this.prevCellList == null) {
@@ -404,14 +403,7 @@ final public class FilterList extends FilterBase {
 ReturnCode localRC = filter.filterKeyValue(c);
 // Update previous cell and return code we encountered.
 prevFilterRCList.set(i, localRC);
-if (c == null || localRC == ReturnCode.INCLUDE || localRC == ReturnCode.SKIP) {
-  // If previous return code is INCLUDE or SKIP, we should always pass the next cell to the
-  // corresponding sub-filter(need not test shouldPassCurrentCellToFilter() method), So we
-  // need not save current cell to prevCellList for saving heap memory.
-  prevCellList.set(i, null);
-} else {
-  prevCellList.set(i, KeyValueUtil.toNewKeyCell(c));
-}
+prevCellList.set(i, c);
 
 if (localRC != ReturnCode.SEEK_NEXT_USING_HINT) {
   seenNonHintReturnCode = true;
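The `-` lines dropped by this revert were a heap optimization for the MUST_PASS_ONE bookkeeping: the previously seen cell only matters when the previous return code might suppress handing the next cell to the sub-filter, and even then only the cell's key is needed for the later comparison. A toy sketch of that decision, with keyOnlyCopy standing in for HBase's KeyValueUtil.toNewKeyCell (the generic types here are illustrative, not the HBase signatures):

    import java.util.function.UnaryOperator;

    final class PrevCellMemo {
      enum ReturnCode { INCLUDE, SKIP, NEXT_COL, NEXT_ROW, SEEK_NEXT_USING_HINT }

      // Decide what to remember about the cell a sub-filter just evaluated.
      static <C> C remember(C cell, ReturnCode rc, UnaryOperator<C> keyOnlyCopy) {
        if (cell == null || rc == ReturnCode.INCLUDE || rc == ReturnCode.SKIP) {
          // After INCLUDE or SKIP the next cell is always passed to the
          // sub-filter, so there is nothing to compare later: keep null.
          return null;
        }
        // Otherwise retain only the key portion so the saved reference
        // does not pin the whole backing data block in memory.
        return keyOnlyCopy.apply(cell);
      }
    }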



[2/6] hbase git commit: Revert "HBASE-18904 Missing break in NEXT_ROW case of FilterList#mergeReturnCodeForOrOperator()"

2017-10-09 Thread busbey
Revert "HBASE-18904 Missing break in NEXT_ROW case of 
FilterList#mergeReturnCodeForOrOperator()"

This reverts commit d142f071295f67177074cd130eb0ee2d30bdfbcd.

Backing out filterlist regression, see HBASE-18957. Work continuing branch for 
HBASE-18410.

Signed-off-by: Peter Somogyi 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3dd66e6c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3dd66e6c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3dd66e6c

Branch: refs/heads/branch-2
Commit: 3dd66e6cda4898ce381bc19d5830c0029992292f
Parents: 08aea43
Author: Sean Busbey 
Authored: Fri Oct 6 13:18:39 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 16:53:50 2017 -0500

--
 .../src/main/java/org/apache/hadoop/hbase/filter/FilterList.java   | 2 --
 1 file changed, 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3dd66e6c/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 033ca83..7f2405d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -566,7 +566,6 @@ final public class FilterList extends FilterBase {
   if (isInReturnCodes(rc, ReturnCode.NEXT_ROW)) {
 return ReturnCode.NEXT_ROW;
   }
-  break;
 case SEEK_NEXT_USING_HINT:
   if (isInReturnCodes(rc, ReturnCode.INCLUDE, ReturnCode.INCLUDE_AND_NEXT_COL,
 ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW)) {
@@ -578,7 +577,6 @@ final public class FilterList extends FilterBase {
   if (isInReturnCodes(rc, ReturnCode.SEEK_NEXT_USING_HINT)) {
 return ReturnCode.SEEK_NEXT_USING_HINT;
   }
-  break;
 }
 throw new IllegalStateException(
 "Received code is not valid. rc: " + rc + ", localRC: " + localRC);



[4/6] hbase git commit: Revert "HBASE-15410 Utilize the max seek value when all Filters in MUST_PASS_ALL FilterList return SEEK_NEXT_USING_HINT"

2017-10-09 Thread busbey
Revert "HBASE-15410 Utilize the max seek value when all Filters in 
MUST_PASS_ALL FilterList return SEEK_NEXT_USING_HINT"

This reverts commit 743f3ae221531f553dba84de7fc0adfde70cd04b.

Backing out filterlist regression, see HBASE-18957. Work continuing branch for 
HBASE-18410.

Signed-off-by: Peter Somogyi 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1d07c8ee
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1d07c8ee
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1d07c8ee

Branch: refs/heads/branch-2
Commit: 1d07c8eec48578c7b462811cf5ada15fd8c36e6b
Parents: 2dcdd13
Author: Sean Busbey 
Authored: Fri Oct 6 13:19:22 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 16:54:20 2017 -0500

--
 .../apache/hadoop/hbase/filter/FilterList.java  | 31 
 .../hadoop/hbase/filter/TestFilterList.java |  6 ++--
 2 files changed, 9 insertions(+), 28 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1d07c8ee/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 1e80a7e..33d5b45 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -22,9 +22,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
-import java.util.HashSet;
 import java.util.List;
-import java.util.Set;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
@@ -67,7 +65,7 @@ final public class FilterList extends FilterBase {
   private static final int MAX_LOG_FILTERS = 5;
   private Operator operator = Operator.MUST_PASS_ALL;
   private final List filters;
-  private Set seekHintFilter = new HashSet<>();
+  private Filter seekHintFilter = null;
 
   /**
* Save previous return code and previous cell for every filter in filter list. For MUST_PASS_ONE,
@@ -236,7 +234,7 @@ final public class FilterList extends FilterBase {
 prevCellList.set(i, null);
   }
 }
-seekHintFilter.clear();
+seekHintFilter = null;
   }
 
   @Override
@@ -360,7 +358,6 @@ final public class FilterList extends FilterBase {
   return ReturnCode.INCLUDE;
 }
 this.referenceCell = c;
-seekHintFilter.clear();
 
 // Accumulates successive transformation of every filter that includes the Cell:
 Cell transformed = c;
@@ -392,12 +389,10 @@ final public class FilterList extends FilterBase {
   transformed = filter.transformCell(transformed);
   continue;
 case SEEK_NEXT_USING_HINT:
-  seekHintFilter.add(filter);
-  continue;
+  seekHintFilter = filter;
+  return code;
 default:
-  if (seekHintFilter.isEmpty()) {
-return code;
-  }
+  return code;
 }
   } else if (operator == Operator.MUST_PASS_ONE) {
 Cell prevCell = this.prevCellList.get(i);
@@ -447,10 +442,6 @@ final public class FilterList extends FilterBase {
   }
 }
 
-if (!seekHintFilter.isEmpty()) {
-  return ReturnCode.SEEK_NEXT_USING_HINT;
-}
-
 // Save the transformed Cell for transform():
 this.transformedCell = transformed;
 
@@ -574,17 +565,7 @@ final public class FilterList extends FilterBase {
 }
 Cell keyHint = null;
 if (operator == Operator.MUST_PASS_ALL) {
-  for (Filter filter : seekHintFilter) {
-if (filter.filterAllRemaining()) continue;
-Cell curKeyHint = filter.getNextCellHint(currentCell);
-if (keyHint == null) {
-  keyHint = curKeyHint;
-  continue;
-}
-if (CellComparator.COMPARATOR.compare(keyHint, curKeyHint) < 0) {
-  keyHint = curKeyHint;
-}
-  }
+  if (seekHintFilter != null) keyHint = seekHintFilter.getNextCellHint(currentCell);
   return keyHint;
 }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/1d07c8ee/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
index f20a9ba..90f95a5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
+++ 

[6/6] hbase git commit: Revert "HBASE-17678 FilterList with MUST_PASS_ONE may lead to redundant cells returned"

2017-10-09 Thread busbey
Revert "HBASE-17678 FilterList with MUST_PASS_ONE may lead to redundant cells 
returned"

This reverts commit 0d0c330401ade938bf934aafd79ec23705edcc60.

Backing out filterlist regression, see HBASE-18957. Work continuing branch for 
HBASE-18410.

Signed-off-by: Peter Somogyi 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/38e52bb2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/38e52bb2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/38e52bb2

Branch: refs/heads/branch-2
Commit: 38e52bb29e173c0e47f29214618133e09cd5f96b
Parents: 852b578
Author: Sean Busbey 
Authored: Fri Oct 6 13:20:17 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 16:54:26 2017 -0500

--
 .../apache/hadoop/hbase/filter/FilterList.java  |  76 +---
 .../hadoop/hbase/filter/TestFilterList.java | 117 ---
 2 files changed, 2 insertions(+), 191 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/38e52bb2/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 87dae1c..9c4da41 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -66,14 +66,6 @@ final public class FilterList extends FilterBase {
   private final List filters;
   private Filter seekHintFilter = null;
 
-  /**
-   * Save previous return code and previous cell for every filter in filter list. For MUST_PASS_ONE,
-   * we use the previous return code to decide whether we should pass current cell encountered to
-   * the filter. For MUST_PASS_ALL, the two list are meaningless.
-   */
-  private List prevFilterRCList = null;
-  private List prevCellList = null;
-
  /** Reference Cell used by {@link #transformCell(Cell)} for validation purpose. */
   private Cell referenceCell = null;
 
@@ -95,7 +87,6 @@ final public class FilterList extends FilterBase {
   public FilterList(final List rowFilters) {
 reversed = getReversed(rowFilters, reversed);
 this.filters = new ArrayList<>(rowFilters);
-initPrevListForMustPassOne(rowFilters.size());
   }
 
   /**
@@ -115,7 +106,6 @@ final public class FilterList extends FilterBase {
   public FilterList(final Operator operator) {
 this.operator = operator;
 this.filters = new ArrayList<>();
-initPrevListForMustPassOne(filters.size());
   }
 
   /**
@@ -127,7 +117,6 @@ final public class FilterList extends FilterBase {
   public FilterList(final Operator operator, final List rowFilters) {
 this(rowFilters);
 this.operator = operator;
-initPrevListForMustPassOne(rowFilters.size());
   }
 
   /**
@@ -139,21 +128,8 @@ final public class FilterList extends FilterBase {
   public FilterList(final Operator operator, final Filter... rowFilters) {
 this(rowFilters);
 this.operator = operator;
-initPrevListForMustPassOne(rowFilters.length);
-  }
-
-  public void initPrevListForMustPassOne(int size) {
-if (operator == Operator.MUST_PASS_ONE) {
-  if (this.prevCellList == null) {
-prevFilterRCList = new ArrayList<>(Collections.nCopies(size, null));
-  }
-  if (this.prevCellList == null) {
-prevCellList = new ArrayList<>(Collections.nCopies(size, null));
-  }
-}
   }
 
-
   /**
* Get the operator.
*
@@ -208,10 +184,6 @@ final public class FilterList extends FilterBase {
   public void addFilter(List filters) {
 checkReversed(filters, isReversed());
 this.filters.addAll(filters);
-if (operator == Operator.MUST_PASS_ONE) {
-  this.prevFilterRCList.addAll(Collections.nCopies(filters.size(), null));
-  this.prevCellList.addAll(Collections.nCopies(filters.size(), null));
-}
   }
 
   /**
@@ -228,10 +200,6 @@ final public class FilterList extends FilterBase {
 int listize = filters.size();
 for (int i = 0; i < listize; i++) {
   filters.get(i).reset();
-  if (operator == Operator.MUST_PASS_ONE) {
-prevFilterRCList.set(i, null);
-prevCellList.set(i, null);
-  }
 }
 seekHintFilter = null;
   }
@@ -314,41 +282,6 @@ final public class FilterList extends FilterBase {
 return this.transformedCell;
   }
 
-  /**
-   * For MUST_PASS_ONE, we cannot make sure that when filter-A in filter list return NEXT_COL then
-   * the next cell passing to filterList will be the first cell in next column, because if filter-B
-   * in filter list return SKIP, then the 

[3/6] hbase git commit: Revert "HBASE-18160 Fix incorrect logic in FilterList.filterKeyValue"

2017-10-09 Thread busbey
Revert "HBASE-18160 Fix incorrect logic in FilterList.filterKeyValue"

This reverts commit 7c2622baf75ac414547488799216cdf2b37be7bd.

Backing out filterlist regression, see HBASE-18957. Work continuing branch for HBASE-18410.

Signed-off-by: Peter Somogyi 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2dcdd13a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2dcdd13a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2dcdd13a

Branch: refs/heads/branch-2
Commit: 2dcdd13a01884dcd4176299aa45bd80079c675b2
Parents: 3dd66e6
Author: Sean Busbey 
Authored: Fri Oct 6 13:19:13 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 16:54:15 2017 -0500

--
 .../apache/hadoop/hbase/filter/FilterList.java  | 542 +++
 .../hadoop/hbase/filter/TestFilterList.java | 146 ++---
 2 files changed, 219 insertions(+), 469 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2dcdd13a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 7f2405d..1e80a7e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -90,53 +90,62 @@ final public class FilterList extends FilterBase {
   private Cell transformedCell = null;
 
   /**
-   * Constructor that takes a set of {@link Filter}s and an operator.
-   * @param operator Operator to process filter set with.
-   * @param rowFilters Set of row filters.
-   */
-  public FilterList(final Operator operator, final List rowFilters) {
-reversed = checkAndGetReversed(rowFilters, reversed);
-this.filters = new ArrayList<>(rowFilters);
-this.operator = operator;
-initPrevListForMustPassOne(rowFilters.size());
-  }
-
-  /**
-   * Constructor that takes a set of {@link Filter}s. The default operator MUST_PASS_ALL is assumed.
+   * Constructor that takes a set of {@link Filter}s. The default operator
+   * MUST_PASS_ALL is assumed.
* All filters are cloned to internal list.
* @param rowFilters list of filters
*/
   public FilterList(final List rowFilters) {
-this(Operator.MUST_PASS_ALL, rowFilters);
+reversed = getReversed(rowFilters, reversed);
+this.filters = new ArrayList<>(rowFilters);
+initPrevListForMustPassOne(rowFilters.size());
   }
 
   /**
-   * Constructor that takes a var arg number of {@link Filter}s. The default operator MUST_PASS_ALL
-   * is assumed.
+   * Constructor that takes a var arg number of {@link Filter}s. The fefault operator
+   * MUST_PASS_ALL is assumed.
* @param rowFilters
*/
   public FilterList(final Filter... rowFilters) {
-this(Operator.MUST_PASS_ALL, Arrays.asList(rowFilters));
+this(Arrays.asList(rowFilters));
   }
 
   /**
* Constructor that takes an operator.
+   *
* @param operator Operator to process filter set with.
*/
   public FilterList(final Operator operator) {
-this(operator, new ArrayList<>());
+this.operator = operator;
+this.filters = new ArrayList<>();
+initPrevListForMustPassOne(filters.size());
+  }
+
+  /**
+   * Constructor that takes a set of {@link Filter}s and an operator.
+   *
+   * @param operator Operator to process filter set with.
+   * @param rowFilters Set of row filters.
+   */
+  public FilterList(final Operator operator, final List rowFilters) {
+this(rowFilters);
+this.operator = operator;
+initPrevListForMustPassOne(rowFilters.size());
   }
 
   /**
* Constructor that takes a var arg number of {@link Filter}s and an operator.
+   *
* @param operator Operator to process filter set with.
* @param rowFilters Filters to use
*/
   public FilterList(final Operator operator, final Filter... rowFilters) {
-this(operator, Arrays.asList(rowFilters));
+this(rowFilters);
+this.operator = operator;
+initPrevListForMustPassOne(rowFilters.length);
   }
 
-  private void initPrevListForMustPassOne(int size) {
+  public void initPrevListForMustPassOne(int size) {
 if (operator == Operator.MUST_PASS_ONE) {
   if (this.prevFilterRCList == null) {
 prevFilterRCList = new ArrayList<>(Collections.nCopies(size, null));
@@ -147,8 +156,10 @@ final public class FilterList extends FilterBase {
 }
   }
 
+
   /**
* Get the operator.
+   *
* @return operator
*/
   public Operator getOperator() {
@@ -157,6 +168,7 @@ final public class FilterList extends 
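Beyond the prev-list bookkeeping, the `-` lines in these hunks undo a cleanup in which every FilterList constructor delegated to one canonical (operator, filters) form; the revert restores the duplicated field assignments. The delegation pattern itself, in a generic sketch (toy types, not the HBase class):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    final class ChainedCtors {
      private final String operator;
      private final List<String> filters;

      // Canonical constructor: all others funnel through here, so the
      // initialization logic lives in exactly one place.
      ChainedCtors(String operator, List<String> filters) {
        this.operator = operator;
        this.filters = new ArrayList<>(filters);
      }

      ChainedCtors(List<String> filters) {
        this("MUST_PASS_ALL", filters); // default operator
      }

      ChainedCtors(String... filters) {
        this(Arrays.asList(filters));
      }
    }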

[1/6] hbase git commit: HBASE-18957 add test that confirms 2 FamilyFilters in a FilterList using MUST_PASS_ONE operator will return results that match either of the FamilyFilters

2017-10-09 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-2 e4647da0d -> 38e52bb29


HBASE-18957 add test that confirms 2 FamilyFilters in a FilterList using MUST_PASS_ONE operator will return results that match either of the FamilyFilters

Amending-Author: Sean Busbey 

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/08aea434
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/08aea434
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/08aea434

Branch: refs/heads/branch-2
Commit: 08aea43472e662cc23dfced74af9cfffa46b2081
Parents: e4647da
Author: Peter Somogyi 
Authored: Fri Oct 6 09:26:42 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 16:53:20 2017 -0500

--
 .../hbase/filter/TestFilterListOnMini.java  | 88 
 1 file changed, 88 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/08aea434/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
new file mode 100644
index 000..dd2399f
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
@@ -0,0 +1,88 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.filter;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.testclassification.FilterTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
+
+/**
+ * Tests filter Lists in ways that rely on a MiniCluster.
+ * Where possible, favor tests in TestFilterList and TestFilterFromRegionSide instead.
+ */
+@Category({MediumTests.class, FilterTests.class})
+public class TestFilterListOnMini {
+
+  private static final Log LOG = LogFactory.getLog(TestFilterListOnMini.class);
+  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+
+  @Rule
+  public TestName name = new TestName();
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+TEST_UTIL.startMiniCluster(1);
+  }
+
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+TEST_UTIL.shutdownMiniCluster();
+  }
+
+  @Test
+  public void testFiltersWithOR() throws Exception {
+TableName tn = TableName.valueOf(name.getMethodName());
+Table table = TEST_UTIL.createTable(tn, new String[] { "cf1", "cf2" });
+byte[] CF1 = Bytes.toBytes("cf1");
+byte[] CF2 = Bytes.toBytes("cf2");
+Put put1 = new Put(Bytes.toBytes("0"));
+put1.addColumn(CF1, Bytes.toBytes("col_a"), Bytes.toBytes(0));
+table.put(put1);
+Put put2 = new Put(Bytes.toBytes("0"));
+put2.addColumn(CF2, Bytes.toBytes("col_b"), Bytes.toBytes(0));
+table.put(put2);
+FamilyFilter filterCF1 =
+new FamilyFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator(CF1));
+FamilyFilter filterCF2 =
+new FamilyFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator(CF2));
+FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
+filterList.addFilter(filterCF1);
+filterList.addFilter(filterCF2);
+Scan scan = new Scan();
+scan.setFilter(filterList);
+ResultScanner scanner = table.getScanner(scan);
+LOG.info("Filter list: " + 

[4/6] hbase git commit: Revert "HBASE-18904 Missing break in NEXT_ROW case of FilterList#mergeReturnCodeForOrOperator()"

2017-10-09 Thread busbey
Revert "HBASE-18904 Missing break in NEXT_ROW case of 
FilterList#mergeReturnCodeForOrOperator()"

This reverts commit 3bd824facadd40dabb06e19ccad911be0ea6915a.

Backing out filterlist regression, see HBASE-18957. Work continuing branch for 
HBASE-18410.

Signed-off-by: Peter Somogyi 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e8fa9cc8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e8fa9cc8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e8fa9cc8

Branch: refs/heads/master
Commit: e8fa9cc85f708d8866fdff7b7a502fdeefc8f0d4
Parents: 9cabf34
Author: Sean Busbey 
Authored: Fri Oct 6 11:19:00 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 16:49:20 2017 -0500

--
 .../src/main/java/org/apache/hadoop/hbase/filter/FilterList.java   | 2 --
 1 file changed, 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e8fa9cc8/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 033ca83..7f2405d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -566,7 +566,6 @@ final public class FilterList extends FilterBase {
   if (isInReturnCodes(rc, ReturnCode.NEXT_ROW)) {
 return ReturnCode.NEXT_ROW;
   }
-  break;
 case SEEK_NEXT_USING_HINT:
   if (isInReturnCodes(rc, ReturnCode.INCLUDE, ReturnCode.INCLUDE_AND_NEXT_COL,
 ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW)) {
@@ -578,7 +577,6 @@ final public class FilterList extends FilterBase {
   if (isInReturnCodes(rc, ReturnCode.SEEK_NEXT_USING_HINT)) {
 return ReturnCode.SEEK_NEXT_USING_HINT;
   }
-  break;
 }
 throw new IllegalStateException(
 "Received code is not valid. rc: " + rc + ", localRC: " + localRC);



[2/6] hbase git commit: Revert "HBASE-15410 Utilize the max seek value when all Filters in MUST_PASS_ALL FilterList return SEEK_NEXT_USING_HINT"

2017-10-09 Thread busbey
Revert "HBASE-15410 Utilize the max seek value when all Filters in 
MUST_PASS_ALL FilterList return SEEK_NEXT_USING_HINT"

This reverts commit df34300cd3f89c1efdea43b0b2ecb64c317e1a34.

Backing out filterlist regression, see HBASE-18957. Work continuing branch for 
HBASE-18410.

Signed-off-by: Peter Somogyi 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4eea0d92
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4eea0d92
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4eea0d92

Branch: refs/heads/master
Commit: 4eea0d923e37180b25d89db676aed2f699f5e0ba
Parents: 183b3e3
Author: Sean Busbey 
Authored: Fri Oct 6 11:19:53 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 16:49:20 2017 -0500

--
 .../apache/hadoop/hbase/filter/FilterList.java  | 31 
 .../hadoop/hbase/filter/TestFilterList.java |  6 ++--
 2 files changed, 9 insertions(+), 28 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4eea0d92/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 1e80a7e..33d5b45 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -22,9 +22,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
-import java.util.HashSet;
 import java.util.List;
-import java.util.Set;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
@@ -67,7 +65,7 @@ final public class FilterList extends FilterBase {
   private static final int MAX_LOG_FILTERS = 5;
   private Operator operator = Operator.MUST_PASS_ALL;
   private final List filters;
-  private Set seekHintFilter = new HashSet<>();
+  private Filter seekHintFilter = null;
 
   /**
* Save previous return code and previous cell for every filter in filter list. For MUST_PASS_ONE,
@@ -236,7 +234,7 @@ final public class FilterList extends FilterBase {
 prevCellList.set(i, null);
   }
 }
-seekHintFilter.clear();
+seekHintFilter = null;
   }
 
   @Override
@@ -360,7 +358,6 @@ final public class FilterList extends FilterBase {
   return ReturnCode.INCLUDE;
 }
 this.referenceCell = c;
-seekHintFilter.clear();
 
 // Accumulates successive transformation of every filter that includes the Cell:
 Cell transformed = c;
@@ -392,12 +389,10 @@ final public class FilterList extends FilterBase {
   transformed = filter.transformCell(transformed);
   continue;
 case SEEK_NEXT_USING_HINT:
-  seekHintFilter.add(filter);
-  continue;
+  seekHintFilter = filter;
+  return code;
 default:
-  if (seekHintFilter.isEmpty()) {
-return code;
-  }
+  return code;
 }
   } else if (operator == Operator.MUST_PASS_ONE) {
 Cell prevCell = this.prevCellList.get(i);
@@ -447,10 +442,6 @@ final public class FilterList extends FilterBase {
   }
 }
 
-if (!seekHintFilter.isEmpty()) {
-  return ReturnCode.SEEK_NEXT_USING_HINT;
-}
-
 // Save the transformed Cell for transform():
 this.transformedCell = transformed;
 
@@ -574,17 +565,7 @@ final public class FilterList extends FilterBase {
 }
 Cell keyHint = null;
 if (operator == Operator.MUST_PASS_ALL) {
-  for (Filter filter : seekHintFilter) {
-if (filter.filterAllRemaining()) continue;
-Cell curKeyHint = filter.getNextCellHint(currentCell);
-if (keyHint == null) {
-  keyHint = curKeyHint;
-  continue;
-}
-if (CellComparator.COMPARATOR.compare(keyHint, curKeyHint) < 0) {
-  keyHint = curKeyHint;
-}
-  }
+  if (seekHintFilter != null) keyHint = seekHintFilter.getNextCellHint(currentCell);
   return keyHint;
 }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/4eea0d92/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
index f20a9ba..90f95a5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
+++ 

[5/6] hbase git commit: Revert "HBASE-17678 FilterList with MUST_PASS_ONE lead to redundancy cells returned - addendum"

2017-10-09 Thread busbey
Revert "HBASE-17678 FilterList with MUST_PASS_ONE lead to redundancy cells 
returned - addendum"

This reverts commit 0eeafa04f26f2388602a7c74b496e6682716f50a.

Backing out filterlist regression, see HBASE-18957. Work continuing branch for 
HBASE-18410.

Signed-off-by: Peter Somogyi 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f97c0bd8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f97c0bd8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f97c0bd8

Branch: refs/heads/master
Commit: f97c0bd8b55a09875dd94a32d8785dcf1944ea5c
Parents: 4eea0d9
Author: Sean Busbey 
Authored: Fri Oct 6 11:21:10 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 16:49:20 2017 -0500

--
 .../java/org/apache/hadoop/hbase/filter/FilterList.java | 12 ++--
 1 file changed, 2 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f97c0bd8/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 33d5b45..87dae1c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -27,7 +27,6 @@ import java.util.List;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -145,7 +144,7 @@ final public class FilterList extends FilterBase {
 
   public void initPrevListForMustPassOne(int size) {
 if (operator == Operator.MUST_PASS_ONE) {
-  if (this.prevFilterRCList == null) {
+  if (this.prevCellList == null) {
 prevFilterRCList = new ArrayList<>(Collections.nCopies(size, null));
   }
   if (this.prevCellList == null) {
@@ -404,14 +403,7 @@ final public class FilterList extends FilterBase {
 ReturnCode localRC = filter.filterKeyValue(c);
 // Update previous cell and return code we encountered.
 prevFilterRCList.set(i, localRC);
-if (c == null || localRC == ReturnCode.INCLUDE || localRC == ReturnCode.SKIP) {
-  // If previous return code is INCLUDE or SKIP, we should always pass the next cell to the
-  // corresponding sub-filter(need not test shouldPassCurrentCellToFilter() method), So we
-  // need not save current cell to prevCellList for saving heap memory.
-  prevCellList.set(i, null);
-} else {
-  prevCellList.set(i, KeyValueUtil.toNewKeyCell(c));
-}
+prevCellList.set(i, c);
 
 if (localRC != ReturnCode.SEEK_NEXT_USING_HINT) {
   seenNonHintReturnCode = true;



[6/6] hbase git commit: Revert "HBASE-18160 Fix incorrect logic in FilterList.filterKeyValue"

2017-10-09 Thread busbey
Revert "HBASE-18160 Fix incorrect logic in FilterList.filterKeyValue"

This reverts commit f54cc1ca51440ee0f445a008f2f31697730fd7fe.

Backing out filterlist regression, see HBASE-18957. Work continuing branch for HBASE-18410.

Signed-off-by: Peter Somogyi 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/183b3e31
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/183b3e31
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/183b3e31

Branch: refs/heads/master
Commit: 183b3e31bd6255cb7b1e312f37a06fb60d7f21d7
Parents: e8fa9cc
Author: Sean Busbey 
Authored: Fri Oct 6 11:19:39 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 16:49:20 2017 -0500

--
 .../apache/hadoop/hbase/filter/FilterList.java  | 542 +++
 .../hadoop/hbase/filter/TestFilterList.java | 146 ++---
 2 files changed, 219 insertions(+), 469 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/183b3e31/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 7f2405d..1e80a7e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -90,53 +90,62 @@ final public class FilterList extends FilterBase {
   private Cell transformedCell = null;
 
   /**
-   * Constructor that takes a set of {@link Filter}s and an operator.
-   * @param operator Operator to process filter set with.
-   * @param rowFilters Set of row filters.
-   */
-  public FilterList(final Operator operator, final List rowFilters) {
-reversed = checkAndGetReversed(rowFilters, reversed);
-this.filters = new ArrayList<>(rowFilters);
-this.operator = operator;
-initPrevListForMustPassOne(rowFilters.size());
-  }
-
-  /**
-   * Constructor that takes a set of {@link Filter}s. The default operator MUST_PASS_ALL is assumed.
+   * Constructor that takes a set of {@link Filter}s. The default operator
+   * MUST_PASS_ALL is assumed.
* All filters are cloned to internal list.
* @param rowFilters list of filters
*/
   public FilterList(final List rowFilters) {
-this(Operator.MUST_PASS_ALL, rowFilters);
+reversed = getReversed(rowFilters, reversed);
+this.filters = new ArrayList<>(rowFilters);
+initPrevListForMustPassOne(rowFilters.size());
   }
 
   /**
-   * Constructor that takes a var arg number of {@link Filter}s. The default operator MUST_PASS_ALL
-   * is assumed.
+   * Constructor that takes a var arg number of {@link Filter}s. The fefault operator
+   * MUST_PASS_ALL is assumed.
* @param rowFilters
*/
   public FilterList(final Filter... rowFilters) {
-this(Operator.MUST_PASS_ALL, Arrays.asList(rowFilters));
+this(Arrays.asList(rowFilters));
   }
 
   /**
* Constructor that takes an operator.
+   *
* @param operator Operator to process filter set with.
*/
   public FilterList(final Operator operator) {
-this(operator, new ArrayList<>());
+this.operator = operator;
+this.filters = new ArrayList<>();
+initPrevListForMustPassOne(filters.size());
+  }
+
+  /**
+   * Constructor that takes a set of {@link Filter}s and an operator.
+   *
+   * @param operator Operator to process filter set with.
+   * @param rowFilters Set of row filters.
+   */
+  public FilterList(final Operator operator, final List rowFilters) {
+this(rowFilters);
+this.operator = operator;
+initPrevListForMustPassOne(rowFilters.size());
   }
 
   /**
* Constructor that takes a var arg number of {@link Filter}s and an operator.
+   *
* @param operator Operator to process filter set with.
* @param rowFilters Filters to use
*/
   public FilterList(final Operator operator, final Filter... rowFilters) {
-this(operator, Arrays.asList(rowFilters));
+this(rowFilters);
+this.operator = operator;
+initPrevListForMustPassOne(rowFilters.length);
   }
 
-  private void initPrevListForMustPassOne(int size) {
+  public void initPrevListForMustPassOne(int size) {
 if (operator == Operator.MUST_PASS_ONE) {
   if (this.prevFilterRCList == null) {
 prevFilterRCList = new ArrayList<>(Collections.nCopies(size, null));
@@ -147,8 +156,10 @@ final public class FilterList extends FilterBase {
 }
   }
 
+
   /**
* Get the operator.
+   *
* @return operator
*/
   public Operator getOperator() {
@@ -157,6 +168,7 @@ final public class FilterList extends FilterBase 

[3/6] hbase git commit: HBASE-18957 add test that confirms 2 FamilyFilters in a FilterList using MUST_PASS_ONE operator will return results that match either of the FamilyFilters

2017-10-09 Thread busbey
HBASE-18957 add test that confirms 2 FamilyFilters in a FilterList using MUST_PASS_ONE operator will return results that match either of the FamilyFilters

Amending-Author: Sean Busbey 

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9cabf34e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9cabf34e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9cabf34e

Branch: refs/heads/master
Commit: 9cabf34e7bd9bdd85920f5cd3bdfe5a331166ec4
Parents: 0ff4f5f
Author: Peter Somogyi 
Authored: Fri Oct 6 09:26:42 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 16:49:20 2017 -0500

--
 .../hbase/filter/TestFilterListOnMini.java  | 88 
 1 file changed, 88 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9cabf34e/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
new file mode 100644
index 000..dd2399f
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java
@@ -0,0 +1,88 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.filter;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.testclassification.FilterTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
+
+/**
+ * Tests filter Lists in ways that rely on a MiniCluster.
+ * Where possible, favor tests in TestFilterList and TestFilterFromRegionSide instead.
+ */
+@Category({MediumTests.class, FilterTests.class})
+public class TestFilterListOnMini {
+
+  private static final Log LOG = LogFactory.getLog(TestFilterListOnMini.class);
+  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+
+  @Rule
+  public TestName name = new TestName();
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+TEST_UTIL.startMiniCluster(1);
+  }
+
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+TEST_UTIL.shutdownMiniCluster();
+  }
+
+  @Test
+  public void testFiltersWithOR() throws Exception {
+TableName tn = TableName.valueOf(name.getMethodName());
+Table table = TEST_UTIL.createTable(tn, new String[] { "cf1", "cf2" });
+byte[] CF1 = Bytes.toBytes("cf1");
+byte[] CF2 = Bytes.toBytes("cf2");
+Put put1 = new Put(Bytes.toBytes("0"));
+put1.addColumn(CF1, Bytes.toBytes("col_a"), Bytes.toBytes(0));
+table.put(put1);
+Put put2 = new Put(Bytes.toBytes("0"));
+put2.addColumn(CF2, Bytes.toBytes("col_b"), Bytes.toBytes(0));
+table.put(put2);
+FamilyFilter filterCF1 =
+new FamilyFilter(CompareFilter.CompareOp.EQUAL, new 
BinaryComparator(CF1));
+FamilyFilter filterCF2 =
+new FamilyFilter(CompareFilter.CompareOp.EQUAL, new 
BinaryComparator(CF2));
+FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
+filterList.addFilter(filterCF1);
+filterList.addFilter(filterCF2);
+Scan scan = new Scan();
+scan.setFilter(filterList);
+ResultScanner scanner = table.getScanner(scan);
+LOG.info("Filter list: " + filterList);
+for (Result rr = scanner.next(); rr != null; rr = scanner.next()) {
+ 
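The diff is truncated here. A minimal sketch of how the scan could be verified,
reusing the test's table, scanner, CF1 and CF2 variables (these assertions are
illustrative, not the commit's exact loop body):

    int rows = 0;
    for (Result rr = scanner.next(); rr != null; rr = scanner.next()) {
      rows++;
      // MUST_PASS_ONE ORs the two FamilyFilters, so row "0" should come back
      // with its cells from both cf1 and cf2.
      Assert.assertTrue(rr.containsColumn(CF1, Bytes.toBytes("col_a")));
      Assert.assertTrue(rr.containsColumn(CF2, Bytes.toBytes("col_b")));
    }
    Assert.assertEquals(1, rows);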

[1/6] hbase git commit: Revert "HBASE-17678 FilterList with MUST_PASS_ONE may lead to redundant cells returned"

2017-10-09 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/master 0ff4f5fba -> b727ab850


Revert "HBASE-17678 FilterList with MUST_PASS_ONE may lead to redundant cells 
returned"

This reverts commit 0d0c330401ade938bf934aafd79ec23705edcc60.

Backing out filterlist regression, see HBASE-18957. Work continuing branch for 
HBASE-18410.

Signed-off-by: Peter Somogyi 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b727ab85
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b727ab85
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b727ab85

Branch: refs/heads/master
Commit: b727ab850cbabcc480d5ede2f1970e896e6f3e46
Parents: f97c0bd
Author: Sean Busbey 
Authored: Fri Oct 6 11:21:30 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 16:49:20 2017 -0500

--
 .../apache/hadoop/hbase/filter/FilterList.java  |  76 +---
 .../hadoop/hbase/filter/TestFilterList.java | 117 ---
 2 files changed, 2 insertions(+), 191 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b727ab85/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 87dae1c..9c4da41 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -66,14 +66,6 @@ final public class FilterList extends FilterBase {
   private final List<Filter> filters;
   private Filter seekHintFilter = null;
 
-  /**
-   * Save previous return code and previous cell for every filter in filter 
list. For MUST_PASS_ONE,
-   * we use the previous return code to decide whether we should pass current 
cell encountered to
-   * the filter. For MUST_PASS_ALL, the two list are meaningless.
-   */
-  private List<ReturnCode> prevFilterRCList = null;
-  private List<Cell> prevCellList = null;
-
   /** Reference Cell used by {@link #transformCell(Cell)} for validation 
purpose. */
   private Cell referenceCell = null;
 
@@ -95,7 +87,6 @@ final public class FilterList extends FilterBase {
  public FilterList(final List<Filter> rowFilters) {
 reversed = getReversed(rowFilters, reversed);
 this.filters = new ArrayList<>(rowFilters);
-initPrevListForMustPassOne(rowFilters.size());
   }
 
   /**
@@ -115,7 +106,6 @@ final public class FilterList extends FilterBase {
   public FilterList(final Operator operator) {
 this.operator = operator;
 this.filters = new ArrayList<>();
-initPrevListForMustPassOne(filters.size());
   }
 
   /**
@@ -127,7 +117,6 @@ final public class FilterList extends FilterBase {
  public FilterList(final Operator operator, final List<Filter> rowFilters) {
 this(rowFilters);
 this.operator = operator;
-initPrevListForMustPassOne(rowFilters.size());
   }
 
   /**
@@ -139,21 +128,8 @@ final public class FilterList extends FilterBase {
   public FilterList(final Operator operator, final Filter... rowFilters) {
 this(rowFilters);
 this.operator = operator;
-initPrevListForMustPassOne(rowFilters.length);
-  }
-
-  public void initPrevListForMustPassOne(int size) {
-if (operator == Operator.MUST_PASS_ONE) {
-  if (this.prevCellList == null) {
-prevFilterRCList = new ArrayList<>(Collections.nCopies(size, null));
-  }
-  if (this.prevCellList == null) {
-prevCellList = new ArrayList<>(Collections.nCopies(size, null));
-  }
-}
   }
 
-
   /**
* Get the operator.
*
@@ -208,10 +184,6 @@ final public class FilterList extends FilterBase {
  public void addFilter(List<Filter> filters) {
 checkReversed(filters, isReversed());
 this.filters.addAll(filters);
-if (operator == Operator.MUST_PASS_ONE) {
-  this.prevFilterRCList.addAll(Collections.nCopies(filters.size(), null));
-  this.prevCellList.addAll(Collections.nCopies(filters.size(), null));
-}
   }
 
   /**
@@ -228,10 +200,6 @@ final public class FilterList extends FilterBase {
 int listize = filters.size();
 for (int i = 0; i < listize; i++) {
   filters.get(i).reset();
-  if (operator == Operator.MUST_PASS_ONE) {
-prevFilterRCList.set(i, null);
-prevCellList.set(i, null);
-  }
 }
 seekHintFilter = null;
   }
@@ -314,41 +282,6 @@ final public class FilterList extends FilterBase {
 return this.transformedCell;
   }
 
-  /**
-   * For MUST_PASS_ONE, we cannot make sure that when filter-A in filter list 
return NEXT_COL then
-   * the next cell passing to filterList will be the first cell in 
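The removed comment is truncated above; the bookkeeping it describes can be
sketched as follows. This is an illustration of the reverted approach, not
HBase's code: shouldPassCellToFilter() and mergeReturnCode() are hypothetical
helpers standing in for the real logic.

    import java.io.IOException;
    import java.util.List;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.filter.Filter;
    import org.apache.hadoop.hbase.filter.Filter.ReturnCode;

    abstract class MustPassOneSketch {
      // Hypothetical helpers, named for this sketch only.
      abstract boolean shouldPassCellToFilter(ReturnCode prevRC, Cell prevCell, Cell c);
      abstract ReturnCode mergeReturnCode(ReturnCode a, ReturnCode b);

      ReturnCode filterKeyValueMustPassOne(List<Filter> filters, List<ReturnCode> prevRC,
          List<Cell> prevCell, Cell c) throws IOException {
        ReturnCode rc = ReturnCode.SKIP;
        for (int i = 0; i < filters.size(); i++) {
          // A sub-filter that already answered NEXT_COL or NEXT_ROW must not be
          // re-consulted for later versions of the same column/row; otherwise it
          // can vote INCLUDE again and the OR across filters returns redundant cells.
          if (!shouldPassCellToFilter(prevRC.get(i), prevCell.get(i), c)) {
            continue; // reuse the remembered verdict for this sub-filter
          }
          ReturnCode localRC = filters.get(i).filterKeyValue(c);
          prevRC.set(i, localRC);
          prevCell.set(i, c);
          rc = mergeReturnCode(rc, localRC); // OR semantics: most permissive wins
        }
        return rc;
      }
    }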

hbase git commit: HBASE-18842 Fix unknown namespace message in clone_snapshot

2017-10-09 Thread jyates
Repository: hbase
Updated Branches:
  refs/heads/master 087edf017 -> 0ff4f5fba


HBASE-18842 Fix unknown namespace message in clone_snapshot

Signed-off-by: Jesse Yates 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0ff4f5fb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0ff4f5fb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0ff4f5fb

Branch: refs/heads/master
Commit: 0ff4f5fba9cef8b4dea599357d935b47f6151152
Parents: 087edf0
Author: Thoralf Gutierrez 
Authored: Tue Sep 26 10:01:53 2017 -0700
Committer: Jesse Yates 
Committed: Mon Oct 9 14:39:59 2017 -0700

--
 .../main/ruby/shell/commands/clone_snapshot.rb  |  4 ++
 .../src/test/ruby/shell/commands_test.rb| 48 
 2 files changed, 52 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0ff4f5fb/hbase-shell/src/main/ruby/shell/commands/clone_snapshot.rb
--
diff --git a/hbase-shell/src/main/ruby/shell/commands/clone_snapshot.rb 
b/hbase-shell/src/main/ruby/shell/commands/clone_snapshot.rb
index 6d9c5de..8f0b35b 100644
--- a/hbase-shell/src/main/ruby/shell/commands/clone_snapshot.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/clone_snapshot.rb
@@ -47,6 +47,10 @@ EOF
   tableName = args[1]
   raise "Table already exists: #{tableName}!"
 end
+if cause.is_a?(org.apache.hadoop.hbase.NamespaceNotFoundException)
+  namespace_name = args[1].split(':')[0]
+  raise "Unknown namespace: #{namespace_name}!"
+end
   end
 end
   end

http://git-wip-us.apache.org/repos/asf/hbase/blob/0ff4f5fb/hbase-shell/src/test/ruby/shell/commands_test.rb
--
diff --git a/hbase-shell/src/test/ruby/shell/commands_test.rb 
b/hbase-shell/src/test/ruby/shell/commands_test.rb
index 9fa291a..5daf9fa 100644
--- a/hbase-shell/src/test/ruby/shell/commands_test.rb
+++ b/hbase-shell/src/test/ruby/shell/commands_test.rb
@@ -21,6 +21,9 @@ require 'hbase_constants'
 require 'hbase/table'
 require 'shell'
 
+##
+# Tests whether all registered commands have a help and command method
+
 class ShellCommandsTest < Test::Unit::TestCase
   Shell.commands.each do |name, klass|
 define_test "#{name} command class #{klass} should respond to help" do
@@ -32,3 +35,48 @@ class ShellCommandsTest < Test::Unit::TestCase
 end
   end
 end
+
+##
+# Tests commands from the point of view of the shell to validate
+# that the error messages returned to the user are correct
+
+class ShellCloneSnapshotTest < Test::Unit::TestCase
+  include Hbase::TestHelpers
+
+  def setup
+setup_hbase
+@shell.interactive = false
+# Create test table
+@test_name = 'hbase_shell_tests_table'
+drop_test_table(@test_name)
+create_test_table(@test_name)
+# Test snapshot name
+@create_test_snapshot = 'hbase_shell_tests_snapshot'
+drop_test_snapshot
+  end
+
+  def teardown
+drop_test_table(@test_name)
+drop_test_snapshot
+shutdown
+  end
+
+  define_test 'Clone snapshot with table that already exists' do
+existing_table = 'existing_table'
+create_test_table(existing_table)
+admin.snapshot(@test_name, @create_test_snapshot)
+error = assert_raise(RuntimeError) do
+  @shell.command(:clone_snapshot, @create_test_snapshot, existing_table)
+end
+assert_match(/Table already exists: existing_table!/, error.message)
+  end
+
+  define_test 'Clone snapshot with unknown namespace' do
+clone_table = 'does_not_exist:test_clone_snapshot_table'
+admin.snapshot(@test_name, @create_test_snapshot)
+error = assert_raise(RuntimeError) do
+  @shell.command(:clone_snapshot, @create_test_snapshot, clone_table)
+end
+assert_match(/Unknown namespace: does_not_exist!/, error.message)
+  end
+end
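The same behavior is reachable from the Java client. A minimal sketch, assuming
the NamespaceNotFoundException surfaces as the cause of the thrown IOException,
which is exactly what the shell handler above checks; connection settings and
names are illustrative:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.NamespaceNotFoundException;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    public class CloneSnapshotExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Admin admin = conn.getAdmin()) {
          try {
            // Cloning into a table whose namespace does not exist fails; the
            // shell change above maps that failure to "Unknown namespace: ...".
            admin.cloneSnapshot("hbase_shell_tests_snapshot",
                TableName.valueOf("does_not_exist:test_clone_snapshot_table"));
          } catch (IOException e) {
            if (e.getCause() instanceof NamespaceNotFoundException) {
              System.err.println("Unknown namespace: does_not_exist!");
            } else {
              throw e;
            }
          }
        }
      }
    }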



[2/5] hbase git commit: HBASE-18934 pick hadoop versions for precommit test after patch branch is determined.

2017-10-09 Thread busbey
HBASE-18934 pick hadoop versions for precommit test after patch branch is 
determined.

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4358af21
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4358af21
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4358af21

Branch: refs/heads/branch-1.4
Commit: 4358af214b4a00614b0326bc0c3f0917756e1a5a
Parents: b6f14bd
Author: Sean Busbey 
Authored: Thu Oct 5 18:00:42 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 11:46:51 2017 -0500

--
 dev-support/hbase-personality.sh | 40 +--
 1 file changed, 24 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4358af21/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index c58e47e..c0386e5 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -36,6 +36,15 @@
 
 personality_plugins "all"
 
+if ! declare -f "yetus_info" >/dev/null; then
+
+  function yetus_info
+  {
+echo "[$(date) INFO]: $*" 1>&2
+  }
+
+fi
+
 ## @description  Globals specific to this personality
 ## @audience private
 ## @stabilityevolving
@@ -51,19 +60,6 @@ function personality_globals
   #shellcheck disable=SC2034
   GITHUB_REPO="apache/hbase"
 
-  # All supported Hadoop versions that we want to test the compilation with
-  # See the Hadoop section on prereqs in the HBase Reference Guide
-  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
-HBASE_HADOOP2_VERSIONS="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 
2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
-HBASE_HADOOP3_VERSIONS=""
-  elif [[ ${PATCH_BRANCH} = branch-2* ]]; then
-HBASE_HADOOP2_VERSIONS="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
-HBASE_HADOOP3_VERSIONS="3.0.0-alpha4"
-  else # master or a feature branch
-HBASE_HADOOP2_VERSIONS="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
-HBASE_HADOOP3_VERSIONS="3.0.0-alpha4"
-  fi
-
   # TODO use PATCH_BRANCH to select jdk versions to use.
 
   # Override the maven options
@@ -244,9 +240,21 @@ function hadoopcheck_rebuild
 
   big_console_header "Compiling against various Hadoop versions"
 
-  hbase_hadoop2_versions=${HBASE_HADOOP2_VERSIONS}
-  hbase_hadoop3_versions=${HBASE_HADOOP3_VERSIONS}
-
+  # All supported Hadoop versions that we want to test the compilation with
+  # See the Hadoop section on prereqs in the HBase Reference Guide
+  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
+yetus_info "setting Hadoop versions to test based on branch-1-ish rules."
+hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 
2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+hbase_hadoop3_versions=""
+  elif [[ ${PATCH_BRANCH} = branch-2* ]]; then
+yetus_info "setting Hadoop versions to test based on branch-2-ish rules."
+hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+hbase_hadoop3_versions="3.0.0-alpha4"
+  else # master or a feature branch
+yetus_info "setting Hadoop versions to test based on master/feature branch 
rules."
+hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+hbase_hadoop3_versions="3.0.0-alpha4"
+  fi
 
   export MAVEN_OPTS="${MAVEN_OPTS}"
   for hadoopver in ${hbase_hadoop2_versions}; do



[4/5] hbase git commit: HBASE-18934 pick hadoop versions for precommit test after patch branch is determined.

2017-10-09 Thread busbey
HBASE-18934 pick hadoop versions for precommit test after patch branch is 
determined.

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ffed972f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ffed972f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ffed972f

Branch: refs/heads/branch-1.2
Commit: ffed972fd0d4c22bf6b069217f589e583c83fb77
Parents: 2f0101d
Author: Sean Busbey 
Authored: Thu Oct 5 18:00:42 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 12:00:52 2017 -0500

--
 dev-support/hbase-personality.sh | 40 +--
 1 file changed, 24 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ffed972f/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index c58e47e..c0386e5 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -36,6 +36,15 @@
 
 personality_plugins "all"
 
+if ! declare -f "yetus_info" >/dev/null; then
+
+  function yetus_info
+  {
+echo "[$(date) INFO]: $*" 1>&2
+  }
+
+fi
+
 ## @description  Globals specific to this personality
 ## @audience private
 ## @stabilityevolving
@@ -51,19 +60,6 @@ function personality_globals
   #shellcheck disable=SC2034
   GITHUB_REPO="apache/hbase"
 
-  # All supported Hadoop versions that we want to test the compilation with
-  # See the Hadoop section on prereqs in the HBase Reference Guide
-  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
-HBASE_HADOOP2_VERSIONS="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 
2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
-HBASE_HADOOP3_VERSIONS=""
-  elif [[ ${PATCH_BRANCH} = branch-2* ]]; then
-HBASE_HADOOP2_VERSIONS="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
-HBASE_HADOOP3_VERSIONS="3.0.0-alpha4"
-  else # master or a feature branch
-HBASE_HADOOP2_VERSIONS="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
-HBASE_HADOOP3_VERSIONS="3.0.0-alpha4"
-  fi
-
   # TODO use PATCH_BRANCH to select jdk versions to use.
 
   # Override the maven options
@@ -244,9 +240,21 @@ function hadoopcheck_rebuild
 
   big_console_header "Compiling against various Hadoop versions"
 
-  hbase_hadoop2_versions=${HBASE_HADOOP2_VERSIONS}
-  hbase_hadoop3_versions=${HBASE_HADOOP3_VERSIONS}
-
+  # All supported Hadoop versions that we want to test the compilation with
+  # See the Hadoop section on prereqs in the HBase Reference Guide
+  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
+yetus_info "setting Hadoop versions to test based on branch-1-ish rules."
+hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 
2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+hbase_hadoop3_versions=""
+  elif [[ ${PATCH_BRANCH} = branch-2* ]]; then
+yetus_info "setting Hadoop versions to test based on branch-2-ish rules."
+hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+hbase_hadoop3_versions="3.0.0-alpha4"
+  else # master or a feature branch
+yetus_info "setting Hadoop versions to test based on master/feature branch 
rules."
+hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+hbase_hadoop3_versions="3.0.0-alpha4"
+  fi
 
   export MAVEN_OPTS="${MAVEN_OPTS}"
   for hadoopver in ${hbase_hadoop2_versions}; do



[5/5] hbase git commit: HBASE-18934 pick hadoop versions for precommit test after patch branch is determined.

2017-10-09 Thread busbey
HBASE-18934 pick hadoop versions for precommit test after patch branch is 
determined.

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c01c55d5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c01c55d5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c01c55d5

Branch: refs/heads/branch-1.1
Commit: c01c55d5f1b6dc3c211868eb3de29374c5bb2b2c
Parents: decf01d
Author: Sean Busbey 
Authored: Thu Oct 5 18:00:42 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 12:01:33 2017 -0500

--
 dev-support/hbase-personality.sh | 40 +--
 1 file changed, 24 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c01c55d5/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index c58e47e..c0386e5 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -36,6 +36,15 @@
 
 personality_plugins "all"
 
+if ! declare -f "yetus_info" >/dev/null; then
+
+  function yetus_info
+  {
+echo "[$(date) INFO]: $*" 1>&2
+  }
+
+fi
+
 ## @description  Globals specific to this personality
 ## @audience private
 ## @stabilityevolving
@@ -51,19 +60,6 @@ function personality_globals
   #shellcheck disable=SC2034
   GITHUB_REPO="apache/hbase"
 
-  # All supported Hadoop versions that we want to test the compilation with
-  # See the Hadoop section on prereqs in the HBase Reference Guide
-  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
-HBASE_HADOOP2_VERSIONS="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 
2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
-HBASE_HADOOP3_VERSIONS=""
-  elif [[ ${PATCH_BRANCH} = branch-2* ]]; then
-HBASE_HADOOP2_VERSIONS="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
-HBASE_HADOOP3_VERSIONS="3.0.0-alpha4"
-  else # master or a feature branch
-HBASE_HADOOP2_VERSIONS="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
-HBASE_HADOOP3_VERSIONS="3.0.0-alpha4"
-  fi
-
   # TODO use PATCH_BRANCH to select jdk versions to use.
 
   # Override the maven options
@@ -244,9 +240,21 @@ function hadoopcheck_rebuild
 
   big_console_header "Compiling against various Hadoop versions"
 
-  hbase_hadoop2_versions=${HBASE_HADOOP2_VERSIONS}
-  hbase_hadoop3_versions=${HBASE_HADOOP3_VERSIONS}
-
+  # All supported Hadoop versions that we want to test the compilation with
+  # See the Hadoop section on prereqs in the HBase Reference Guide
+  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
+yetus_info "setting Hadoop versions to test based on branch-1-ish rules."
+hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 
2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+hbase_hadoop3_versions=""
+  elif [[ ${PATCH_BRANCH} = branch-2* ]]; then
+yetus_info "setting Hadoop versions to test based on branch-2-ish rules."
+hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+hbase_hadoop3_versions="3.0.0-alpha4"
+  else # master or a feature branch
+yetus_info "setting Hadoop versions to test based on master/feature branch 
rules."
+hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+hbase_hadoop3_versions="3.0.0-alpha4"
+  fi
 
   export MAVEN_OPTS="${MAVEN_OPTS}"
   for hadoopver in ${hbase_hadoop2_versions}; do



[3/5] hbase git commit: HBASE-18934 pick hadoop versions for precommit test after patch branch is determined.

2017-10-09 Thread busbey
HBASE-18934 pick hadoop versions for precommit test after patch branch is 
determined.

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5b7bcdfc
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5b7bcdfc
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5b7bcdfc

Branch: refs/heads/branch-1.3
Commit: 5b7bcdfc4da5a521db7d4d85b805cf5b4cc7e39a
Parents: 0d836d9
Author: Sean Busbey 
Authored: Thu Oct 5 18:00:42 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 11:47:32 2017 -0500

--
 dev-support/hbase-personality.sh | 40 +--
 1 file changed, 24 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7bcdfc/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index c58e47e..c0386e5 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -36,6 +36,15 @@
 
 personality_plugins "all"
 
+if ! declare -f "yetus_info" >/dev/null; then
+
+  function yetus_info
+  {
+echo "[$(date) INFO]: $*" 1>&2
+  }
+
+fi
+
 ## @description  Globals specific to this personality
 ## @audience private
 ## @stabilityevolving
@@ -51,19 +60,6 @@ function personality_globals
   #shellcheck disable=SC2034
   GITHUB_REPO="apache/hbase"
 
-  # All supported Hadoop versions that we want to test the compilation with
-  # See the Hadoop section on prereqs in the HBase Reference Guide
-  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
-HBASE_HADOOP2_VERSIONS="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 
2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
-HBASE_HADOOP3_VERSIONS=""
-  elif [[ ${PATCH_BRANCH} = branch-2* ]]; then
-HBASE_HADOOP2_VERSIONS="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
-HBASE_HADOOP3_VERSIONS="3.0.0-alpha4"
-  else # master or a feature branch
-HBASE_HADOOP2_VERSIONS="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
-HBASE_HADOOP3_VERSIONS="3.0.0-alpha4"
-  fi
-
   # TODO use PATCH_BRANCH to select jdk versions to use.
 
   # Override the maven options
@@ -244,9 +240,21 @@ function hadoopcheck_rebuild
 
   big_console_header "Compiling against various Hadoop versions"
 
-  hbase_hadoop2_versions=${HBASE_HADOOP2_VERSIONS}
-  hbase_hadoop3_versions=${HBASE_HADOOP3_VERSIONS}
-
+  # All supported Hadoop versions that we want to test the compilation with
+  # See the Hadoop section on prereqs in the HBase Reference Guide
+  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
+yetus_info "setting Hadoop versions to test based on branch-1-ish rules."
+hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 
2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+hbase_hadoop3_versions=""
+  elif [[ ${PATCH_BRANCH} = branch-2* ]]; then
+yetus_info "setting Hadoop versions to test based on branch-2-ish rules."
+hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+hbase_hadoop3_versions="3.0.0-alpha4"
+  else # master or a feature branch
+yetus_info "setting Hadoop versions to test based on master/feature branch 
rules."
+hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+hbase_hadoop3_versions="3.0.0-alpha4"
+  fi
 
   export MAVEN_OPTS="${MAVEN_OPTS}"
   for hadoopver in ${hbase_hadoop2_versions}; do



[1/5] hbase git commit: HBASE-18934 pick hadoop versions for precommit test after patch branch is determined.

2017-10-09 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-1 50ef066c8 -> 8d99b0cfe
  refs/heads/branch-1.1 decf01d4b -> c01c55d5f
  refs/heads/branch-1.2 2f0101df2 -> ffed972fd
  refs/heads/branch-1.3 0d836d986 -> 5b7bcdfc4
  refs/heads/branch-1.4 b6f14bd2e -> 4358af214


HBASE-18934 pick hadoop versions for precommit test after patch branch is 
determined.

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8d99b0cf
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8d99b0cf
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8d99b0cf

Branch: refs/heads/branch-1
Commit: 8d99b0cfe4e306aace84b3cbaeed9913c0440e7e
Parents: 50ef066
Author: Sean Busbey 
Authored: Thu Oct 5 18:00:42 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 11:45:28 2017 -0500

--
 dev-support/hbase-personality.sh | 40 +--
 1 file changed, 24 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8d99b0cf/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index c58e47e..c0386e5 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -36,6 +36,15 @@
 
 personality_plugins "all"
 
+if ! declare -f "yetus_info" >/dev/null; then
+
+  function yetus_info
+  {
+echo "[$(date) INFO]: $*" 1>&2
+  }
+
+fi
+
 ## @description  Globals specific to this personality
 ## @audience private
 ## @stabilityevolving
@@ -51,19 +60,6 @@ function personality_globals
   #shellcheck disable=SC2034
   GITHUB_REPO="apache/hbase"
 
-  # All supported Hadoop versions that we want to test the compilation with
-  # See the Hadoop section on prereqs in the HBase Reference Guide
-  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
-HBASE_HADOOP2_VERSIONS="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 
2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
-HBASE_HADOOP3_VERSIONS=""
-  elif [[ ${PATCH_BRANCH} = branch-2* ]]; then
-HBASE_HADOOP2_VERSIONS="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
-HBASE_HADOOP3_VERSIONS="3.0.0-alpha4"
-  else # master or a feature branch
-HBASE_HADOOP2_VERSIONS="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
-HBASE_HADOOP3_VERSIONS="3.0.0-alpha4"
-  fi
-
   # TODO use PATCH_BRANCH to select jdk versions to use.
 
   # Override the maven options
@@ -244,9 +240,21 @@ function hadoopcheck_rebuild
 
   big_console_header "Compiling against various Hadoop versions"
 
-  hbase_hadoop2_versions=${HBASE_HADOOP2_VERSIONS}
-  hbase_hadoop3_versions=${HBASE_HADOOP3_VERSIONS}
-
+  # All supported Hadoop versions that we want to test the compilation with
+  # See the Hadoop section on prereqs in the HBase Reference Guide
+  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
+yetus_info "setting Hadoop versions to test based on branch-1-ish rules."
+hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 
2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+hbase_hadoop3_versions=""
+  elif [[ ${PATCH_BRANCH} = branch-2* ]]; then
+yetus_info "setting Hadoop versions to test based on branch-2-ish rules."
+hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+hbase_hadoop3_versions="3.0.0-alpha4"
+  else # master or a feature branch
+yetus_info "setting Hadoop versions to test based on master/feature branch 
rules."
+hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+hbase_hadoop3_versions="3.0.0-alpha4"
+  fi
 
   export MAVEN_OPTS="${MAVEN_OPTS}"
   for hadoopver in ${hbase_hadoop2_versions}; do



[2/2] hbase git commit: HBASE-18934 pick hadoop versions for precommit test after patch branch is determined.

2017-10-09 Thread busbey
HBASE-18934 pick hadoop versions for precommit test after patch branch is 
determined.

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e4647da0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e4647da0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e4647da0

Branch: refs/heads/branch-2
Commit: e4647da0dbba0e776c54921acadf1f5f6a5f4d81
Parents: 0494671
Author: Sean Busbey 
Authored: Thu Oct 5 18:00:42 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 11:38:16 2017 -0500

--
 dev-support/hbase-personality.sh | 40 +--
 1 file changed, 24 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e4647da0/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index c58e47e..c0386e5 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -36,6 +36,15 @@
 
 personality_plugins "all"
 
+if ! declare -f "yetus_info" >/dev/null; then
+
+  function yetus_info
+  {
+echo "[$(date) INFO]: $*" 1>&2
+  }
+
+fi
+
 ## @description  Globals specific to this personality
 ## @audience private
 ## @stabilityevolving
@@ -51,19 +60,6 @@ function personality_globals
   #shellcheck disable=SC2034
   GITHUB_REPO="apache/hbase"
 
-  # All supported Hadoop versions that we want to test the compilation with
-  # See the Hadoop section on prereqs in the HBase Reference Guide
-  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
-HBASE_HADOOP2_VERSIONS="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 
2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
-HBASE_HADOOP3_VERSIONS=""
-  elif [[ ${PATCH_BRANCH} = branch-2* ]]; then
-HBASE_HADOOP2_VERSIONS="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
-HBASE_HADOOP3_VERSIONS="3.0.0-alpha4"
-  else # master or a feature branch
-HBASE_HADOOP2_VERSIONS="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
-HBASE_HADOOP3_VERSIONS="3.0.0-alpha4"
-  fi
-
   # TODO use PATCH_BRANCH to select jdk versions to use.
 
   # Override the maven options
@@ -244,9 +240,21 @@ function hadoopcheck_rebuild
 
   big_console_header "Compiling against various Hadoop versions"
 
-  hbase_hadoop2_versions=${HBASE_HADOOP2_VERSIONS}
-  hbase_hadoop3_versions=${HBASE_HADOOP3_VERSIONS}
-
+  # All supported Hadoop versions that we want to test the compilation with
+  # See the Hadoop section on prereqs in the HBase Reference Guide
+  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
+yetus_info "setting Hadoop versions to test based on branch-1-ish rules."
+hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 
2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+hbase_hadoop3_versions=""
+  elif [[ ${PATCH_BRANCH} = branch-2* ]]; then
+yetus_info "setting Hadoop versions to test based on branch-2-ish rules."
+hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+hbase_hadoop3_versions="3.0.0-alpha4"
+  else # master or a feature branch
+yetus_info "setting Hadoop versions to test based on master/feature branch 
rules."
+hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+hbase_hadoop3_versions="3.0.0-alpha4"
+  fi
 
   export MAVEN_OPTS="${MAVEN_OPTS}"
   for hadoopver in ${hbase_hadoop2_versions}; do



[1/2] hbase git commit: HBASE-18973 Clean up warnings about pom well-formedness from mvn validate.

2017-10-09 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-2 13a53811d -> e4647da0d


HBASE-18973 Clean up warnings about pom well-formedness from mvn validate.

Signed-off-by: Michael Stack 
Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/04946719
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/04946719
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/04946719

Branch: refs/heads/branch-2
Commit: 0494671983ae1737ff68bde901362aaa3cb577f0
Parents: 13a5381
Author: Sean Busbey 
Authored: Sat Oct 7 21:46:20 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 11:38:10 2017 -0500

--
 hbase-it/pom.xml   | 6 --
 hbase-rest/pom.xml | 5 -
 pom.xml| 5 +
 3 files changed, 5 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/04946719/hbase-it/pom.xml
--
diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml
index a39459a..a150012 100644
--- a/hbase-it/pom.xml
+++ b/hbase-it/pom.xml
@@ -276,7 +276,6 @@
     <dependency>
       <groupId>org.glassfish.jersey.core</groupId>
       <artifactId>jersey-client</artifactId>
-      <version>${jersey.version}</version>
     </dependency>
     <dependency>
@@ -295,11 +294,6 @@
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.glassfish.jersey.core</groupId>
-      <artifactId>jersey-client</artifactId>
-      <version>2.25.1</version>
-    </dependency>
   </dependencies>
http://git-wip-us.apache.org/repos/asf/hbase/blob/04946719/hbase-rest/pom.xml
--
diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml
index 5d580c1..60bab83 100644
--- a/hbase-rest/pom.xml
+++ b/hbase-rest/pom.xml
@@ -338,11 +338,6 @@
findbugs-annotations
true
 
-
-  javax.servlet
-  javax.servlet-api
-  ${servlet.api.version}
-
   
   
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/04946719/pom.xml
--
diff --git a/pom.xml b/pom.xml
index fc6383a..15929f2 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1934,6 +1934,11 @@
 ${jersey.version}
   
   
+org.glassfish.jersey.core
+jersey-client
+${jersey.version}
+  
+  
 org.glassfish.jersey.media
 jersey-media-json-jackson1
 ${jackson.version}



hbase git commit: HBASE-18934 pick hadoop versions for precommit test after patch branch is determined.

2017-10-09 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/master 6302dfaf2 -> 087edf017


HBASE-18934 pick hadoop versions for precommit test after patch branch is 
determined.

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/087edf01
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/087edf01
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/087edf01

Branch: refs/heads/master
Commit: 087edf01728b6bd2508fd8ab7f9123e3c4423959
Parents: 6302dfa
Author: Sean Busbey 
Authored: Thu Oct 5 18:00:42 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 11:31:55 2017 -0500

--
 dev-support/hbase-personality.sh | 40 +--
 1 file changed, 24 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/087edf01/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index c58e47e..c0386e5 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -36,6 +36,15 @@
 
 personality_plugins "all"
 
+if ! declare -f "yetus_info" >/dev/null; then
+
+  function yetus_info
+  {
+echo "[$(date) INFO]: $*" 1>&2
+  }
+
+fi
+
 ## @description  Globals specific to this personality
 ## @audience private
 ## @stabilityevolving
@@ -51,19 +60,6 @@ function personality_globals
   #shellcheck disable=SC2034
   GITHUB_REPO="apache/hbase"
 
-  # All supported Hadoop versions that we want to test the compilation with
-  # See the Hadoop section on prereqs in the HBase Reference Guide
-  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
-HBASE_HADOOP2_VERSIONS="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 
2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
-HBASE_HADOOP3_VERSIONS=""
-  elif [[ ${PATCH_BRANCH} = branch-2* ]]; then
-HBASE_HADOOP2_VERSIONS="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
-HBASE_HADOOP3_VERSIONS="3.0.0-alpha4"
-  else # master or a feature branch
-HBASE_HADOOP2_VERSIONS="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
-HBASE_HADOOP3_VERSIONS="3.0.0-alpha4"
-  fi
-
   # TODO use PATCH_BRANCH to select jdk versions to use.
 
   # Override the maven options
@@ -244,9 +240,21 @@ function hadoopcheck_rebuild
 
   big_console_header "Compiling against various Hadoop versions"
 
-  hbase_hadoop2_versions=${HBASE_HADOOP2_VERSIONS}
-  hbase_hadoop3_versions=${HBASE_HADOOP3_VERSIONS}
-
+  # All supported Hadoop versions that we want to test the compilation with
+  # See the Hadoop section on prereqs in the HBase Reference Guide
+  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
+yetus_info "setting Hadoop versions to test based on branch-1-ish rules."
+hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 
2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+hbase_hadoop3_versions=""
+  elif [[ ${PATCH_BRANCH} = branch-2* ]]; then
+yetus_info "setting Hadoop versions to test based on branch-2-ish rules."
+hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+hbase_hadoop3_versions="3.0.0-alpha4"
+  else # master or a feature branch
+yetus_info "setting Hadoop versions to test based on master/feature branch 
rules."
+hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
+hbase_hadoop3_versions="3.0.0-alpha4"
+  fi
 
   export MAVEN_OPTS="${MAVEN_OPTS}"
   for hadoopver in ${hbase_hadoop2_versions}; do



hbase git commit: HBASE-18973 Clean up warnings about pom well-formedness from mvn validate.

2017-10-09 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/master e2cef8aa8 -> 6302dfaf2


HBASE-18973 Clean up warnings about pom well-formedness from mvn validate.

Signed-off-by: Michael Stack 
Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6302dfaf
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6302dfaf
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6302dfaf

Branch: refs/heads/master
Commit: 6302dfaf2b7921604c7a75d7f11de53009ad
Parents: e2cef8a
Author: Sean Busbey 
Authored: Sat Oct 7 21:46:20 2017 -0500
Committer: Sean Busbey 
Committed: Mon Oct 9 11:28:34 2017 -0500

--
 hbase-it/pom.xml   | 6 --
 hbase-rest/pom.xml | 5 -
 pom.xml| 5 +
 3 files changed, 5 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6302dfaf/hbase-it/pom.xml
--
diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml
index 6df0b59..111c4cf 100644
--- a/hbase-it/pom.xml
+++ b/hbase-it/pom.xml
@@ -276,7 +276,6 @@
     <dependency>
       <groupId>org.glassfish.jersey.core</groupId>
       <artifactId>jersey-client</artifactId>
-      <version>${jersey.version}</version>
     </dependency>
     <dependency>
@@ -295,11 +294,6 @@
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.glassfish.jersey.core</groupId>
-      <artifactId>jersey-client</artifactId>
-      <version>2.25.1</version>
-    </dependency>
   </dependencies>
http://git-wip-us.apache.org/repos/asf/hbase/blob/6302dfaf/hbase-rest/pom.xml
--
diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml
index 0b18809..0a12573 100644
--- a/hbase-rest/pom.xml
+++ b/hbase-rest/pom.xml
@@ -338,11 +338,6 @@
findbugs-annotations
true
 
-
-  javax.servlet
-  javax.servlet-api
-  ${servlet.api.version}
-
   
   
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/6302dfaf/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 272e4d4..26366ff 100755
--- a/pom.xml
+++ b/pom.xml
@@ -1978,6 +1978,11 @@
 ${jersey.version}
   
   
+org.glassfish.jersey.core
+jersey-client
+${jersey.version}
+  
+  
 org.glassfish.jersey.media
 jersey-media-json-jackson1
 ${jackson.version}



hbase-site git commit: INFRA-10751 Empty commit

2017-10-09 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site e6ae7c3e1 -> 1afc9484c


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/1afc9484
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/1afc9484
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/1afc9484

Branch: refs/heads/asf-site
Commit: 1afc9484c082f7e69e60c25e545119a73cf381ca
Parents: e6ae7c3
Author: jenkins 
Authored: Mon Oct 9 15:13:04 2017 +
Committer: jenkins 
Committed: Mon Oct 9 15:13:04 2017 +

--

--




[21/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html
index df1cfa8..df8ff34 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.html

[24/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileWriter.Builder.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileWriter.Builder.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileWriter.Builder.html
index 289d65c..0d380e3 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileWriter.Builder.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/StoreFileWriter.Builder.html

[07/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyList.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyList.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyList.html
index f49cc1a..956e7fd 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyList.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyList.html

[14/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.MyList.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.MyList.html 
b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.MyList.html
index d5471b4..2d3ecb6 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.MyList.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.MyList.html

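MyList above is a test-only wrapper that forwards every call to an inner list and runs a hook whenever an element is added. A minimal standalone sketch of that delegate-plus-hook pattern (plain Java; HookedList and the IntConsumer hook are illustrative names, not the actual TestHStore internals):

    import java.util.ArrayList;
    import java.util.Iterator;
    import java.util.List;
    import java.util.function.IntConsumer;

    // A list that forwards to an inner list and fires a hook on every add.
    final class HookedList<T> {
      private final List<T> delegatee = new ArrayList<>();
      private final IntConsumer hookAtAdd;

      HookedList(IntConsumer hookAtAdd) {
        this.hookAtAdd = hookAtAdd;
      }

      boolean add(T element) {
        hookAtAdd.accept(delegatee.size()); // observe state before the mutation
        return delegatee.add(element);
      }

      int size() { return delegatee.size(); }
      boolean isEmpty() { return delegatee.isEmpty(); }
      boolean contains(Object o) { return delegatee.contains(o); }
      Iterator<T> iterator() { return delegatee.iterator(); }
      Object[] toArray() { return delegatee.toArray(); }
    }

Tests use wrappers like this to trigger a fault or an assertion at the exact moment a store-internal collection is mutated, without mocking the whole List API.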
[29/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
Published site at .


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/e6ae7c3e
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/e6ae7c3e
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/e6ae7c3e

Branch: refs/heads/asf-site
Commit: e6ae7c3e1820c6e4ffcec54548f9acbaeb251db4
Parents: dc3684f
Author: jenkins 
Authored: Mon Oct 9 15:12:30 2017 +
Committer: jenkins 
Committed: Mon Oct 9 15:12:30 2017 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 6 +-
 .../hbase/quotas/SpaceLimitingException.html|13 +-
 .../hbase/quotas/SpaceLimitingException.html|   142 +-
 book.html   | 2 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 11962 -
 checkstyle.rss  | 6 +-
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html | 4 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 4 +-
 devapidocs/constant-values.html | 6 +-
 devapidocs/index-all.html   |16 +-
 .../hadoop/hbase/backup/package-tree.html   | 4 +-
 .../hadoop/hbase/class-use/CellComparator.html  |19 +-
 .../hadoop/hbase/client/package-tree.html   |24 +-
 .../hadoop/hbase/executor/package-tree.html | 2 +-
 .../hadoop/hbase/filter/package-tree.html   | 8 +-
 .../class-use/Compression.Algorithm.html|12 +-
 .../hbase/io/hfile/class-use/CacheConfig.html   |15 -
 .../hbase/io/hfile/class-use/HFileContext.html  |15 -
 .../hadoop/hbase/io/hfile/package-tree.html | 6 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   | 2 +-
 .../hadoop/hbase/mapreduce/package-tree.html| 2 +-
 .../hbase/master/balancer/package-tree.html | 2 +-
 .../master/cleaner/ReplicationMetaCleaner.html  | 2 +-
 .../hadoop/hbase/master/package-tree.html   | 6 +-
 .../hadoop/hbase/monitoring/package-tree.html   | 2 +-
 .../org/apache/hadoop/hbase/package-tree.html   |18 +-
 .../hadoop/hbase/procedure2/package-tree.html   | 4 +-
 .../hbase/quotas/SpaceLimitingException.html|23 +-
 .../hadoop/hbase/quotas/package-tree.html   | 6 +-
 .../hadoop/hbase/regionserver/HMobStore.html| 2 +-
 .../regionserver/HStore.StoreFlusherImpl.html   |32 +-
 .../hadoop/hbase/regionserver/HStore.html   |   458 +-
 .../regionserver/StoreFileWriter.Builder.html   |81 +-
 .../hbase/regionserver/StoreFileWriter.html |   120 +-
 ...StoreFlusher.BoundaryStripeFlushRequest.html | 8 +-
 ...ripeStoreFlusher.SizeStripeFlushRequest.html |10 +-
 .../StripeStoreFlusher.StripeFlushRequest.html  | 8 +-
 .../hbase/regionserver/StripeStoreFlusher.html  | 8 +-
 .../AbstractMultiFileWriter.WriterFactory.html  | 3 +-
 .../hbase/regionserver/class-use/BloomType.html |15 -
 .../class-use/StoreFileWriter.Builder.html  | 4 -
 .../regionserver/class-use/StoreFileWriter.html |12 +-
 .../class-use/TimeRangeTracker.html |40 +-
 .../hadoop/hbase/regionserver/package-tree.html |16 +-
 .../regionserver/querymatcher/package-tree.html | 4 +-
 .../hbase/security/access/package-tree.html | 2 +-
 .../hadoop/hbase/security/package-tree.html | 2 +-
 .../apache/hadoop/hbase/util/package-tree.html  | 8 +-
 .../org/apache/hadoop/hbase/Version.html| 6 +-
 .../master/cleaner/ReplicationMetaCleaner.html  |   204 +-
 .../hbase/mob/DefaultMobStoreFlusher.html   | 2 +-
 .../hbase/quotas/SpaceLimitingException.html|   142 +-
 .../hbase/regionserver/DefaultStoreFlusher.html |55 +-
 .../regionserver/HStore.StoreFlusherImpl.html   |  3036 +++--
 .../hadoop/hbase/regionserver/HStore.html   |  3036 +++--
 .../regionserver/StoreFileWriter.Builder.html   |   853 +-
 .../hbase/regionserver/StoreFileWriter.html |   853 +-
 ...StoreFlusher.BoundaryStripeFlushRequest.html |   203 +-
 ...ripeStoreFlusher.SizeStripeFlushRequest.html |   203 +-
 .../StripeStoreFlusher.StripeFlushRequest.html  |   203 +-
 .../hbase/regionserver/StripeStoreFlusher.html  |   203 +-
 export_control.html | 4 +-
 hbase-annotations/dependencies.html | 4 +-
 hbase-annotations/dependency-convergence.html   | 4 +-
 hbase-annotations/dependency-info.html  | 4 +-
 hbase-annotations/dependency-management.html| 4 +-
 hbase-annotations/index.html  

[23/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/TimeRangeTracker.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/TimeRangeTracker.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/TimeRangeTracker.html
index 78b6de1..05c0c11 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/TimeRangeTracker.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/TimeRangeTracker.html
@@ -120,7 +120,7 @@

-(package private) TimeRangeTracker
+private TimeRangeTracker
 StoreFileWriter.timeRangeTracker

@@ -135,10 +135,6 @@
 private TimeRangeTracker
 CompositeImmutableSegment.timeRangeTracker

-private TimeRangeTracker
-StoreFileWriter.Builder.trt

@@ -193,25 +189,6 @@
 TimeRangeTracker.create(TimeRangeTracker.Type type,
                         TimeRangeTracker trt)

-private AbstractMultiFileWriter.WriterFactory
-StripeStoreFlusher.createWriterFactory(TimeRangeTracker tracker,
-                                       long kvCount)

-StoreFileWriter
-HStore.createWriterInTmp(long maxKeyCount,
-                         Compression.Algorithm compression,
-                         boolean isCompaction,
-                         boolean includeMVCCReadpoint,
-                         boolean includesTag,
-                         boolean shouldDropBehind,
-                         TimeRangeTracker trt)

-StoreFileWriter.Builder
-StoreFileWriter.Builder.withTimeRangeTracker(TimeRangeTracker trt)

@@ -224,21 +201,6 @@
 NonSyncTimeRangeTracker(TimeRangeTracker trt)

-StoreFileWriter(org.apache.hadoop.fs.FileSystem fs,
-                org.apache.hadoop.fs.Path path,
-                org.apache.hadoop.conf.Configuration conf,
-                CacheConfig cacheConf,
-                CellComparator comparator,
-                BloomType bloomType,
-                long maxKeys,
-                java.net.InetSocketAddress[] favoredNodes,
-                HFileContext fileContext,
-                boolean shouldDropCacheBehind,
-                TimeRangeTracker trt)
-Creates an HFile.Writer that also writes helpful meta data.

 SyncTimeRangeTracker(TimeRangeTracker trt)
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index 0c8238a..e4b7588 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -685,18 +685,18 @@
 java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)
 
 org.apache.hadoop.hbase.regionserver.ScanType
-org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope
-org.apache.hadoop.hbase.regionserver.MetricsRegionServerSourceFactoryImpl.FactoryStorage
-org.apache.hadoop.hbase.regionserver.SplitLogWorker.TaskExecutor.Status
-org.apache.hadoop.hbase.regionserver.DefaultHeapMemoryTuner.StepDirection
-org.apache.hadoop.hbase.regionserver.FlushType
-org.apache.hadoop.hbase.regionserver.TimeRangeTracker.Type
 org.apache.hadoop.hbase.regionserver.Region.Operation
-org.apache.hadoop.hbase.regionserver.MemStoreCompactor.Action
+org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope
 org.apache.hadoop.hbase.regionserver.Region.FlushResult.Result
 org.apache.hadoop.hbase.regionserver.ScannerContext.NextState
-org.apache.hadoop.hbase.regionserver.BloomType
+org.apache.hadoop.hbase.regionserver.MemStoreCompactor.Action
+org.apache.hadoop.hbase.regionserver.DefaultHeapMemoryTuner.StepDirection
 org.apache.hadoop.hbase.regionserver.CompactingMemStore.IndexType
+org.apache.hadoop.hbase.regionserver.TimeRangeTracker.Type
+org.apache.hadoop.hbase.regionserver.MetricsRegionServerSourceFactoryImpl.FactoryStorage
+org.apache.hadoop.hbase.regionserver.FlushType
+org.apache.hadoop.hbase.regionserver.BloomType
+org.apache.hadoop.hbase.regionserver.SplitLogWorker.TaskExecutor.Status
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
 

[06/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyListHook.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyListHook.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyListHook.html
index f49cc1a..956e7fd 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyListHook.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyListHook.html
@@ -523,1289 +523,1320 @@
     assertCheck();
   }

-  /**
-   * Getting data from files only
-   * @throws IOException
-   */
-  @Test
-  public void testGet_FromFilesOnly() throws IOException {
-    init(this.name.getMethodName());
-
-    //Put data in memstore
-    this.store.add(new KeyValue(row, family, qf1, 1, (byte[])null), null);
-    this.store.add(new KeyValue(row, family, qf2, 1, (byte[])null), null);
-    //flush
-    flush(1);
-
-    //Add more data
-    this.store.add(new KeyValue(row, family, qf3, 1, (byte[])null), null);
-    this.store.add(new KeyValue(row, family, qf4, 1, (byte[])null), null);
-    //flush
-    flush(2);
-
-    //Add more data
-    this.store.add(new KeyValue(row, family, qf5, 1, (byte[])null), null);
-    this.store.add(new KeyValue(row, family, qf6, 1, (byte[])null), null);
-    //flush
-    flush(3);
-
-    //Get
-    result = HBaseTestingUtility.getFromStoreFile(store, get.getRow(), qualifiers);
-    //this.store.get(get, qualifiers, result);
-
-    //Need to sort the result since multiple files
-    Collections.sort(result, CellComparator.COMPARATOR);
-
-    //Compare
-    assertCheck();
-  }
+  @Test
+  public void testTimeRangeIfSomeCellsAreDroppedInFlush() throws IOException {
+    testTimeRangeIfSomeCellsAreDroppedInFlush(1);
+    testTimeRangeIfSomeCellsAreDroppedInFlush(3);
+    testTimeRangeIfSomeCellsAreDroppedInFlush(5);
+  }
+
+  private void testTimeRangeIfSomeCellsAreDroppedInFlush(int maxVersion) throws IOException {
+    init(this.name.getMethodName(), TEST_UTIL.getConfiguration(),
+        ColumnFamilyDescriptorBuilder.newBuilder(family).setMaxVersions(maxVersion).build());
+    long currentTs = 100;
+    long minTs = currentTs;
+    // the extra cell won't be flushed to disk,
+    // so the min of timerange will be different between memStore and hfile.
+    for (int i = 0; i != (maxVersion + 1); ++i) {
+      this.store.add(new KeyValue(row, family, qf1, ++currentTs, (byte[])null), null);
+      if (i == 1) {
+        minTs = currentTs;
+      }
+    }
+    flushStore(store, id++);
+
+    Collection<HStoreFile> files = store.getStorefiles();
+    assertEquals(1, files.size());
+    HStoreFile f = files.iterator().next();
+    f.initReader();
+    StoreFileReader reader = f.getReader();
+    assertEquals(minTs, reader.timeRange.getMin());
+    assertEquals(currentTs, reader.timeRange.getMax());
+  }
+
+  /**
+   * Getting data from files only
+   * @throws IOException
+   */
+  @Test
+  public void testGet_FromFilesOnly() throws IOException {
+    init(this.name.getMethodName());

-  /**
-   * Getting data from memstore and files
-   * @throws IOException
-   */
-  @Test
-  public void testGet_FromMemStoreAndFiles() throws IOException {
-    init(this.name.getMethodName());
-
-    //Put data in memstore
-    this.store.add(new KeyValue(row, family, qf1, 1, (byte[])null), null);
-    this.store.add(new KeyValue(row, family, qf2, 1, (byte[])null), null);
-    //flush
-    flush(1);
-
-    //Add more data
-    this.store.add(new KeyValue(row, family, qf3, 1, (byte[])null), null);
-    this.store.add(new KeyValue(row, family, qf4, 1, (byte[])null), null);
-    //flush
-    flush(2);
-
-    //Add more data
-    this.store.add(new KeyValue(row, family, qf5, 1, (byte[])null), null);
-    this.store.add(new KeyValue(row, family, qf6, 1, (byte[])null), null);
+    //Put data in memstore
+    this.store.add(new KeyValue(row, family, qf1, 1, (byte[])null), null);
+    this.store.add(new KeyValue(row, family, qf2, 1, (byte[])null), null);
+    //flush
+    flush(1);
+
+    //Add more data
+    this.store.add(new KeyValue(row, family, qf3, 1, (byte[])null), null);
+    this.store.add(new KeyValue(row, family, qf4, 1, (byte[])null), null);
+    //flush
+    flush(2);
+
+    //Add more data
+    this.store.add(new KeyValue(row, family, qf5, 1, (byte[])null), null);
+    this.store.add(new KeyValue(row, family, qf6, 1, (byte[])null),

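The new test in the hunk above writes maxVersion + 1 versions of one cell and asserts that, once the flush drops the oldest version, the store file's time range starts at the oldest surviving timestamp even though the memstore had tracked the older one. A self-contained sketch of that arithmetic in plain Java (the HBase store classes are deliberately not used here):

    // Sketch: which timestamps survive a flush that keeps only the newest
    // maxVersion cells of a column, and the time range the file then covers.
    public final class TimeRangeDemo {
      public static void main(String[] args) {
        int maxVersion = 3;
        long currentTs = 100;
        long[] written = new long[maxVersion + 1];
        for (int i = 0; i < written.length; i++) {
          written[i] = ++currentTs; // 101, 102, 103, 104
        }
        // The flush keeps only the newest maxVersion timestamps, so the
        // oldest write (101) is dropped and the file's minimum becomes 102,
        // while the memstore's tracker had still seen 101.
        long fileMin = written[1];
        long fileMax = written[written.length - 1];
        System.out.println("file time range = [" + fileMin + ", " + fileMax + "]");
      }
    }

For maxVersion = 3 this prints file time range = [102, 104]: timestamp 101 was written but dropped at flush, which is exactly the memstore/hfile difference the test checks.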
[10/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.FaultyOutputStream.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.FaultyOutputStream.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.FaultyOutputStream.html
index f49cc1a..956e7fd 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.FaultyOutputStream.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.FaultyOutputStream.html

[13/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.html 
b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.html
index ae966fe..c109cf1 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":9,"i9":10,"i10":9,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":9,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":9,"i9":10,"i10":9,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":9,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -577,6 +577,14 @@ extends java.lang.Object
 void
 testSwitchingPreadtoStreamParallelyWithCompactionDischarger()

+void
+testTimeRangeIfSomeCellsAreDroppedInFlush()
+
+private void
+testTimeRangeIfSomeCellsAreDroppedInFlush(int maxVersion)
 
 
 
@@ -1078,13 +1086,41 @@ extends java.lang.Object

+testTimeRangeIfSomeCellsAreDroppedInFlush
+public void testTimeRangeIfSomeCellsAreDroppedInFlush()
+                                    throws IOException
+
+Throws:
+IOException
+
+testTimeRangeIfSomeCellsAreDroppedInFlush
+private void testTimeRangeIfSomeCellsAreDroppedInFlush(int maxVersion)
+                                    throws IOException
+
+Throws:
+IOException

testGet_FromFilesOnly
-public void testGet_FromFilesOnly()
+public void testGet_FromFilesOnly()
                throws IOException
Getting data from files only

@@ -1099,7 +1135,7 @@ extends java.lang.Object

testGet_FromMemStoreAndFiles
-public void testGet_FromMemStoreAndFiles()
+public void testGet_FromMemStoreAndFiles()
                throws IOException
Getting data from memstore and files

@@ -1114,7 +1150,7 @@ extends java.lang.Object

flush
-private void flush(int storeFilessize)
+private void flush(int storeFilessize)
               throws IOException

Throws:
IOException

@@ -1128,7 +1164,7 @@ extends java.lang.Object

assertCheck
-private void assertCheck()
+private void assertCheck()

@@ -1137,7 +1173,7 @@ extends java.lang.Object

tearDown
-public void tearDown()
+public void tearDown()
          throws Exception

Throws:
Exception

@@ -1151,7 +1187,7 @@ extends java.lang.Object

tearDownAfterClass
-public static void tearDownAfterClass()
+public static void tearDownAfterClass()
                throws IOException

Throws:
IOException

@@ -1165,7

[11/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.FaultyFileSystem.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.FaultyFileSystem.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.FaultyFileSystem.html
index f49cc1a..956e7fd 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.FaultyFileSystem.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.FaultyFileSystem.html

[12/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.DummyStoreEngine.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.DummyStoreEngine.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.DummyStoreEngine.html
index f49cc1a..956e7fd 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.DummyStoreEngine.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.DummyStoreEngine.html

[16/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/plugin-management.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/plugin-management.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/plugin-management.html
index 417bf72..9a09f9f 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/plugin-management.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/plugin-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype  
Project Plugin Management
 
@@ -271,7 +271,7 @@
 https://www.apache.org/ The Apache Software Foundation.
 All rights reserved.

-  Last Published: 2017-10-08
+  Last Published: 2017-10-09
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/plugins.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/plugins.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/plugins.html
index be41474..b61d6ac 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/plugins.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/plugins.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype  
Project Plugins
 
@@ -226,7 +226,7 @@
 https://www.apache.org/ The Apache Software Foundation.
 All rights reserved.

-  Last Published: 2017-10-08
+  Last Published: 2017-10-09
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-info.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-info.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-info.html
index ca269bd..a75256e 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-info.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype  
Project Information
 
@@ -167,7 +167,7 @@
 https://www.apache.org/ The Apache Software Foundation.
 All rights reserved.

-  Last Published: 2017-10-08
+  Last Published: 2017-10-09
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-reports.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-reports.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-reports.html
index 90e7188..f912a68 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-reports.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-reports.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype  
Generated Reports
 
@@ -128,7 +128,7 @@
 https://www.apache.org/ The Apache Software Foundation.
 All rights reserved.

-  Last Published: 2017-10-08
+  Last Published: 2017-10-09
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-summary.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-summary.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-summary.html
index ff7eb7c..57583f0 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-summary.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/project-summary.html
@@ -7,7 +7,7 @@
   

[08/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyCompactingMemStoreWithCustomCompactor.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyCompactingMemStoreWithCustomCompactor.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyCompactingMemStoreWithCustomCompactor.html
index f49cc1a..956e7fd 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyCompactingMemStoreWithCustomCompactor.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyCompactingMemStoreWithCustomCompactor.html

[02/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyThread.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyThread.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyThread.html
index f49cc1a..956e7fd 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyThread.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyThread.html

[04/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyStore.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyStore.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyStore.html
index f49cc1a..956e7fd 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyStore.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyStore.html
[05/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyMemStoreCompactor.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyMemStoreCompactor.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyMemStoreCompactor.html
index f49cc1a..956e7fd 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyMemStoreCompactor.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyMemStoreCompactor.html

[20/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreFileWriter.Builder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreFileWriter.Builder.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreFileWriter.Builder.html
index 02426b3..679955d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreFileWriter.Builder.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreFileWriter.Builder.html
@@ -79,462 +79,413 @@
 071  private long deleteFamilyCnt = 0;
 072  private BloomContext bloomContext = null;
 073  private BloomContext deleteFamilyBloomContext = null;
-074
-075  /**
-076   * timeRangeTrackerSet is used to figure if we were passed a filled-out TimeRangeTracker or not.
-077   * When flushing a memstore, we set the TimeRangeTracker that it accumulated during updates to
-078   * memstore in here into this Writer and use this variable to indicate that we do not need to
-079   * recalculate the timeRangeTracker bounds; it was done already as part of add-to-memstore.
-080   * A completed TimeRangeTracker is not set in cases of compactions when it is recalculated.
-081   */
-082  private final boolean timeRangeTrackerSet;
-083  final TimeRangeTracker timeRangeTracker;
-084
-085  protected HFile.Writer writer;
-086
-087  /**
-088   * Creates an HFile.Writer that also write helpful meta data.
-089   * @param fs file system to write to
-090   * @param path file name to create
-091   * @param conf user configuration
-092   * @param comparator key comparator
-093   * @param bloomType bloom filter setting
-094   * @param maxKeys the expected maximum number of keys to be added. Was used
-095   *        for Bloom filter size in {@link HFile} format version 1.
-096   * @param fileContext - The HFile context
-097   * @param shouldDropCacheBehind Drop pages written to page cache after writing the store file.
-098   * @throws IOException problem writing to FS
-099   */
-100  StoreFileWriter(FileSystem fs, Path path, final Configuration conf, CacheConfig cacheConf,
-101      final CellComparator comparator, BloomType bloomType, long maxKeys,
-102      InetSocketAddress[] favoredNodes, HFileContext fileContext, boolean shouldDropCacheBehind)
-103      throws IOException {
-104    this(fs, path, conf, cacheConf, comparator, bloomType, maxKeys, favoredNodes, fileContext,
-105        shouldDropCacheBehind, null);
-106  }
-107
-108  /**
-109   * Creates an HFile.Writer that also write helpful meta data.
-110   * @param fs file system to write to
-111   * @param path file name to create
-112   * @param conf user configuration
-113   * @param comparator key comparator
-114   * @param bloomType bloom filter setting
-115   * @param maxKeys the expected maximum number of keys to be added. Was used
-116   *        for Bloom filter size in {@link HFile} format version 1.
-117   * @param favoredNodes
-118   * @param fileContext - The HFile context
-119   * @param shouldDropCacheBehind Drop pages written to page cache after writing the store file.
-120   * @param trt Ready-made timetracker to use.
-121   * @throws IOException problem writing to FS
-122   */
-123  private StoreFileWriter(FileSystem fs, Path path,
-124      final Configuration conf,
-125      CacheConfig cacheConf,
-126      final CellComparator comparator, BloomType bloomType, long maxKeys,
-127      InetSocketAddress[] favoredNodes, HFileContext fileContext,
-128      boolean shouldDropCacheBehind, final TimeRangeTracker trt)
-129      throws IOException {
-130    // If passed a TimeRangeTracker, use it. Set timeRangeTrackerSet so we don't destroy it.
-131    // TODO: put the state of the TRT on the TRT; i.e. make a read-only version (TimeRange) when
-132    // it no longer writable.
-133    this.timeRangeTrackerSet = trt != null;
-134    this.timeRangeTracker = this.timeRangeTrackerSet ? trt : TimeRangeTracker.create(TimeRangeTracker.Type.NON_SYNC);
-135    // TODO : Change all writers to be specifically created for compaction context
-136    writer = HFile.getWriterFactory(conf, cacheConf)
-137        .withPath(fs, path)
-138        .withComparator(comparator)
-139        .withFavoredNodes(favoredNodes)
-140        .withFileContext(fileContext)
-141        .withShouldDropCacheBehind(shouldDropCacheBehind)
-142        .create();
-143
-144    generalBloomFilterWriter = BloomFilterFactory.createGeneralBloomAtWrite(
-145        conf, cacheConf, bloomType,
-146        (int) Math.min(maxKeys, Integer.MAX_VALUE), writer);
-147
-148    if (generalBloomFilterWriter != null) {
-149      this.bloomType = bloomType;
-150      if (LOG.isTraceEnabled()) {
-151        LOG.trace("Bloom filter type for " + path + ": " + this.bloomType + ", " +
-152
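
The javadoc deleted above documents a reuse-or-recompute switch: a memstore flush hands the writer a TimeRangeTracker already filled in while cells were added (timeRangeTrackerSet becomes true and the bounds are not recomputed), while a compaction passes null and the writer rebuilds the bounds itself. A rough sketch of that switch in plain Java; Tracker and WriterSketch are made-up stand-ins for illustration, not the HBase classes:

  // Hypothetical stand-ins; the real TimeRangeTracker/StoreFileWriter have richer APIs.
  class Tracker {
    long min = Long.MAX_VALUE;
    long max = Long.MIN_VALUE;
    void include(long ts) { min = Math.min(min, ts); max = Math.max(max, ts); }
  }

  class WriterSketch {
    private final boolean trackerSet; // true => bounds were precomputed upstream
    private final Tracker tracker;

    WriterSketch(Tracker precomputed) {
      this.trackerSet = precomputed != null;    // the flush path supplies one
      this.tracker = trackerSet ? precomputed : new Tracker();
    }

    void append(long cellTimestamp) {
      if (!trackerSet) {
        // Compaction path: no ready-made tracker, so recompute bounds per cell.
        tracker.include(cellTimestamp);
      }
      // ... write the cell ...
    }
  }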

[26/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index 95e895f..2871360 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ -202,12 +202,12 @@
 
 java.lang.Enum&lt;E&gt; (implements java.lang.Comparable&lt;T&gt;, java.io.Serializable)
 
-org.apache.hadoop.hbase.quotas.ThrottleType
-org.apache.hadoop.hbase.quotas.QuotaType
 org.apache.hadoop.hbase.quotas.SpaceViolationPolicy
-org.apache.hadoop.hbase.quotas.OperationQuota.OperationType
+org.apache.hadoop.hbase.quotas.QuotaType
 org.apache.hadoop.hbase.quotas.ThrottlingException.Type
 org.apache.hadoop.hbase.quotas.QuotaScope
+org.apache.hadoop.hbase.quotas.OperationQuota.OperationType
+org.apache.hadoop.hbase.quotas.ThrottleType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/devapidocs/org/apache/hadoop/hbase/regionserver/HMobStore.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HMobStore.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HMobStore.html
index 8fd3c00..61ef9bd 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HMobStore.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HMobStore.html
@@ -472,7 +472,7 @@ extends HStore
-add,
 add,
 addChangedReaderObserver,
 areWritesEnabled,
 assertBulkLoadHFileOk,
 bulkLoadHFile,
 bulkLoadHFile,
 cancelRequestedCompaction,
 canSplit,
 close,
 closeAndArchiveCompactedFiles,
 compact,
 compactRecentForTestingAssumingDefaultPolicy,
 completeCompaction,
 createFlushContext,
 createWriterInTmp,
 createWriterInTmp,
 deleteChangedReaderObserver, deregisterChildren,
 determineTTLFromFamily,
 flushCache,
 getAvgStoreFileAge,
 getBlockingFileCount,
 getBytesPerChecksum, getCacheConfig,
 getChecksumType,
 getCloseCheckInterval,
 getColumnFamilyDescriptor,
 getColumnFamilyName,
 getCompactedCellsCount,
 getCompactedCellsSize,
 getCompactedFiles, getCompactedFilesCount,
 getCompactionCheckMultiplier,
 getCompactionPressure,
 getCompactionProgress,
 getCompactPriority,
 getComparator,
 getCoprocessorHost,
 getDataBlockEncoder, getFileSystem,
 getFlushableSize,
 getFlushedCellsCount,
 getFlushedCellsSize,
 getFlushedOutputFileSize,
 getHFilesSize,
 getHRegion,
 getLastCompactSize,
 getMajorCompactedCellsCount,
 getMajorCompactedCellsSize,
 getMaxMemStoreTS,
 getMaxSequenceId,
 getMaxStoreFileAge,
 getMemStoreFlushSize,
 getMemStoreSize,
 getMinStoreFileAge,
 getNumHFiles, getNumReferenceFiles,
 getOffPeakHours,
 getRegionFileSystem,
 getRegionInfo,
 getScanInfo,
 getScanner,
 getScanners, getScanners,
 getScanners,
 getScanners,
 getSize,
 getSmallestReadPoint,
 getSnapshotSize,
 getSplitPoint,
 getStoreEngine,
 getStorefiles,
 getStorefilesCount,
 getStorefilesIndexSize,
 getStorefilesSize,
 getStoreFileTtl,
 getStoreHomedir,
 getStoreHomedir,
 getStoreSizeUncompressed,
 getTableName,
 getTotalStaticBloomSize,
 getTotalStaticIndexSize,
 hasReferences,
 hasTooManyStoreFiles,
 heapSize, isPrimaryReplicaStore,
 isSloppyMemStore,
 moveFileIntoPlace,
 needsCompaction,
 onConfigurationChange,
 postSnapshotOperation,
 preBulkLoadHFile,
 preFlushSeqIDEstimation,
 preSnapshotOperation,
 recreateScanners,
 refreshStoreFiles,
 refreshStoreFiles,
 registerChildren,
 replaceStoreFiles,
 replayCompactionMarker,
 requestCompaction,
 requestCompaction,
 setDataBlockEncoderInTest,
 setScanInfo, shouldPerformMajorCompaction,
 snapshot,
 startReplayingFromWAL,
 stopReplayingFromWAL,
 throttleCompaction,
 timeOfOldestEdit,
 toString,
 triggerMajorCompaction,
 upsert,
 versionsToReturn
+add,
 add,
 addChangedReaderObserver,
 areWritesEnabled,
 assertBulkLoadHFileOk,
 bulkLoadHFile,
 bulkLoadHFile,
 cancelRequestedCompaction,
 canSplit,
 close,
 closeAndArchiveCompactedFiles,
 compact,
 compactRecentForTestingAssumingDefaultPolicy,
 completeCompaction,
 createFlushContext,
 createWriterInTmp,
 deleteChangedReaderObserver,
 deregisterChildren,
 determineTTLFromFamily,
 flushCache,
 getAvgStoreFileAge,
 getBlockingFileCount,
 getBytesPerChecksum,
 getCacheConfig,
 getChecksumType, getCloseCheckInterval,
 getColumnFamilyDescriptor,
 getColumnFamilyName,
 getCompactedCellsCount,
 getCompactedCellsSize,
 getCompactedFiles,
 getCompactedFilesCount,
 getCompactionCheckMultiplier, 

[27/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 03194db..3a6faab 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -26,7 +26,7 @@ under the License.
 2007 - 2017 The Apache Software Foundation
 
   File: 2054,
- Errors: 13741,
+ Errors: 13736,
  Warnings: 0,
  Infos: 0
   
@@ -993,7 +993,7 @@ under the License.
   0
 
 
-  60
+  59
 
   
   
@@ -22917,7 +22917,7 @@ under the License.
   0
 
 
-  17
+  13
 
   
   

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/coc.html
--
diff --git a/coc.html b/coc.html
index 7f6af4e..ec07f4a 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
 
 
-
+
 
 Apache HBase – Code of Conduct Policy
@@ -380,7 +380,7 @@ email to priv...@hbase.apache.org (the private list)
 The Apache Software Foundation (https://www.apache.org/).
 All rights reserved.
 
-  Last Published: 2017-10-08
+  Last Published: 2017-10-09
 
 


[15/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role

[17/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role

[22/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
index df1cfa8..df8ff34 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HStore.StoreFlusherImpl.html
@@ -1042,132 +1042,132 @@
 1034   * @param includesTag - includesTag or not
 1035   * @return Writer for a new StoreFile in the tmp dir.
 1036   */
-1037  public StoreFileWriter createWriterInTmp(long maxKeyCount, Compression.Algorithm compression,
-1038      boolean isCompaction, boolean includeMVCCReadpoint, boolean includesTag,
-1039      boolean shouldDropBehind) throws IOException {
-1040    return createWriterInTmp(maxKeyCount, compression, isCompaction, includeMVCCReadpoint,
-1041      includesTag, shouldDropBehind, null);
-1042  }
-1043
-1044  /**
-1045   * @param maxKeyCount
-1046   * @param compression Compression algorithm to use
-1047   * @param isCompaction whether we are creating a new file in a compaction
-1048   * @param includeMVCCReadpoint - whether to include MVCC or not
-1049   * @param includesTag - includesTag or not
-1050   * @return Writer for a new StoreFile in the tmp dir.
-1051   */
-1052  // TODO : allow the Writer factory to create Writers of ShipperListener type only in case of
-1053  // compaction
-1054  public StoreFileWriter createWriterInTmp(long maxKeyCount, Compression.Algorithm compression,
-1055      boolean isCompaction, boolean includeMVCCReadpoint, boolean includesTag,
-1056      boolean shouldDropBehind, TimeRangeTracker trt) throws IOException {
-1057    final CacheConfig writerCacheConf;
-1058    if (isCompaction) {
-1059      // Don't cache data on write on compactions.
-1060      writerCacheConf = new CacheConfig(cacheConf);
-1061      writerCacheConf.setCacheDataOnWrite(false);
-1062    } else {
-1063      writerCacheConf = cacheConf;
-1064    }
-1065    InetSocketAddress[] favoredNodes = null;
-1066    if (region.getRegionServerServices() != null) {
-1067      favoredNodes = region.getRegionServerServices().getFavoredNodesForRegion(
-1068          region.getRegionInfo().getEncodedName());
-1069    }
-1070    HFileContext hFileContext = createFileContext(compression, includeMVCCReadpoint, includesTag,
-1071      cryptoContext);
-1072    Path familyTempDir = new Path(fs.getTempDir(), family.getNameAsString());
-1073    StoreFileWriter.Builder builder = new StoreFileWriter.Builder(conf, writerCacheConf,
-1074        this.getFileSystem())
-1075            .withOutputDir(familyTempDir)
-1076            .withComparator(comparator)
-1077            .withBloomType(family.getBloomFilterType())
-1078            .withMaxKeyCount(maxKeyCount)
-1079            .withFavoredNodes(favoredNodes)
-1080            .withFileContext(hFileContext)
-1081            .withShouldDropCacheBehind(shouldDropBehind);
-1082    if (trt != null) {
-1083      builder.withTimeRangeTracker(trt);
-1084    }
-1085    return builder.build();
-1086  }
-1087
-1088  private HFileContext createFileContext(Compression.Algorithm compression,
-1089      boolean includeMVCCReadpoint, boolean includesTag, Encryption.Context cryptoContext) {
-1090    if (compression == null) {
-1091      compression = HFile.DEFAULT_COMPRESSION_ALGORITHM;
-1092    }
-1093    HFileContext hFileContext = new HFileContextBuilder()
-1094        .withIncludesMvcc(includeMVCCReadpoint)
-1095        .withIncludesTags(includesTag)
-1096        .withCompression(compression)
-1097        .withCompressTags(family.isCompressTags())
-1098        .withChecksumType(checksumType)
-1099        .withBytesPerCheckSum(bytesPerChecksum)
-1100        .withBlockSize(blocksize)
-1101        .withHBaseCheckSum(true)
-1102        .withDataBlockEncoding(family.getDataBlockEncoding())
-1103        .withEncryptionContext(cryptoContext)
-1104        .withCreateTime(EnvironmentEdgeManager.currentTime())
-1105        .build();
-1106    return hFileContext;
-1107  }
-1108
-1109
-1110  private long getTotalSize(Collection&lt;HStoreFile&gt; sfs) {
-1111    return sfs.stream().mapToLong(sf -&gt; sf.getReader().length()).sum();
-1112  }
-1113
-1114  /**
-1115   * Change storeFiles adding into place the Reader produced by this new flush.
-1116   * @param sfs Store files
-1117   * @param snapshotId
-1118   * @throws
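
One detail worth noting in the deleted createWriterInTmp above: when writing for a compaction, the store copies its CacheConfig and turns off cache-on-write, so large rewrites do not churn the block cache; flushes keep the shared config. A generic sketch of that copy-and-override pattern; this CacheConfig is a simplified stand-in, not the HBase class:

  // Hypothetical, simplified stand-in for illustration only.
  class CacheConfig {
    private boolean cacheDataOnWrite;

    CacheConfig(boolean cacheDataOnWrite) { this.cacheDataOnWrite = cacheDataOnWrite; }

    // Copy constructor: start from the store-wide defaults.
    CacheConfig(CacheConfig other) { this.cacheDataOnWrite = other.cacheDataOnWrite; }

    void setCacheDataOnWrite(boolean v) { cacheDataOnWrite = v; }
    boolean isCacheDataOnWrite() { return cacheDataOnWrite; }

    static CacheConfig forWriter(CacheConfig storeDefaults, boolean isCompaction) {
      if (!isCompaction) {
        return storeDefaults;            // flushes use the shared config unchanged
      }
      CacheConfig copy = new CacheConfig(storeDefaults);
      copy.setCacheDataOnWrite(false);   // don't flood the cache during compactions
      return copy;
    }
  }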

[09/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyCompactingMemStore.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyCompactingMemStore.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyCompactingMemStore.html
index f49cc1a..956e7fd 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyCompactingMemStore.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyCompactingMemStore.html
@@ -523,1289 +523,1320 @@
 515    assertCheck();
 516  }
 517
-518  /**
-519   * Getting data from files only
-520   * @throws IOException
-521   */
-522  @Test
-523  public void testGet_FromFilesOnly() throws IOException {
-524    init(this.name.getMethodName());
-525
-526    //Put data in memstore
-527    this.store.add(new KeyValue(row, family, qf1, 1, (byte[])null), null);
-528    this.store.add(new KeyValue(row, family, qf2, 1, (byte[])null), null);
-529    //flush
-530    flush(1);
-531
-532    //Add more data
-533    this.store.add(new KeyValue(row, family, qf3, 1, (byte[])null), null);
-534    this.store.add(new KeyValue(row, family, qf4, 1, (byte[])null), null);
-535    //flush
-536    flush(2);
-537
-538    //Add more data
-539    this.store.add(new KeyValue(row, family, qf5, 1, (byte[])null), null);
-540    this.store.add(new KeyValue(row, family, qf6, 1, (byte[])null), null);
-541    //flush
-542    flush(3);
-543
-544    //Get
-545    result = HBaseTestingUtility.getFromStoreFile(store,
-546        get.getRow(),
-547        qualifiers);
-548    //this.store.get(get, qualifiers, result);
-549
-550    //Need to sort the result since multiple files
-551    Collections.sort(result, CellComparator.COMPARATOR);
-552
-553    //Compare
-554    assertCheck();
-555  }
+518  @Test
+519  public void testTimeRangeIfSomeCellsAreDroppedInFlush() throws IOException {
+520    testTimeRangeIfSomeCellsAreDroppedInFlush(1);
+521    testTimeRangeIfSomeCellsAreDroppedInFlush(3);
+522    testTimeRangeIfSomeCellsAreDroppedInFlush(5);
+523  }
+524
+525  private void testTimeRangeIfSomeCellsAreDroppedInFlush(int maxVersion) throws IOException {
+526    init(this.name.getMethodName(), TEST_UTIL.getConfiguration(),
+527        ColumnFamilyDescriptorBuilder.newBuilder(family).setMaxVersions(maxVersion).build());
+528    long currentTs = 100;
+529    long minTs = currentTs;
+530    // the extra cell won't be flushed to disk,
+531    // so the min of timerange will be different between memStore and hfile.
+532    for (int i = 0; i != (maxVersion + 1); ++i) {
+533      this.store.add(new KeyValue(row, family, qf1, ++currentTs, (byte[])null), null);
+534      if (i == 1) {
+535        minTs = currentTs;
+536      }
+537    }
+538    flushStore(store, id++);
+539
+540    Collection&lt;HStoreFile&gt; files = store.getStorefiles();
+541    assertEquals(1, files.size());
+542    HStoreFile f = files.iterator().next();
+543    f.initReader();
+544    StoreFileReader reader = f.getReader();
+545    assertEquals(minTs, reader.timeRange.getMin());
+546    assertEquals(currentTs, reader.timeRange.getMax());
+547  }
+548
+549  /**
+550   * Getting data from files only
+551   * @throws IOException
+552   */
+553  @Test
+554  public void testGet_FromFilesOnly() throws IOException {
+555    init(this.name.getMethodName());
 556
-557  /**
-558   * Getting data from memstore and files
-559   * @throws IOException
-560   */
-561  @Test
-562  public void testGet_FromMemStoreAndFiles() throws IOException {
-563    init(this.name.getMethodName());
-564
-565    //Put data in memstore
-566    this.store.add(new KeyValue(row, family, qf1, 1, (byte[])null), null);
-567    this.store.add(new KeyValue(row, family, qf2, 1, (byte[])null), null);
-568    //flush
-569    flush(1);
-570
-571    //Add more data
-572    this.store.add(new KeyValue(row, family, qf3, 1, (byte[])null), null);
-573    this.store.add(new KeyValue(row, family, qf4, 1, (byte[])null), null);
-574    //flush
-575    flush(2);
-576
-577    //Add more data
-578    this.store.add(new KeyValue(row, family, qf5, 1, (byte[])null), null);
-579    this.store.add(new KeyValue(row, family, qf6, 1, (byte[])null), null);
+557    //Put data in memstore
+558    this.store.add(new KeyValue(row, family, qf1, 1, (byte[])null), null);
+559    this.store.add(new KeyValue(row, family, qf2, 1, (byte[])null), null);
+560    //flush
+561    flush(1);
+562
+563    //Add more data
+564    this.store.add(new KeyValue(row, family, qf3, 1, (byte[])null), null);
+565    this.store.add(new KeyValue(row, family, qf4, 1, (byte[])null), null);
+566    //flush
+567    flush(2);
+568
+569    //Add more data
+570    this.store.add(new KeyValue(row, family, qf5, 1, (byte[])null), null);
+571    this.store.add(new
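
The gist of the new testTimeRangeIfSomeCellsAreDroppedInFlush above: when a flush drops versions beyond the column family's max-versions limit, the flushed HFile's time range must cover only the retained cells, so its minimum differs from the memstore's. A minimal, self-contained sketch of that bookkeeping in plain Java (illustration only, not the HBase API):

  import java.util.ArrayDeque;
  import java.util.Deque;

  // Keep at most maxVersions newest timestamps for one cell, then derive the
  // time range of what would actually be flushed to disk.
  public class RetainedTimeRange {
    public static void main(String[] args) {
      int maxVersions = 3;
      Deque<Long> retained = new ArrayDeque<>();
      long ts = 100;
      for (int i = 0; i != maxVersions + 1; ++i) {
        retained.addFirst(++ts);             // newest first
        if (retained.size() > maxVersions) {
          retained.removeLast();             // oldest version is dropped
        }
      }
      // min reflects the oldest *retained* cell, not the oldest ever written.
      long min = retained.getLast(), max = retained.getFirst();
      System.out.println("timeRange = [" + min + ", " + max + "]"); // [102, 104]
    }
  }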

[25/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
index a0b7dbf..0382282 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":9,"i30":10,"i31":10,"i32":10,"i33":10,"i34":9,"i35":10,"i36":9,"i37":9,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":41,"i92":41,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109"
 
:10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":9,"i29":10,"i30":10,"i31":10,"i32":10,"i33":9,"i34":10,"i35":9,"i36":9,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":41,"i91":41,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109"
 
:10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -564,34 +564,24 @@ implements 
-StoreFileWriter
-createWriterInTmp(long maxKeyCount,
- Compression.Algorithm compression,
- boolean isCompaction,
- boolean includeMVCCReadpoint,
- boolean includesTag,
- boolean shouldDropBehind,
- TimeRangeTracker trt)
-
-
 void
 deleteChangedReaderObserver(ChangedReadersObserver o)
 
-
+
 void
 deregisterChildren(ConfigurationManager manager)
 Needs to be called to deregister the children from the 
manager.
 
 
-
+
 static long
 determineTTLFromFamily(ColumnFamilyDescriptorfamily)
 
-
+
 private void
 finishCompactionRequest(CompactionRequestImplcr)
 
-
+
 protected http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.fs.Path
 flushCache(longlogCacheFlushId,
   MemStoreSnapshotsnapshot,
@@ -600,183 +590,183 @@ implements Write out current snapshot.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/OptionalDouble.html?is-external=true;
 title="class or interface in java.util">OptionalDouble
 getAvgStoreFileAge()
 
-
+
 long
 getBlockingFileCount()
 The number of files required before flushes for this store 
will be blocked.
 
 
-
+
 static int
 getBytesPerChecksum(org.apache.hadoop.conf.Configurationconf)
 Returns the configured bytesPerChecksum value.
 
 
-
+
 CacheConfig
 getCacheConfig()
 Used for tests.
 
 
-
+
 static ChecksumType
 getChecksumType(org.apache.hadoop.conf.Configurationconf)
 Returns the configured checksum algorithm.
 
 
-
+
 static int
 

[28/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index fecc90c..1ba7560 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -289,7 +289,7 @@
 2054
 0
 0
-13741
+13736
 
 Files
 
@@ -5057,7 +5057,7 @@
 org/apache/hadoop/hbase/regionserver/HStore.java
 0
 0
-60
+59
 
 org/apache/hadoop/hbase/regionserver/HStoreFile.java
 0
@@ -5512,7 +5512,7 @@
 org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
 0
 0
-17
+13
 
 org/apache/hadoop/hbase/regionserver/StoreFlushContext.java
 0
@@ -8239,7 +8239,7 @@
 
 design
 FinalClass (http://checkstyle.sourceforge.net/config_design.html#FinalClass)
-62
+63
 Error
 
 
@@ -8294,19 +8294,19 @@
 caseIndent: 2
 basicOffset: 2
 lineWrappingIndentation: 2
-3797
+3794
 Error
 
 javadoc
 JavadocTagContinuationIndentation (http://checkstyle.sourceforge.net/config_javadoc.html#JavadocTagContinuationIndentation)
 
 offset: 2
-766
+765
 Error
 
 
 NonEmptyAtclauseDescription (http://checkstyle.sourceforge.net/config_javadoc.html#NonEmptyAtclauseDescription)
-3282
+3281
 Error
 
 misc
@@ -8324,7 +8324,7 @@
 
 max: 100
 ignorePattern: ^package.*|^import.*|a 
href|href|http://|https://|ftp://|org.apache.thrift.|com.google.protobuf.|hbase.protobuf.generated
-1123
+1122
 Error
 
 
@@ -67699,179 +67699,173 @@
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-1045
+1099
 
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-1117
+1100
 
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-1118
+1132
 
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-1150
+1327
 
 Error
-javadoc
-NonEmptyAtclauseDescription
-At-clause should have a non-empty description.
-1345
-
-Error
 sizes
 LineLength
 Line is longer than 100 characters (found 101).
-1420
-
+1402
+
 Error
 sizes
 LineLength
 Line is longer than 100 characters (found 114).
-1459
-
+1441
+
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-1522
-
+1504
+
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1750
-
+1732
+
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1760
-
+1742
+
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1780
-
+1762
+
 Error
 indentation
 Indentation
 'if' have incorrect indentation level 10, expected level should be 8.
-1875
-
+1857
+
 Error
 indentation
 Indentation
 'if' child have incorrect indentation level 12, expected level should be 
10.
-1876
-
+1858
+
 Error
 indentation
 Indentation
 'if rcurly' have incorrect indentation level 10, expected level should be 
8.
-1877
-
-Error
-javadoc
-NonEmptyAtclauseDescription
-At-clause should have a non-empty description.
-1975
+1859
 
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-2169
+1957
 
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-2171
+2151
 
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-2172
+2153
 
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-2281
+2154
 
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-2311
+2263
 
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-2511
+2293
 
 Error
+javadoc
+NonEmptyAtclauseDescription
+At-clause should have a non-empty description.
+2493
+
+Error
 sizes
 LineLength
 Line is longer than 100 characters (found 102).
-2553
+2535
 
 org/apache/hadoop/hbase/regionserver/HStoreFile.java
 
-
+
 Severity
 Category
 Rule
 Message
 Line
-
+
 Error
 imports
 ImportOrder
 Wrong order for 
'org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting'
 import.
 45
-
+
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
 201
-
+
 Error
 sizes
 LineLength
 Line is longer than 100 characters (found 101).
 221
-
+
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
 291
-
+
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
 341
-
+
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
 511
-
+
 Error
 javadoc
 NonEmptyAtclauseDescription
@@ -67880,103 +67874,103 @@
 
 org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java
 
-
+
 Severity
 Category
 Rule
 Message
 Line
-
+
 Error
 imports
 ImportOrder
 Wrong order for 'org.apache.hadoop.hbase.io.hfile.CacheConfig' import.
 36
-
+
 Error
 imports
 ImportOrder
 Wrong order for 

[01/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site dc3684f7c -> e6ae7c3e1


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.html 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.html
index f49cc1a..956e7fd 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.html

[18/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.BoundaryStripeFlushRequest.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.BoundaryStripeFlushRequest.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.BoundaryStripeFlushRequest.html
index 5dc6280..e5972e6 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.BoundaryStripeFlushRequest.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.BoundaryStripeFlushRequest.html
@@ -83,110 +83,107 @@
 075    StripeMultiFileWriter mw = null;
 076    try {
 077      mw = req.createWriter(); // Writer according to the policy.
-078      StripeMultiFileWriter.WriterFactory factory = createWriterFactory(
-079          snapshot.getTimeRangeTracker(), cellsCount);
-080      StoreScanner storeScanner = (scanner instanceof StoreScanner) ? (StoreScanner)scanner : null;
-081      mw.init(storeScanner, factory);
-082
-083      synchronized (flushLock) {
-084        performFlush(scanner, mw, smallestReadPoint, throughputController);
-085        result = mw.commitWriters(cacheFlushSeqNum, false);
-086        success = true;
-087      }
-088    } finally {
-089      if (!success &amp;&amp; (mw != null)) {
-090        for (Path leftoverFile : mw.abortWriters()) {
-091          try {
-092            store.getFileSystem().delete(leftoverFile, false);
-093          } catch (Exception e) {
-094            LOG.error("Failed to delete a file after failed flush: " + e);
-095          }
-096        }
-097      }
-098      try {
-099        scanner.close();
-100      } catch (IOException ex) {
-101        LOG.warn("Failed to close flush scanner, ignoring", ex);
-102      }
-103    }
-104    return result;
-105  }
-106
-107  private StripeMultiFileWriter.WriterFactory createWriterFactory(
-108      final TimeRangeTracker tracker, final long kvCount) {
-109    return new StripeMultiFileWriter.WriterFactory() {
-110      @Override
-111      public StoreFileWriter createWriter() throws IOException {
-112        StoreFileWriter writer = store.createWriterInTmp(
-113            kvCount, store.getColumnFamilyDescriptor().getCompressionType(),
-114            /* isCompaction = */ false,
-115            /* includeMVCCReadpoint = */ true,
-116            /* includesTags = */ true,
-117            /* shouldDropBehind = */ false,
-118            tracker);
-119        return writer;
-120      }
-121    };
-122  }
+078      StripeMultiFileWriter.WriterFactory factory = createWriterFactory(cellsCount);
+079      StoreScanner storeScanner = (scanner instanceof StoreScanner) ? (StoreScanner)scanner : null;
+080      mw.init(storeScanner, factory);
+081
+082      synchronized (flushLock) {
+083        performFlush(scanner, mw, smallestReadPoint, throughputController);
+084        result = mw.commitWriters(cacheFlushSeqNum, false);
+085        success = true;
+086      }
+087    } finally {
+088      if (!success &amp;&amp; (mw != null)) {
+089        for (Path leftoverFile : mw.abortWriters()) {
+090          try {
+091            store.getFileSystem().delete(leftoverFile, false);
+092          } catch (Exception e) {
+093            LOG.error("Failed to delete a file after failed flush: " + e);
+094          }
+095        }
+096      }
+097      try {
+098        scanner.close();
+099      } catch (IOException ex) {
+100        LOG.warn("Failed to close flush scanner, ignoring", ex);
+101      }
+102    }
+103    return result;
+104  }
+105
+106  private StripeMultiFileWriter.WriterFactory createWriterFactory(final long kvCount) {
+107    return new StripeMultiFileWriter.WriterFactory() {
+108      @Override
+109      public StoreFileWriter createWriter() throws IOException {
+110        StoreFileWriter writer = store.createWriterInTmp(
+111            kvCount, store.getColumnFamilyDescriptor().getCompressionType(),
+112            /* isCompaction = */ false,
+113            /* includeMVCCReadpoint = */ true,
+114            /* includesTags = */ true,
+115            /* shouldDropBehind = */ false);
+116        return writer;
+117      }
+118    };
+119  }
+120
+121  /** Stripe flush request wrapper that writes a non-striped file. */
+122  public static class StripeFlushRequest {
 123
-124  /** Stripe flush request wrapper that writes a non-striped file. */
-125  public static class StripeFlushRequest {
-126
-127    protected final CellComparator comparator;
-128
-129    public StripeFlushRequest(CellComparator comparator) {
-130      this.comparator = comparator;
-131    }
-132
-133    @VisibleForTesting
-134    public StripeMultiFileWriter createWriter() throws IOException {
-135      StripeMultiFileWriter writer = new StripeMultiFileWriter.SizeMultiWriter(comparator, 1,
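
The refactor above drops the TimeRangeTracker parameter from createWriterFactory, so each writer now tracks its own time range. What survives is an anonymous factory that closes over its configuration. A generic sketch of that pattern; WriterFactory and Writer here are simplified stand-ins, not the HBase interfaces:

  import java.io.IOException;

  interface WriterFactory {
    Writer createWriter() throws IOException;
  }

  class Writer {
    final long expectedKeyCount;
    Writer(long expectedKeyCount) { this.expectedKeyCount = expectedKeyCount; }
  }

  class Flusher {
    // The factory closes over kvCount, so every writer the multi-writer asks
    // for is configured consistently without threading extra parameters around.
    WriterFactory createWriterFactory(final long kvCount) {
      return new WriterFactory() {
        @Override
        public Writer createWriter() throws IOException {
          return new Writer(kvCount);
        }
      };
    }
  }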

[19/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreFileWriter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreFileWriter.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreFileWriter.html
index 02426b3..679955d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreFileWriter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreFileWriter.html

[03/29] hbase-site git commit: Published site at .

2017-10-09 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6ae7c3e/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyStoreHook.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyStoreHook.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyStoreHook.html
index f49cc1a..956e7fd 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyStoreHook.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStore.MyStoreHook.html

[1/2] hbase git commit: HBASE-18924 Backport HBASE-18568 (Correct metric of numRegions) to branch-1.2 and branch-1.3

2017-10-09 Thread mdrob
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 02051 -> 2f0101df2
  refs/heads/branch-1.3 15288c055 -> 0d836d986


HBASE-18924 Backport HBASE-18568 (Correct metric of numRegions) to branch-1.2 and branch-1.3

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2f0101df
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2f0101df
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2f0101df

Branch: refs/heads/branch-1.2
Commit: 2f0101df25b5096757aeb8e768d110fa9b8b686f
Parents: 020
Author: Peter Somogyi 
Authored: Mon Oct 2 14:57:05 2017 -0700
Committer: Mike Drob 
Committed: Mon Oct 9 09:15:58 2017 -0500

--
 .../apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2f0101df/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
--
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
index 86df792..4a28bc4 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
@@ -65,6 +65,7 @@ public class MetricsRegionSourceImpl implements MetricsRegionSource {
       MetricsRegionAggregateSourceImpl aggregate) {
     this.regionWrapper = regionWrapper;
     agg = aggregate;
+    hashCode = regionWrapper.getRegionHashCode();
     agg.register(this);
 
     LOG.debug("Creating new MetricsRegionSourceImpl for table " +
@@ -96,8 +97,6 @@ public class MetricsRegionSourceImpl implements MetricsRegionSource {
 
     regionScanNextKey = regionNamePrefix + MetricsRegionServerSource.SCAN_NEXT_KEY;
     regionScanNext = registry.newTimeHistogram(regionScanNextKey);
-
-    hashCode = regionWrapper.getRegionHashCode();
   }
 
   @Override



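The one-line move in this patch matters because MetricsRegionSourceImpl caches its hash code in a field: agg.register(this) hands the object to a hash-based collection, and before the fix that cached field was still 0 at registration time, so the aggregate could never find the source again to deregister it, skewing numRegions. A minimal, self-contained illustration of the hazard (hypothetical names, not HBase code):

import java.util.HashSet;
import java.util.Set;

// Registering an object in a HashSet before its cached hashCode is
// initialized files it under the stale hash (0), so a later remove()
// looks in the wrong bucket and the "region count" never drops.
public class HashBeforeRegisterSketch {
  static class Source {
    private int hashCode; // cached, like MetricsRegionSourceImpl

    Source(int regionHash, Set<Source> aggregate, boolean initFirst) {
      if (initFirst) {
        hashCode = regionHash; // the fix: assign before registering
      }
      aggregate.add(this);     // register(this)
      if (!initFirst) {
        hashCode = regionHash; // the bug: assign after registering
      }
    }

    @Override
    public int hashCode() {
      return hashCode;
    }
  }

  public static void main(String[] args) {
    Set<Source> agg = new HashSet<>();
    Source buggy = new Source(42, agg, false);
    System.out.println(agg.remove(buggy)); // false: stored under hash 0
    agg.clear();
    Source fixed = new Source(42, agg, true);
    System.out.println(agg.remove(fixed)); // true: hash consistent
  }
}
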
[2/2] hbase git commit: HBASE-18924 Backport HBASE-18568 (Correct metric of numRegions) to branch-1.2 and branch-1.3

2017-10-09 Thread mdrob
HBASE-18924 Backport HBASE-18568 (Correct metric of numRegions) to branch-1.2 and branch-1.3

Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0d836d98
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0d836d98
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0d836d98

Branch: refs/heads/branch-1.3
Commit: 0d836d986f04d8e6ca09774ff08db603ae34c21f
Parents: 15288c0
Author: Peter Somogyi 
Authored: Fri Oct 6 10:45:27 2017 -0700
Committer: Mike Drob 
Committed: Mon Oct 9 09:26:36 2017 -0500

--
 .../apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0d836d98/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
--
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
index 8f17e93..924da8b 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
@@ -69,6 +69,7 @@ public class MetricsRegionSourceImpl implements MetricsRegionSource {
       MetricsRegionAggregateSourceImpl aggregate) {
     this.regionWrapper = regionWrapper;
     agg = aggregate;
+    hashCode = regionWrapper.getRegionHashCode();
     agg.register(this);
 
     LOG.debug("Creating new MetricsRegionSourceImpl for table " +
@@ -100,8 +101,6 @@ public class MetricsRegionSourceImpl implements MetricsRegionSource {
 
     regionScanKey = regionNamePrefix + MetricsRegionServerSource.SCAN_KEY + suffix;
     regionScan = registry.getCounter(regionScanKey, 0L);
-
-    hashCode = regionWrapper.getRegionHashCode();
   }
 
   @Override



hbase git commit: HBASE-18752 Recalculate the TimeRange in flushing snapshot to store file

2017-10-09 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 496fcda1d -> 13a53811d


HBASE-18752 Recalculate the TimeRange in flushing snapshot to store file


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/13a53811
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/13a53811
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/13a53811

Branch: refs/heads/branch-2
Commit: 13a53811de2ced9c6d599e2f91a777d2ad1a9589
Parents: 496fcda
Author: Chia-Ping Tsai 
Authored: Thu Oct 5 22:17:16 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Mon Oct 9 17:22:19 2017 +0800

--
 .../hbase/mob/DefaultMobStoreFlusher.java   |  2 +-
 .../hbase/regionserver/DefaultStoreFlusher.java |  3 +-
 .../hadoop/hbase/regionserver/HStore.java   | 20 +--
 .../hbase/regionserver/StoreFileWriter.java | 59 ++--
 .../hbase/regionserver/StripeStoreFlusher.java  |  9 +--
 .../hadoop/hbase/regionserver/TestHStore.java   | 31 ++
 6 files changed, 42 insertions(+), 82 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/13a53811/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
index bef73f2..5b49862 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
@@ -119,7 +119,7 @@ public class DefaultMobStoreFlusher extends DefaultStoreFlusher {
     status.setStatus("Flushing " + store + ": creating writer");
     // Write the map out to the disk
     writer = store.createWriterInTmp(cellsCount, store.getColumnFamilyDescriptor().getCompressionType(),
-        false, true, true, false, snapshot.getTimeRangeTracker());
+        false, true, true, false);
     IOException e = null;
     try {
       // It's a mob store, flush the cells in a mob way. This is the difference of flushing

http://git-wip-us.apache.org/repos/asf/hbase/blob/13a53811/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
index a5dd9f7..2e907e8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
@@ -68,8 +68,7 @@ public class DefaultStoreFlusher extends StoreFlusher {
 /* isCompaction = */ false,
 /* includeMVCCReadpoint = */ true,
 /* includesTags = */ snapshot.isTagsPresent(),
-/* shouldDropBehind = */ false,
-snapshot.getTimeRangeTracker());
+/* shouldDropBehind = */ false);
 IOException e = null;
 try {
      performFlush(scanner, writer, smallestReadPoint, throughputController);

http://git-wip-us.apache.org/repos/asf/hbase/blob/13a53811/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index 92171d3..d8e82bb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -1034,26 +1034,11 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
    * @param includesTag - includesTag or not
    * @return Writer for a new StoreFile in the tmp dir.
    */
-  public StoreFileWriter createWriterInTmp(long maxKeyCount, Compression.Algorithm compression,
-      boolean isCompaction, boolean includeMVCCReadpoint, boolean includesTag,
-      boolean shouldDropBehind) throws IOException {
-    return createWriterInTmp(maxKeyCount, compression, isCompaction, includeMVCCReadpoint,
-      includesTag, shouldDropBehind, null);
-  }
-
-  /**
-   * @param maxKeyCount
-   * @param compression Compression algorithm to use
-   * @param isCompaction whether we are creating a new file in a compaction
-   * @param includeMVCCReadpoint - whether to include MVCC or not
-   * @param includesTag - includesTag or not
-   *

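The thrust of the change: instead of stamping the new store file with the memstore snapshot's TimeRangeTracker, the writer now derives the TimeRange from the cells it actually persists, so cells dropped during the flush cannot widen the on-disk range. A rough sketch of that per-cell tracking under simplified, hypothetical types (the real work happens inside StoreFileWriter, which is not shown in full here):

// Rough sketch of per-cell time-range tracking during a flush, assuming a
// hypothetical Cell that carries only a timestamp.
public class FlushTimeRangeSketch {
  static final class Cell {
    final long timestamp;
    Cell(long timestamp) { this.timestamp = timestamp; }
  }

  private long minTs = Long.MAX_VALUE;
  private long maxTs = Long.MIN_VALUE;

  void append(Cell cell) {
    // Track the range from cells actually written, not from the snapshot.
    minTs = Math.min(minTs, cell.timestamp);
    maxTs = Math.max(maxTs, cell.timestamp);
  }

  public static void main(String[] args) {
    FlushTimeRangeSketch writer = new FlushTimeRangeSketch();
    // Suppose the snapshot held cells at ts 101..104 but ts 101 was dropped
    // for exceeding maxVersions; only the surviving cells are appended.
    for (long ts = 102; ts <= 104; ts++) {
      writer.append(new Cell(ts));
    }
    System.out.println("[" + writer.minTs + ", " + writer.maxTs + "]"); // [102, 104]
  }
}
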
hbase git commit: HBASE-18752 Recalculate the TimeRange in flushing snapshot to store file

2017-10-09 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master fcdf96a0e -> e2cef8aa8


HBASE-18752 Recalculate the TimeRange in flushing snapshot to store file


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e2cef8aa
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e2cef8aa
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e2cef8aa

Branch: refs/heads/master
Commit: e2cef8aa805478feb7752fab738ee997e2bf374f
Parents: fcdf96a
Author: Chia-Ping Tsai 
Authored: Thu Oct 5 22:17:16 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Mon Oct 9 17:22:29 2017 +0800

--
 .../hbase/mob/DefaultMobStoreFlusher.java   |  2 +-
 .../hbase/regionserver/DefaultStoreFlusher.java |  3 +-
 .../hadoop/hbase/regionserver/HStore.java   | 20 +--
 .../hbase/regionserver/StoreFileWriter.java | 59 ++--
 .../hbase/regionserver/StripeStoreFlusher.java  |  9 +--
 .../hadoop/hbase/regionserver/TestHStore.java   | 31 ++
 6 files changed, 42 insertions(+), 82 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e2cef8aa/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
index bef73f2..5b49862 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
@@ -119,7 +119,7 @@ public class DefaultMobStoreFlusher extends DefaultStoreFlusher {
     status.setStatus("Flushing " + store + ": creating writer");
     // Write the map out to the disk
     writer = store.createWriterInTmp(cellsCount, store.getColumnFamilyDescriptor().getCompressionType(),
-        false, true, true, false, snapshot.getTimeRangeTracker());
+        false, true, true, false);
     IOException e = null;
     try {
       // It's a mob store, flush the cells in a mob way. This is the difference of flushing

http://git-wip-us.apache.org/repos/asf/hbase/blob/e2cef8aa/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
index a5dd9f7..2e907e8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
@@ -68,8 +68,7 @@ public class DefaultStoreFlusher extends StoreFlusher {
 /* isCompaction = */ false,
 /* includeMVCCReadpoint = */ true,
 /* includesTags = */ snapshot.isTagsPresent(),
-/* shouldDropBehind = */ false,
-snapshot.getTimeRangeTracker());
+/* shouldDropBehind = */ false);
 IOException e = null;
 try {
      performFlush(scanner, writer, smallestReadPoint, throughputController);

http://git-wip-us.apache.org/repos/asf/hbase/blob/e2cef8aa/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index 92171d3..d8e82bb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -1034,26 +1034,11 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
    * @param includesTag - includesTag or not
    * @return Writer for a new StoreFile in the tmp dir.
    */
-  public StoreFileWriter createWriterInTmp(long maxKeyCount, Compression.Algorithm compression,
-      boolean isCompaction, boolean includeMVCCReadpoint, boolean includesTag,
-      boolean shouldDropBehind) throws IOException {
-    return createWriterInTmp(maxKeyCount, compression, isCompaction, includeMVCCReadpoint,
-      includesTag, shouldDropBehind, null);
-  }
-
-  /**
-   * @param maxKeyCount
-   * @param compression Compression algorithm to use
-   * @param isCompaction whether we are creating a new file in a compaction
-   * @param includeMVCCReadpoint - whether to include MVCC or not
-   * @param includesTag - includesTag or not
-   *
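
The HStore hunk above is the overload collapse that accompanies dropping a parameter: the short createWriterInTmp existed only to forward a null TimeRangeTracker to the long variant, and once the writer recomputes the time range itself the delegation layer disappears. A schematic of the pattern with simplified, hypothetical signatures (not the real HStore method):

import java.io.IOException;

// Before HBASE-18752, a delegating overload supplied the null hint:
//
//   StoreFileWriterStub createWriterInTmp(long maxKeyCount, boolean isCompaction)
//       throws IOException {
//     return createWriterInTmp(maxKeyCount, isCompaction, /* tracker = */ null);
//   }
//
// After the change, only the single entry point remains.
public class OverloadCollapseSketch {
  static final class StoreFileWriterStub {
    final long maxKeyCount;
    final boolean isCompaction;
    StoreFileWriterStub(long maxKeyCount, boolean isCompaction) {
      this.maxKeyCount = maxKeyCount;
      this.isCompaction = isCompaction;
    }
  }

  // Single remaining entry point; no time-range hint is accepted any more.
  static StoreFileWriterStub createWriterInTmp(long maxKeyCount, boolean isCompaction)
      throws IOException {
    return new StoreFileWriterStub(maxKeyCount, isCompaction);
  }

  public static void main(String[] args) throws IOException {
    StoreFileWriterStub w = createWriterInTmp(1000L, false);
    System.out.println(w.maxKeyCount + ", " + w.isCompaction);
  }
}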