[04/49] hbase-site git commit: Published site at 3810ba2c6edfc531181ffc9e6c68396a0c2d2027.

2018-09-12 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/705d69c4/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.ClientSideCellSetModel.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.ClientSideCellSetModel.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.ClientSideCellSetModel.html
index 826bcba..5323511 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.ClientSideCellSetModel.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/rest/TestTableScan.ClientSideCellSetModel.html
@@ -98,580 +98,623 @@
 090  private static final String CFB = "b";
 091  private static final String COLUMN_1 = CFA + ":1";
 092  private static final String COLUMN_2 = CFB + ":2";
-093  private static Client client;
-094  private static int expectedRows1;
-095  private static int expectedRows2;
-096  private static Configuration conf;
-097
-098  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
-099  private static final HBaseRESTTestingUtility REST_TEST_UTIL =
-100    new HBaseRESTTestingUtility();
-101
-102  @BeforeClass
-103  public static void setUpBeforeClass() throws Exception {
-104    conf = TEST_UTIL.getConfiguration();
-105    conf.set(Constants.CUSTOM_FILTERS, "CustomFilter:" + CustomFilter.class.getName());
-106    TEST_UTIL.startMiniCluster();
-107    REST_TEST_UTIL.startServletContainer(conf);
-108    client = new Client(new Cluster().add("localhost",
-109      REST_TEST_UTIL.getServletPort()));
-110    Admin admin = TEST_UTIL.getAdmin();
-111    if (!admin.tableExists(TABLE)) {
-112      HTableDescriptor htd = new HTableDescriptor(TABLE);
-113      htd.addFamily(new HColumnDescriptor(CFA));
-114      htd.addFamily(new HColumnDescriptor(CFB));
-115      admin.createTable(htd);
-116      expectedRows1 = TestScannerResource.insertData(conf, TABLE, COLUMN_1, 1.0);
-117      expectedRows2 = TestScannerResource.insertData(conf, TABLE, COLUMN_2, 0.5);
-118    }
-119  }
-120
-121  @AfterClass
-122  public static void tearDownAfterClass() throws Exception {
-123    TEST_UTIL.getAdmin().disableTable(TABLE);
-124    TEST_UTIL.getAdmin().deleteTable(TABLE);
-125    REST_TEST_UTIL.shutdownServletContainer();
-126    TEST_UTIL.shutdownMiniCluster();
-127  }
-128
-129  @Test
-130  public void testSimpleScannerXML() throws IOException, JAXBException, XMLStreamException {
-131    // Test scanning particular columns
-132    StringBuilder builder = new StringBuilder();
-133    builder.append("/*");
-134    builder.append("?");
-135    builder.append(Constants.SCAN_COLUMN + "=" + COLUMN_1);
-136    builder.append("&");
-137    builder.append(Constants.SCAN_LIMIT + "=10");
-138    Response response = client.get("/" + TABLE + builder.toString(),
-139      Constants.MIMETYPE_XML);
-140    assertEquals(200, response.getCode());
-141    assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type"));
-142    JAXBContext ctx = JAXBContext.newInstance(CellSetModel.class);
-143    Unmarshaller ush = ctx.createUnmarshaller();
-144    CellSetModel model = (CellSetModel) ush.unmarshal(response.getStream());
-145    int count = TestScannerResource.countCellSet(model);
-146    assertEquals(10, count);
-147    checkRowsNotNull(model);
-148
-149    //Test with no limit.
-150    builder = new StringBuilder();
-151    builder.append("/*");
-152    builder.append("?");
-153    builder.append(Constants.SCAN_COLUMN + "=" + COLUMN_1);
-154    response = client.get("/" + TABLE + builder.toString(),
-155      Constants.MIMETYPE_XML);
-156    assertEquals(200, response.getCode());
-157    assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type"));
-158    model = (CellSetModel) ush.unmarshal(response.getStream());
-159    count = TestScannerResource.countCellSet(model);
-160    assertEquals(expectedRows1, count);
-161    checkRowsNotNull(model);
-162
-163    //Test with start and end row.
-164    builder = new StringBuilder();
-165    builder.append("/*");
-166    builder.append("?");
-167    builder.append(Constants.SCAN_COLUMN + "=" + COLUMN_1);
-168    builder.append("&");
-169    builder.append(Constants.SCAN_START_ROW + "=aaa");
-170    builder.append("&");
-171    builder.append(Constants.SCAN_END_ROW + "=aay");
-172    response = client.get("/" + TABLE + builder.toString(),
-173      Constants.MIMETYPE_XML);
-174    assertEquals(200, response.getCode());
-175    model = (CellSetModel) ush.unmarshal(response.getStream());
-176    count = TestScannerResource.countCellSet(model);
-177    RowModel startRow = model.getRows().get(0);
-178    assertEquals("aaa", Bytes.toString(startRow.getKey()));
-179    RowModel endRow = model.getRows().get(model.getRows().size() - 1);
-180    assertEquals("aax", Bytes.toString(endRow.getKey()));
-181    assertEquals(24, count);
-182

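For readers skimming the removed test above: the query strings it builds are plain HTTP scan requests against the REST gateway, answered as XML and unmarshalled into a CellSetModel for counting. A minimal, self-contained sketch of the three requests follows; it is not part of the patch, and the table name, the sketch class name, and the literal parameter names "column", "limit", "startrow" and "endrow" are assumptions about the Constants values, which are not shown in this hunk.

    // Hypothetical illustration of the URLs testSimpleScannerXML builds above.
    public class RestScanUrlSketch {
      public static void main(String[] args) {
        String table = "TestScanResource";   // assumed table name, not shown in the hunk
        String column = "a:1";               // COLUMN_1 = CFA + ":1" in the diff
        System.out.println("GET /" + table + "/*?column=" + column + "&limit=10");  // capped at 10 rows
        System.out.println("GET /" + table + "/*?column=" + column);                // no limit
        System.out.println("GET /" + table + "/*?column=" + column
            + "&startrow=aaa&endrow=aay");                                          // bounded scan
      }
    }

Each request is issued with an XML Accept header (Constants.MIMETYPE_XML), which is why the removed assertions check both the 200 status and the content-type of the response.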
[04/49] hbase-site git commit: Published site at .

2017-09-24 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e6bd0774/testdevapidocs/src-html/org/apache/hadoop/hbase/quotas/TestGlobalQuotaSettings.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/quotas/TestGlobalQuotaSettings.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/quotas/TestGlobalQuotaSettings.html
new file mode 100644
index 000..72e619a
--- /dev/null
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/quotas/TestGlobalQuotaSettings.html
@@ -0,0 +1,194 @@
+<!-- HTML page header: HTML 4.01 loose DOCTYPE (http://www.w3.org/TR/html4/loose.dtd), title "Source code", stylesheet link -->
+001/*
+002 * Licensed to the Apache Software Foundation (ASF) under one or more
+003 * contributor license agreements.  See the NOTICE file distributed with
+004 * this work for additional information regarding copyright ownership.
+005 * The ASF licenses this file to you under the Apache License, Version 2.0
+006 * (the "License"); you may not use this file except in compliance with
+007 * the License.  You may obtain a copy of the License at
+008 *
+009 * http://www.apache.org/licenses/LICENSE-2.0
+010 *
+011 * Unless required by applicable law or agreed to in writing, software
+012 * distributed under the License is distributed on an "AS IS" BASIS,
+013 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+014 * See the License for the specific language governing permissions and
+015 * limitations under the License.
+016 */
+017package org.apache.hadoop.hbase.quotas;
+018
+019import static org.junit.Assert.assertEquals;
+020import static org.junit.Assert.assertTrue;
+021
+022import java.io.IOException;
+023
+024import org.apache.hadoop.hbase.TableName;
+025import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
+026import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos;
+027import org.apache.hadoop.hbase.testclassification.SmallTests;
+028import org.junit.Test;
+029import org.junit.experimental.categories.Category;
+030
+031@Category(SmallTests.class)
+032public class TestGlobalQuotaSettings {
+033
+034  QuotaProtos.TimedQuota REQUEST_THROTTLE = QuotaProtos.TimedQuota.newBuilder()
+035      .setScope(QuotaProtos.QuotaScope.MACHINE).setSoftLimit(100)
+036      .setTimeUnit(HBaseProtos.TimeUnit.MINUTES).build();
+037  QuotaProtos.Throttle THROTTLE = QuotaProtos.Throttle.newBuilder()
+038      .setReqNum(REQUEST_THROTTLE).build();
+039
+040  QuotaProtos.SpaceQuota SPACE_QUOTA = QuotaProtos.SpaceQuota.newBuilder()
+041      .setSoftLimit(1024L * 1024L).setViolationPolicy(QuotaProtos.SpaceViolationPolicy.NO_WRITES)
+042      .build();
+043
+044  @Test
+045  public void testMergeThrottle() throws IOException {
+046    QuotaProtos.Quotas quota = QuotaProtos.Quotas.newBuilder()
+047        .setThrottle(THROTTLE).build();
+048    QuotaProtos.TimedQuota writeQuota = REQUEST_THROTTLE.toBuilder()
+049        .setSoftLimit(500).build();
+050    // Unset the req throttle, set a write throttle
+051    QuotaProtos.ThrottleRequest writeThrottle = QuotaProtos.ThrottleRequest.newBuilder()
+052        .setTimedQuota(writeQuota).setType(QuotaProtos.ThrottleType.WRITE_NUMBER).build();
+053
+054    GlobalQuotaSettings settings = new GlobalQuotaSettings("joe", null, null, quota);
+055    GlobalQuotaSettings merged = settings.merge(
+056        new ThrottleSettings("joe", null, null, writeThrottle));
+057
+058    QuotaProtos.Throttle mergedThrottle = merged.getThrottleProto();
+059    // Verify the request throttle is in place
+060    assertTrue(mergedThrottle.hasReqNum());
+061    QuotaProtos.TimedQuota actualReqNum = mergedThrottle.getReqNum();
+062    assertEquals(REQUEST_THROTTLE.getSoftLimit(), actualReqNum.getSoftLimit());
+063
+064    // Verify the write throttle is in place
+065    assertTrue(mergedThrottle.hasWriteNum());
+066    QuotaProtos.TimedQuota actualWriteNum = mergedThrottle.getWriteNum();
+067    assertEquals(writeQuota.getSoftLimit(), actualWriteNum.getSoftLimit());
+068  }
+069
+070  @Test
+071  public void testMergeSpace() throws IOException {
+072    TableName tn = TableName.valueOf("foo");
+073    QuotaProtos.Quotas quota = QuotaProtos.Quotas.newBuilder()
+074        .setSpace(SPACE_QUOTA).build();
+075
+076    GlobalQuotaSettings settings = new GlobalQuotaSettings(null, tn, null, quota);
+077    // Switch the violation policy to DISABLE
+078    GlobalQuotaSettings merged = settings.merge(
+079        new SpaceLimitSettings(tn, SPACE_QUOTA.getSoftLimit(), SpaceViolationPolicy.DISABLE));
+080
+081    QuotaProtos.SpaceQuota mergedSpaceQuota = merged.getSpaceProto();
+082    assertEquals(SPACE_QUOTA.getSoftLimit(), mergedSpaceQuota.getSoftLimit());
+083    assertEquals(
+084        QuotaProtos.SpaceViolationPolicy.DISABLE, mergedSpaceQuota.getViolationPolicy());
+085  }
+086
+087  @Test
+088  public void testMergeThrottleAndSpace() throws IOException {
+089    final String ns = 

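The new test above exercises GlobalQuotaSettings.merge(), the master-side combination of an already-stored quota with an incoming change (a write throttle layered over a request throttle, or a space limit whose violation policy is switched to DISABLE). For context, here is a minimal client-side sketch of the calls that produce such quotas; it is not part of the patch, the class name is hypothetical, and it assumes the public QuotaSettingsFactory, Admin.setQuota, ConnectionFactory, ThrottleType and SpaceViolationPolicy APIs from the same code base rather than anything shown in this hunk.

    import java.util.concurrent.TimeUnit;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.quotas.QuotaSettingsFactory;
    import org.apache.hadoop.hbase.quotas.SpaceViolationPolicy;
    import org.apache.hadoop.hbase.quotas.ThrottleType;

    public class QuotaMergeSketch {
      public static void main(String[] args) throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
          // Throttle user "joe" to 500 writes per minute, mirroring the TimedQuota in the test above.
          admin.setQuota(QuotaSettingsFactory.throttleUser("joe", ThrottleType.WRITE_NUMBER,
              500, TimeUnit.MINUTES));
          // Limit table "foo" to 1 MB with the DISABLE violation policy, as testMergeSpace expects.
          admin.setQuota(QuotaSettingsFactory.limitTableSpace(TableName.valueOf("foo"),
              1024L * 1024L, SpaceViolationPolicy.DISABLE));
        }
      }
    }

Each setQuota() call is sent to the master, where the existing quota for that subject is combined with the new setting in the way the test verifies.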
[04/49] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-14 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/b17bf22b/testdevapidocs/index-all.html
--
diff --git a/testdevapidocs/index-all.html b/testdevapidocs/index-all.html
index 576c9b3..b876c89 100644
--- a/testdevapidocs/index-all.html
+++ b/testdevapidocs/index-all.html
@@ -9357,12 +9357,16 @@
 
 DUMMY_TABLE - Static variable in class org.apache.hadoop.hbase.client.TestSimpleRequestController
 
+DUMMY_VALUE - Static variable in class org.apache.hadoop.hbase.coprocessor.TestAsyncCoprocessorEndpoint
+
 DUMMY_VALUE - Static variable in class org.apache.hadoop.hbase.coprocessor.TestRegionServerCoprocessorEndpoint
 
 DUMMY_VALUE - Static variable in class org.apache.hadoop.hbase.security.visibility.ExpAsStringVisibilityLabelServiceImpl
 
 DummyAssignmentListener() - Constructor for class org.apache.hadoop.hbase.master.TestAssignmentListener.DummyAssignmentListener
 
+dummyCall(RpcController, DummyRegionServerEndpointProtos.DummyRequest, RpcCallback<DummyRegionServerEndpointProtos.DummyResponse>) - Method in class org.apache.hadoop.hbase.coprocessor.TestAsyncCoprocessorEndpoint.DummyRegionServerEndpoint
+
 dummyCall(RpcController, DummyRegionServerEndpointProtos.DummyRequest, RpcCallback<DummyRegionServerEndpointProtos.DummyResponse>) - Method in class org.apache.hadoop.hbase.coprocessor.TestRegionServerCoprocessorEndpoint.DummyRegionServerEndpoint
 
 DummyCompactionPolicy(Configuration, StoreConfigInformation) - Constructor for class org.apache.hadoop.hbase.regionserver.TestDefaultStoreEngine.DummyCompactionPolicy
@@ -9391,6 +9395,8 @@
 
 DummyRegionObserver() - Constructor for class org.apache.hadoop.hbase.coprocessor.TestHTableWrapper.DummyRegionObserver
 
+DummyRegionServerEndpoint() - Constructor for class org.apache.hadoop.hbase.coprocessor.TestAsyncCoprocessorEndpoint.DummyRegionServerEndpoint
+
 DummyRegionServerEndpoint() - Constructor for class org.apache.hadoop.hbase.coprocessor.TestRegionServerCoprocessorEndpoint.DummyRegionServerEndpoint
 
 DummyReplicationListener() - Constructor for class org.apache.hadoop.hbase.replication.TestReplicationTrackerZKImpl.DummyReplicationListener
@@ -9429,6 +9435,8 @@
 
 DummyStoreFlusher(Configuration, Store) - Constructor for class org.apache.hadoop.hbase.regionserver.TestDefaultStoreEngine.DummyStoreFlusher
 
+dummyThrow(RpcController, DummyRegionServerEndpointProtos.DummyRequest, RpcCallback<DummyRegionServerEndpointProtos.DummyResponse>) - Method in class org.apache.hadoop.hbase.coprocessor.TestAsyncCoprocessorEndpoint.DummyRegionServerEndpoint
+
 dummyThrow(RpcController, DummyRegionServerEndpointProtos.DummyRequest, RpcCallback<DummyRegionServerEndpointProtos.DummyResponse>) - Method in class org.apache.hadoop.hbase.coprocessor.TestRegionServerCoprocessorEndpoint.DummyRegionServerEndpoint
 
 DummyWALActionsListener() - Constructor for class org.apache.hadoop.hbase.regionserver.TestWALLockup.DummyWALActionsListener
@@ -14669,6 +14677,8 @@
 
 getService() - Method in class org.apache.hadoop.hbase.coprocessor.ProtobufCoprocessorService
 
+getService() - Method in class org.apache.hadoop.hbase.coprocessor.TestAsyncCoprocessorEndpoint.DummyRegionServerEndpoint
+
 getService() - Method in class org.apache.hadoop.hbase.coprocessor.TestRegionServerCoprocessorEndpoint.DummyRegionServerEndpoint
 
 getService() - Method in class org.apache.hadoop.hbase.regionserver.TestServerCustomProtocol.PingHandler
@@ -23143,6 +23153,8 @@
 
 NUM_MASTERS - Static variable in class org.apache.hadoop.hbase.master.TestDistributedLogSplitting
 
+NUM_MASTERS - Static variable in class org.apache.hadoop.hbase.regionserver.TestRegionServerHostname
+
 NUM_MASTERS - Static variable in class org.apache.hadoop.hbase.regionserver.TestRSKilledWhenInitializing
 
 NUM_MASTERS - Static variable in class org.apache.hadoop.hbase.wal.TestWALFiltering
@@ -23255,6 +23267,8 @@
 
 NUM_RS - Static variable in class org.apache.hadoop.hbase.regionserver.TestCompactSplitThread
 
+NUM_RS - Static variable in class org.apache.hadoop.hbase.regionserver.TestRegionServerHostname
+
 NUM_RS - Static variable in class org.apache.hadoop.hbase.regionserver.TestRSKilledWhenInitializing
 
 NUM_RS - Static variable in class org.apache.hadoop.hbase.snapshot.TestFlushSnapshotFromClient
@@ -32003,6 +32017,8 @@
 
 setup() - Method in class org.apache.hadoop.hbase.regionserver.TestRegionServerAbort
 
+setup() - Method in class org.apache.hadoop.hbase.regionserver.TestRegionServerHostname
+
 setUp() - Static method in class org.apache.hadoop.hbase.regionserver.TestRegionServerOnlineConfigChange
 
 setUp() - Method in class org.apache.hadoop.hbase.regionserver.TestRegionServerReportForDuty
@@ -32378,6 +32394,8 @@
 
 setUpBeforeClass() - Static method in class org.apache.hadoop.hbase.constraint.TestConstraint
 
+setUpBeforeClass() - Static method in class org.apache.hadoop.hbase.coprocessor.TestAsyncCoprocessorEndpoint
+
 

[04/49] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-06-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/cd7ae54c/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
--
diff --git a/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html b/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
index 1217cfe..777c233 100644
--- a/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
+++ b/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
@@ -1,5 +1,5 @@
 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
-
+
 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-06-15
+Last Published: 2017-06-16
   | Version: 3.0.0-SNAPSHOT
  
 Apache HBase - Exemplar for hbase-shaded-client archetype
@@ -129,10 +129,10 @@
 33
 
 Number of dependencies (NOD):
-93
+92
 
 Number of unique artifacts (NOA):
-94
+93
 
 Number of SNAPSHOT artifacts (NOS):
 0
@@ -266,7 +266,7 @@
 
 
 
-1.3.1
+1.4
 
 
 org.apache.hbase:hbase-it (http://hbase.apache.org/hbase-it)
@@ -309,7 +309,7 @@
 
 
 
-2.4
+2.5
 
 
 org.apache.hbase:hbase-client (http://hbase.apache.org/hbase-client)
@@ -366,18 +366,6 @@
 org.apache.hbase:hbase-spark (http://hbase.apache.org/hbase-spark)
 org.apache.hbase:hbase-thrift (http://hbase.apache.org/hbase-thrift)
 
-commons-net:commons-net
-
-
-
-
-
-
-3.1
-
-
-org.apache.hbase:hbase-thrift (http://hbase.apache.org/hbase-thrift)
-
 io.dropwizard.metrics:metrics-core
 
 
@@ -385,7 +373,7 @@
 
 
 
-3.1.2
+3.2.1
 
 
 org.apache.hbase:hbase-client (http://hbase.apache.org/hbase-client)
@@ -416,7 +404,7 @@
 
 
 
-4.1.1.Final
+4.1.9.Final
 
 
 org.apache.hbase:hbase-client (http://hbase.apache.org/hbase-client)
@@ -444,7 +432,7 @@
 
 
 
-2.2.2
+2.2.12
 
 
 org.apache.hbase:hbase-rest (http://hbase.apache.org/hbase-rest)
@@ -556,7 +544,7 @@
 
 
 
-2.11.6
+2.12.2
 
 
 org.apache.hbase:hbase-external-blockcache (http://hbase.apache.org/hbase-external-blockcache)
@@ -608,7 +596,7 @@
 
 
 
-2.11.0
+2.12.0
 
 
 org.apache.hbase:hbase-client (http://hbase.apache.org/hbase-client)
@@ -620,7 +608,7 @@
 
 
 
-2.11.0
+2.12.0
 
 
 org.apache.hbase:hbase-client (http://hbase.apache.org/hbase-client)
@@ -632,7 +620,7 @@
 
 
 
-2.11.0
+2.12.0
 
 
 org.apache.hbase:hbase-client (http://hbase.apache.org/hbase-client)
@@ -1217,7 +1205,7 @@
 
 
 
-3.1.0-incubating
+3.2.0-incubating
 
 
 org.apache.hbase:hbase-client (http://hbase.apache.org/hbase-client)
@@ -1235,7 +1223,7 @@
 
 
 
-4.5.2
+4.5.3
 
 
 org.apache.hbase:hbase-assembly (http://hbase.apache.org/hbase-assembly)
@@ -1249,7 +1237,7 @@
 
 
 
-4.4.4
+4.4.6
 
 
 org.apache.hbase:hbase-assembly (http://hbase.apache.org/hbase-assembly)
@@ -1337,7 +1325,7 @@
 
 
 
-3.4.8
+3.4.9
 
 
 org.apache.hbase:hbase-client (http://hbase.apache.org/hbase-client)
@@ -1492,7 +1480,7 @@
 
 
 
-2.22.2
+2.25.1
 
 
 org.apache.hbase:hbase-rest (http://hbase.apache.org/hbase-rest)
@@ -1568,7 +1556,7 @@
 
 
 
-9.1.9.0
+9.1.10.0
 
 
 org.apache.hbase:hbase-shell (http://hbase.apache.org/hbase-shell)
@@ -1580,7 +1568,7 @@
 
 
 
-1.10.8
+1.10.19
 
 
 org.apache.hbase:hbase-annotations (http://hbase.apache.org/hbase-annotations)
@@ -1660,7 +1648,7 @@
 
 
 
-1.7.7
+1.7.24
 
 
 org.apache.hbase:hbase-thrift (http://hbase.apache.org/hbase-thrift)
@@ -1672,7 +1660,7 @@
 
 
 
-1.7.7
+1.7.24
 
 
 org.apache.hbase:hbase-testing-util (http://hbase.apache.org/hbase-testing-util)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/cd7ae54c/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
--
diff --git a/hbase-archetypes/hbase-shaded-client-project/dependency-info.html b/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
index 6980a71..5458e7e 100644
--- a/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
+++ b/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
@@ -1,5 +1,5 @@
 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
-
+
 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-06-15
+Last Published: 2017-06-16
   | Version: 3.0.0-SNAPSHOT
  
 Apache HBase - Exemplar for hbase-shaded-client archetype

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/cd7ae54c/hbase-archetypes/hbase-shaded-client-project/dependency-management.html

[04/49] hbase-site git commit: Published site at 4b3e38705cb24aee82615b1b9af47ed549ea1358.

2016-03-03 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/4ce8323f/xref/org/apache/hadoop/hbase/regionserver/SegmentScanner.html
--
diff --git a/xref/org/apache/hadoop/hbase/regionserver/SegmentScanner.html b/xref/org/apache/hadoop/hbase/regionserver/SegmentScanner.html
index fe37177..d7b93a8 100644
--- a/xref/org/apache/hadoop/hbase/regionserver/SegmentScanner.html
+++ b/xref/org/apache/hadoop/hbase/regionserver/SegmentScanner.html
@@ -29,137 +29,361 @@
 19  package org.apache.hadoop.hbase.regionserver;
 20  
 21  import java.io.IOException;
-22  
-23  import org.apache.hadoop.hbase.Cell;
-24  import org.apache.hadoop.hbase.classification.InterfaceAudience;
-25  import org.apache.hadoop.hbase.client.Scan;
-26  
-27  /**
-28   * An abstraction for store segment scanner.
-29   */
-30  @InterfaceAudience.Private
-31  public abstract class SegmentScanner implements KeyValueScanner {
-32  
-33    private long sequenceID = Long.MAX_VALUE;
-34  
-35    protected abstract Segment getSegment();
-36  
-37    /**
-38     * Get the sequence id associated with this KeyValueScanner. This is required
-39     * for comparing multiple files (or memstore segments) scanners to find out
-40     * which one has the latest data.
-41     *
-42     */
-43    @Override
-44    public long getSequenceID() {
-45      return sequenceID;
-46    }
-47  
-48    /**
-49     * Close the KeyValue scanner.
-50     */
-51    @Override
-52    public void close() {
-53      getSegment().decScannerCount();
-54    }
-55  
-56    /**
-57     * This functionality should be resolved in the higher level which is
-58     * MemStoreScanner, currently returns true as default. Doesn't throw
-59     * IllegalStateException in order not to change the signature of the
-60     * overridden method
-61     */
-62    @Override
-63    public boolean shouldUseScanner(Scan scan, Store store, long oldestUnexpiredTS) {
-64      return true;
-65    }
-66    /**
-67     * This scanner is working solely on the in-memory MemStore therefore this
-68     * interface is not relevant.
-69     */
-70    @Override
-71    public boolean requestSeek(Cell c, boolean forward, boolean useBloom)
-72        throws IOException {
-73  
-74      throw new IllegalStateException(
-75          "requestSeek cannot be called on MutableCellSetSegmentScanner");
+22  import java.util.Iterator;
+23  import java.util.SortedSet;
+24  
+25  import org.apache.hadoop.hbase.Cell;
+26  import org.apache.hadoop.hbase.CellUtil;
+27  import org.apache.hadoop.hbase.classification.InterfaceAudience;
+28  import org.apache.hadoop.hbase.client.Scan;
+29  
+30  /**
+31   * A scanner of a single memstore segment.
+32   */
+33  @InterfaceAudience.Private
+34  public class SegmentScanner implements KeyValueScanner {
+35  
+36    private long sequenceID = Long.MAX_VALUE;
+37  
+38    // the observed structure
+39    private final Segment segment;
+40    // the highest relevant MVCC
+41    private long readPoint;
+42    // the current iterator that can be reinitialized by
+43    // seek(), backwardSeek(), or reseek()
+44    private Iterator<Cell> iter;
+45    // the pre-calculated cell to be returned by peek()
+46    private Cell current = null;
+47    // or next()
+48    // A flag represents whether could stop skipping KeyValues for MVCC
+49    // if have encountered the next row. Only used for reversed scan
+50    private boolean stopSkippingKVsIfNextRow = false;
+51    // last iterated KVs by seek (to restore the iterator state after reseek)
+52    private Cell last = null;
+53  
+54    protected SegmentScanner(Segment segment, long readPoint) {
+55      this.segment = segment;
+56      this.readPoint = readPoint;
+57      iter = segment.iterator();
+58      // the initialization of the current is required for working with heap of SegmentScanners
+59      current = getNext();
+60      //increase the reference count so the underlying structure will not be de-allocated
+61      this.segment.incScannerCount();
+62    }
+63  
+64    /**
+65     * Look at the next Cell in this scanner, but do not iterate the scanner
+66     * @return the currently observed Cell
+67     */
+68    @Override
+69    public Cell peek() {          // sanity check, the current should be always valid
+70      if (current!=null && current.getSequenceId() > readPoint) {
+71        throw new RuntimeException("current is invalid: read point is "+readPoint+", " +
+72            "while current sequence id is " +current.getSequenceId());
+73      }
+74  
+75      return current;
 76    }
 77  
 78    /**
-79     * This scanner is working solely on the in-memory MemStore and doesn't work on
-80     * store files, MutableCellSetSegmentScanner always does the seek,
-81     * therefore always returning true.
-82     */
-83    @Override
-84    public boolean realSeekDone() {
-85      return true;
-86    }
-87  
-88    /**
-89     * This function