git commit: PHOENIX-539 Chunked loading of parallel scanning

2014-07-08 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.0 8496d76ac -> 2625274eb


PHOENIX-539 Chunked loading of parallel scanning

Instead of spooling all data in a table to disk, load it in chunks
as needed, avoiding potentially spooling large quantities of data to disk.
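
The pattern is easiest to see in isolation. Below is a minimal, standalone sketch of the chunking idea, assuming a generic row source; it is not the ChunkedResultIterator added by this patch, and every class, method, and parameter name in it is illustrative. The caller sees one continuous iterator, but only one chunk of rows is held in memory at a time, and the key of the last row returned is remembered so the next chunk can resume where the previous one stopped.

import java.util.ArrayDeque;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Queue;
import java.util.function.Function;

/**
 * Illustrative sketch only (not the ChunkedResultIterator added by this patch):
 * exposes one continuous iterator while materializing at most one chunk of rows
 * in memory at a time, remembering the key of the last row handed out so the
 * next chunk can resume where the previous one stopped.
 */
public final class ChunkedIterator<K, R> implements Iterator<R> {

    /** Loads at most chunkSize rows whose keys sort after resumeKey (null = from the start). */
    public interface ChunkLoader<SK, SR> {
        List<SR> loadChunk(SK resumeKey, int chunkSize);
    }

    private final ChunkLoader<K, R> loader;
    private final Function<R, K> keyOf;   // derives the resume key from a row
    private final int chunkSize;

    private final Queue<R> buffered = new ArrayDeque<>();
    private K resumeKey;                  // key of the last row returned so far
    private boolean exhausted;

    public ChunkedIterator(ChunkLoader<K, R> loader, Function<R, K> keyOf, int chunkSize) {
        this.loader = loader;
        this.keyOf = keyOf;
        this.chunkSize = chunkSize;
    }

    @Override
    public boolean hasNext() {
        if (!buffered.isEmpty()) {
            return true;
        }
        if (exhausted) {
            return false;
        }
        // Lazily fetch the next chunk instead of loading (or spooling) everything up front.
        List<R> rows = loader.loadChunk(resumeKey, chunkSize);
        if (rows.isEmpty()) {
            exhausted = true;
            return false;
        }
        if (rows.size() < chunkSize) {
            exhausted = true;             // a short chunk means the source is drained
        }
        resumeKey = keyOf.apply(rows.get(rows.size() - 1));
        buffered.addAll(rows);
        return true;
    }

    @Override
    public R next() {
        if (!hasNext()) {
            throw new NoSuchElementException();
        }
        return buffered.poll();
    }
}

In Phoenix's case the "load next chunk" step would correspond to re-issuing the underlying scan from the last row key seen, with the chunk size supplied by a client-side setting.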


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/2625274e
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/2625274e
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/2625274e

Branch: refs/heads/4.0
Commit: 2625274ebcba7aa250e6064ec66538e074737604
Parents: 8496d76
Author: Gabriel Reid gr...@apache.org
Authored: Tue Jun 10 07:36:37 2014 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Tue Jul 8 21:28:43 2014 +0200

----------------------------------------------------------------------
 .../org/apache/phoenix/end2end/QueryIT.java |  11 +-
 .../phoenix/compile/StatementContext.java   |  23 +++
 .../org/apache/phoenix/execute/ScanPlan.java|  43 ++--
 .../phoenix/iterate/ChunkedResultIterator.java  | 196 +++
 .../phoenix/iterate/ParallelIterators.java  |   7 +-
 .../org/apache/phoenix/join/HashJoinInfo.java   |  48 +++--
 .../org/apache/phoenix/query/QueryServices.java |   5 +
 .../phoenix/query/QueryServicesOptions.java |   3 +
 8 files changed, 298 insertions(+), 38 deletions(-)
----------------------------------------------------------------------
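
The diffstat also shows the chunk size surfacing as a tunable in QueryServices and QueryServicesOptions. As a rough sketch of how such a client-side knob is typically read from the Hadoop/HBase Configuration (the property key and default value below are assumptions for illustration, not necessarily what this commit defines):

import org.apache.hadoop.conf.Configuration;

// Illustrative only: the property key and default value are assumptions,
// not necessarily those introduced by PHOENIX-539.
public final class ChunkSizeConfig {

    // Hypothetical names for the chunk-size setting and its fallback value.
    static final String SCAN_RESULT_CHUNK_SIZE_ATTRIB = "phoenix.query.scanResultChunkSize";
    static final int DEFAULT_SCAN_RESULT_CHUNK_SIZE = 1000;

    private ChunkSizeConfig() {
    }

    /** Reads the chunk size from the client-side configuration, falling back to the default. */
    public static int getScanResultChunkSize(Configuration conf) {
        return conf.getInt(SCAN_RESULT_CHUNK_SIZE_ATTRIB, DEFAULT_SCAN_RESULT_CHUNK_SIZE);
    }
}

A setting like this lets users trade memory footprint against the number of round trips by adjusting how many rows are fetched per chunk.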


http://git-wip-us.apache.org/repos/asf/phoenix/blob/2625274e/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryIT.java
index 867eb2a..f453853 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryIT.java
@@ -861,7 +861,7 @@ public class QueryIT extends BaseClientManagedTimeIT {
 try {
 PreparedStatement statement = conn.prepareStatement(query);
 statement.setString(1, tenantId);
-statement.setString(2,ROW4);
+statement.setString(2, ROW4);
 ResultSet rs = statement.executeQuery();
 assertTrue(rs.next());
 assertEquals(A_VALUE, rs.getString(1));
@@ -879,17 +879,17 @@ public class QueryIT extends BaseClientManagedTimeIT {
 
 byte[] tableName = Bytes.toBytes(ATABLE_NAME);
 admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
-HTable htable = (HTable)conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(tableName);
+HTable htable = (HTable) conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(tableName);
 htable.clearRegionCache();
 int nRegions = htable.getRegionLocations().size();
-admin.split(tableName, ByteUtil.concat(Bytes.toBytes(tenantId), Bytes.toBytes("00A" + Character.valueOf((char)('3' + nextRunCount()))+ ts))); // vary split point with test run
+admin.split(tableName, ByteUtil.concat(Bytes.toBytes(tenantId), Bytes.toBytes("00A" + Character.valueOf((char) ('3' + nextRunCount())) + ts))); // vary split point with test run
 int retryCount = 0;
 do {
 Thread.sleep(2000);
 retryCount++;
 //htable.clearRegionCache();
 } while (retryCount < 10 && htable.getRegionLocations().size() == nRegions);
-assertNotEquals(nRegions,htable.getRegionLocations().size());
+assertNotEquals(nRegions, htable.getRegionLocations().size());
 
 statement.setString(1, tenantId);
 rs = statement.executeQuery();
@@ -906,9 +906,10 @@ public class QueryIT extends BaseClientManagedTimeIT {
 assertEquals(E_VALUE, rs.getString(2));
assertEquals(1, rs.getLong(3));
 assertFalse(rs.next());
-
 } finally {
+if (admin != null) {
 admin.close();
+}
 conn.close();
 }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/2625274e/phoenix-core/src/main/java/org/apache/phoenix/compile/StatementContext.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/StatementContext.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/StatementContext.java
index 4c907d6..06d5f89 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/StatementContext.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/StatementContext.java
@@ -91,6 +91,29 @@ public class StatementContext {
 this.whereConditionColumns = new ArrayList<Pair<byte[],byte[]>>();
 }
 
+/**
+ * Copy constructor where 

Apache-Phoenix | 4.0 | Hadoop1 | Build Successful

2014-07-08 Thread Apache Jenkins Server
4.0 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf/incubator-phoenix.git

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.0-hadoop1/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.0-hadoop1/lastCompletedBuild/testReport/

Changes
[gabrielr] PHOENIX-539 Chunked loading of parallel scanning