Apache-Phoenix | 4.0 | Build Successful

2014-10-15 Thread Apache Jenkins Server
4.0 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf/incubator-phoenix.git

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.0/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.0/lastCompletedBuild/testReport/

Changes
[jtaylor] PHOENIX-1214 SYSTEM.CATALOG cannot be created when first connection to cluster is tenant-specific (Jan Van Besien)



Apache-Phoenix | Master | Build Successful

2014-10-15 Thread Apache Jenkins Server
Master branch build status Successful
Source repository https://git-wip-us.apache.org/repos/asf/phoenix.git

Last Successful Compiled Artifacts https://builds.apache.org/job/Phoenix-master/lastSuccessfulBuild/artifact/

Last Complete Test Report https://builds.apache.org/job/Phoenix-master/lastCompletedBuild/testReport/

Changes
[jtaylor] PHOENIX-1214 SYSTEM.CATALOG cannot be created when first connection to cluster is tenant-specific (Jan Van Besien)



git commit: PHOENIX-1214 SYSTEM.CATALOG cannot be created when first connection to cluster is tenant-specific (Jan Van Besien)

2014-10-15 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/master 19ba98dca -> e1da2c34b


PHOENIX-1214 SYSTEM.CATALOG cannot be created when first connection to cluster 
is tenant-specific (Jan Van Besien)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/e1da2c34
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/e1da2c34
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/e1da2c34

Branch: refs/heads/master
Commit: e1da2c34b987a033c2c4a8767b7fc93e0404f730
Parents: 19ba98d
Author: James Taylor 
Authored: Wed Oct 15 22:18:07 2014 -0700
Committer: James Taylor 
Committed: Wed Oct 15 22:18:07 2014 -0700

--
 .../phoenix/end2end/CSVCommonsLoaderIT.java |  4 ++-
 .../query/ConnectionQueryServicesImpl.java  |  4 ++-
 .../query/ConnectionlessQueryServicesImpl.java  |  4 ++-
 .../java/org/apache/phoenix/util/JDBCUtil.java  | 18 +++-
 .../apache/phoenix/jdbc/PhoenixDriverTest.java  | 31 ++--
 .../org/apache/phoenix/util/JDBCUtilTest.java   |  9 ++
 6 files changed, 51 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/e1da2c34/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
index 6235d54..7e83f5f 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
@@ -30,10 +30,12 @@ import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.Properties;
 
 import org.apache.commons.csv.CSVParser;
 import org.apache.commons.csv.CSVRecord;
 import org.apache.phoenix.jdbc.PhoenixConnection;
+import org.apache.phoenix.jdbc.PhoenixTestDriver;
 import org.apache.phoenix.schema.PArrayDataType;
 import org.apache.phoenix.schema.PDataType;
 import org.apache.phoenix.util.CSVCommonsLoader;
@@ -156,7 +158,7 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
                 new StringReader(statements), null);
         globalConn.close();
 
-        tenantConn = DriverManager.getConnection(getUrl() + ";TenantId=acme").unwrap(
+        tenantConn = new PhoenixTestDriver().connect(getUrl() + ";TenantId=acme", new Properties()).unwrap(
                 PhoenixConnection.class);
 
 // Upsert CSV file

http://git-wip-us.apache.org/repos/asf/phoenix/blob/e1da2c34/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index a282941..53a1d66 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@ -126,6 +126,7 @@ import org.apache.phoenix.schema.stats.PTableStats;
 import org.apache.phoenix.util.ByteUtil;
 import org.apache.phoenix.util.Closeables;
 import org.apache.phoenix.util.ConfigUtil;
+import org.apache.phoenix.util.JDBCUtil;
 import org.apache.phoenix.util.MetaDataUtil;
 import org.apache.phoenix.util.PhoenixContextExecutor;
 import org.apache.phoenix.util.PhoenixRuntime;
@@ -1502,8 +1503,9 @@ public class ConnectionQueryServicesImpl extends DelegateQueryServices implement
                             PhoenixRuntime.CURRENT_SCN_ATTRIB,
                             Long.toString(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP));
                     scnProps.remove(PhoenixRuntime.TENANT_ID_ATTRIB);
+                    String globalUrl = JDBCUtil.removeProperty(url, PhoenixRuntime.TENANT_ID_ATTRIB);
                     metaConnection = new PhoenixConnection(
-                            ConnectionQueryServicesImpl.this, url, scnProps, newEmptyMetaData());
+                            ConnectionQueryServicesImpl.this, globalUrl, scnProps, newEmptyMetaData());
                     try {
                         metaConnection.createStatement().executeUpdate(QueryConstants.CREATE_TABLE_METADATA);
                     } catch (NewerTableAlreadyExistsException ignore) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/e1da2c34/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionlessQueryServicesImpl.java

git commit: PHOENIX-1214 SYSTEM.CATALOG cannot be created when first connection to cluster is tenant-specific (Jan Van Besien)

2014-10-15 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.0 6f2758c40 -> 6333e70af


PHOENIX-1214 SYSTEM.CATALOG cannot be created when first connection to cluster 
is tenant-specific (Jan Van Besien)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6333e70a
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6333e70a
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6333e70a

Branch: refs/heads/4.0
Commit: 6333e70afbcb49efb680b5ab1d062655b32dbfd2
Parents: 6f2758c
Author: James Taylor 
Authored: Wed Oct 15 22:17:28 2014 -0700
Committer: James Taylor 
Committed: Wed Oct 15 22:17:28 2014 -0700

--
 .../phoenix/end2end/CSVCommonsLoaderIT.java |  4 ++-
 .../query/ConnectionQueryServicesImpl.java  |  4 ++-
 .../query/ConnectionlessQueryServicesImpl.java  |  4 ++-
 .../java/org/apache/phoenix/util/JDBCUtil.java  | 18 +++-
 .../apache/phoenix/jdbc/PhoenixDriverTest.java  | 31 ++--
 .../org/apache/phoenix/util/JDBCUtilTest.java   |  9 ++
 6 files changed, 51 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6333e70a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
index 9f36b93..f51f908 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
@@ -30,10 +30,12 @@ import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.Properties;
 
 import org.apache.commons.csv.CSVParser;
 import org.apache.commons.csv.CSVRecord;
 import org.apache.phoenix.jdbc.PhoenixConnection;
+import org.apache.phoenix.jdbc.PhoenixTestDriver;
 import org.apache.phoenix.schema.PArrayDataType;
 import org.apache.phoenix.schema.PDataType;
 import org.apache.phoenix.util.CSVCommonsLoader;
@@ -156,7 +158,7 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
                 new StringReader(statements), null);
         globalConn.close();
 
-        tenantConn = DriverManager.getConnection(getUrl() + ";TenantId=acme").unwrap(
+        tenantConn = new PhoenixTestDriver().connect(getUrl() + ";TenantId=acme", new Properties()).unwrap(
                 PhoenixConnection.class);
 
 // Upsert CSV file

http://git-wip-us.apache.org/repos/asf/phoenix/blob/6333e70a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index 1b76900..232a284 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@ -126,6 +126,7 @@ import org.apache.phoenix.schema.stats.PTableStats;
 import org.apache.phoenix.util.ByteUtil;
 import org.apache.phoenix.util.Closeables;
 import org.apache.phoenix.util.ConfigUtil;
+import org.apache.phoenix.util.JDBCUtil;
 import org.apache.phoenix.util.MetaDataUtil;
 import org.apache.phoenix.util.PhoenixContextExecutor;
 import org.apache.phoenix.util.PhoenixRuntime;
@@ -1502,8 +1503,9 @@ public class ConnectionQueryServicesImpl extends DelegateQueryServices implement
                             PhoenixRuntime.CURRENT_SCN_ATTRIB,
                             Long.toString(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP));
                     scnProps.remove(PhoenixRuntime.TENANT_ID_ATTRIB);
+                    String globalUrl = JDBCUtil.removeProperty(url, PhoenixRuntime.TENANT_ID_ATTRIB);
                     metaConnection = new PhoenixConnection(
-                            ConnectionQueryServicesImpl.this, url, scnProps, newEmptyMetaData());
+                            ConnectionQueryServicesImpl.this, globalUrl, scnProps, newEmptyMetaData());
                     try {
                         metaConnection.createStatement().executeUpdate(QueryConstants.CREATE_TABLE_METADATA);
                     } catch (NewerTableAlreadyExistsException ignore) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/6333e70a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionlessQueryServicesImpl.java
--
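The change above amounts to one rule: the bootstrap connection that creates SYSTEM.CATALOG must never carry a TenantId, so the tenant property is stripped from the caller's URL before the metadata connection is opened. A minimal sketch of that behaviour, assuming the usual semicolon-separated property syntax; the URL value below is illustrative only, and JDBCUtil.removeProperty is the helper this patch adds:

    import org.apache.phoenix.util.JDBCUtil;
    import org.apache.phoenix.util.PhoenixRuntime;

    public class TenantUrlExample {
        public static void main(String[] args) {
            String tenantUrl = "jdbc:phoenix:localhost:2181;TenantId=acme";
            // Derive a tenant-neutral URL, as the patched bootstrap path does before
            // opening the connection that creates SYSTEM.CATALOG.
            String globalUrl = JDBCUtil.removeProperty(tenantUrl, PhoenixRuntime.TENANT_ID_ATTRIB);
            System.out.println(globalUrl); // expected: jdbc:phoenix:localhost:2181
        }
    }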

Jenkins build is back to normal : Phoenix | Master #428

2014-10-15 Thread Apache Jenkins Server
See 



Build failed in Jenkins: Phoenix | Master #427

2014-10-15 Thread Apache Jenkins Server
See 

Changes:

[jtaylor] Correcting test annotation for SpillableGroupByIT

--
[...truncated 482 lines...]
Running org.apache.phoenix.end2end.UpsertSelectIT
Tests run: 91, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 64.777 sec - 
in org.apache.phoenix.end2end.GroupByIT
Running org.apache.phoenix.end2end.ProductMetricsIT
Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 24.027 sec - 
in org.apache.phoenix.end2end.UpsertSelectIT
Running org.apache.phoenix.end2end.UpsertValuesIT
Tests run: 49, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 40.096 sec - 
in org.apache.phoenix.end2end.CastAndCoerceIT
Running org.apache.phoenix.end2end.salted.SaltedTableIT
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.534 sec - in 
org.apache.phoenix.end2end.salted.SaltedTableIT
Running org.apache.phoenix.end2end.ScanQueryIT
Tests run: 61, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 25.244 sec - 
in org.apache.phoenix.end2end.ProductMetricsIT
Running org.apache.phoenix.end2end.CreateTableIT
Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 15.798 sec - 
in org.apache.phoenix.end2end.UpsertValuesIT
Running org.apache.phoenix.end2end.CompareDecimalToLongIT
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.864 sec - in 
org.apache.phoenix.end2end.CompareDecimalToLongIT
Running org.apache.phoenix.end2end.ArrayIT
Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 20.248 sec - 
in org.apache.phoenix.end2end.CreateTableIT
Running org.apache.phoenix.end2end.TruncateFunctionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.701 sec - in 
org.apache.phoenix.end2end.TruncateFunctionIT
Running org.apache.phoenix.end2end.RowValueConstructorIT
Tests run: 48, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 22.056 sec - 
in org.apache.phoenix.end2end.ArrayIT
Running org.apache.phoenix.end2end.StddevIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.317 sec - in 
org.apache.phoenix.end2end.StddevIT
Running org.apache.phoenix.end2end.NotQueryIT
Tests run: 34, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 24.954 sec - 
in org.apache.phoenix.end2end.RowValueConstructorIT
Running org.apache.phoenix.end2end.IsNullIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.717 sec - in 
org.apache.phoenix.end2end.IsNullIT
Running org.apache.phoenix.end2end.PercentileIT
Tests run: 17, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.005 sec - in 
org.apache.phoenix.end2end.PercentileIT
Running org.apache.phoenix.end2end.DistinctCountIT
Tests run: 119, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 71.302 sec - 
in org.apache.phoenix.end2end.ScanQueryIT
Running org.apache.phoenix.end2end.InMemoryOrderByIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.806 sec - in 
org.apache.phoenix.end2end.InMemoryOrderByIT
Running org.apache.phoenix.end2end.SkipRangeParallelIteratorRegionSplitterIT
Tests run: 8, Failures: 0, Errors: 0, Skipped: 8, Time elapsed: 0.114 sec - in 
org.apache.phoenix.end2end.SkipRangeParallelIteratorRegionSplitterIT
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.044 sec - in 
org.apache.phoenix.end2end.DistinctCountIT
Tests run: 203, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 120.356 sec - 
in org.apache.phoenix.end2end.ClientTimeArithmeticQueryIT
Tests run: 77, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 48.879 sec - 
in org.apache.phoenix.end2end.NotQueryIT
Tests run: 182, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 174.414 sec - 
in org.apache.phoenix.end2end.QueryIT

Results :

Tests run: 1225, Failures: 0, Errors: 0, Skipped: 8

[INFO] 
[INFO] --- maven-failsafe-plugin:2.17:integration-test (HBaseManagedTimeTests) 
@ phoenix-core ---
[INFO] Failsafe report directory: 

[INFO] parallel='none', perCoreThreadCount=true, threadCount=0, 
useUnlimitedThreads=false, threadCountSuites=0, threadCountClasses=0, 
threadCountMethods=0, parallelOptimized=true

---
 T E S T S
---

---
 T E S T S
---
Running org.apache.phoenix.trace.PhoenixTableMetricsWriterIT
Running org.apache.phoenix.end2end.EncodeFunctionIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.402 sec - in 
org.apache.phoenix.trace.PhoenixTableMetricsWriterIT
Running org.apache.phoenix.trace.PhoenixTraceReaderIT
Running org.apache.phoenix.trace.PhoenixTracingEndToEndIT
Running org.apache.phoenix.end2end.SkipScanAfterManualSplitIT
Running org.apache.phoenix.end2end.CSVCommonsLoaderIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.633 sec - in 
org.apache.ph

Apache-Phoenix | 4.0 | Build Successful

2014-10-15 Thread Apache Jenkins Server
4.0 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf/incubator-phoenix.git

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.0/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.0/lastCompletedBuild/testReport/

Changes
[jtaylor] PHOENIX-1330 Flag VARBINARY VARBINARY ARRAY declaration in DDL as an error



Apache-Phoenix | 3.0 | Hadoop1 | Build Successful

2014-10-15 Thread Apache Jenkins Server
3.0 branch build status Successful
Source repository https://git-wip-us.apache.org/repos/asf/phoenix.git

Last Successful Compiled Artifacts https://builds.apache.org/job/Phoenix-3.0-hadoop1/lastSuccessfulBuild/artifact/

Last Complete Test Report https://builds.apache.org/job/Phoenix-3.0-hadoop1/lastCompletedBuild/testReport/

Changes
[jtaylor] PHOENIX-1330 Flag VARBINARY VARBINARY ARRAY declaration in DDL as an error



git commit: PHOENIX-1330 Flag VARBINARY VARBINARY ARRAY declaration in DDL as an error

2014-10-15 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/3.0 986566e50 -> d68354d44


PHOENIX-1330 Flag VARBINARY VARBINARY ARRAY declaration in DDL as an error


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d68354d4
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d68354d4
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d68354d4

Branch: refs/heads/3.0
Commit: d68354d442376efd63dfeec47606f596de9ec475
Parents: 986566e
Author: James Taylor 
Authored: Wed Oct 15 16:41:32 2014 -0700
Committer: James Taylor 
Committed: Wed Oct 15 16:44:55 2014 -0700

--
 .../org/apache/phoenix/exception/SQLExceptionCode.java  |  4 +++-
 .../main/java/org/apache/phoenix/parse/ColumnDef.java   |  4 
 .../org/apache/phoenix/compile/QueryCompilerTest.java   | 12 
 3 files changed, 19 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/d68354d4/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java 
b/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
index a8775f9..b703108 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
@@ -148,7 +148,9 @@ public enum SQLExceptionCode {
  */
     READ_ONLY_CONNECTION(518,"25502","Mutations are not permitted for a read-only connection."),
 
-    /**
+    VARBINARY_ARRAY_NOT_SUPPORTED(519, "42896", "VARBINARY ARRAY is not supported"),
+
+    /**
  * HBase and Phoenix specific implementation defined sub-classes.
  * Column family related exceptions.
  * 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/d68354d4/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnDef.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnDef.java 
b/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnDef.java
index 3d13ee0..e9da47c 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnDef.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnDef.java
@@ -61,6 +61,10 @@ public class ColumnDef {
             localType = sqlTypeName == null ? null : PDataType.fromTypeId(PDataType.sqlArrayType(SchemaUtil.normalizeIdentifier(sqlTypeName)));
             this.dataType = sqlTypeName == null ? null : PDataType.fromSqlTypeName(SchemaUtil.normalizeIdentifier(sqlTypeName));
             this.arrSize = arrSize; // Can only be non negative based on parsing
+            if (this.dataType == PDataType.VARBINARY) {
+                throw new SQLExceptionInfo.Builder(SQLExceptionCode.VARBINARY_ARRAY_NOT_SUPPORTED)
+                        .setColumnName(columnDefName.getColumnName()).build().buildException();
+            }
         } else {
             this.dataType = sqlTypeName == null ? null : PDataType.fromSqlTypeName(SchemaUtil.normalizeIdentifier(sqlTypeName));
             this.arrSize = null;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/d68354d4/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
index 9a84bac..4acb55c 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
@@ -1293,6 +1293,18 @@ public class QueryCompilerTest extends BaseConnectionlessQueryTest {
     }
 
     @Test
+    public void testVarbinaryArrayNotSupported() throws Exception {
+        Connection conn = DriverManager.getConnection(getUrl());
+        try {
+            conn.createStatement().execute("CREATE TABLE t (k VARCHAR PRIMARY KEY, a VARBINARY[10])");
+            fail();
+        } catch (SQLException e) {
+            assertEquals(SQLExceptionCode.VARBINARY_ARRAY_NOT_SUPPORTED.getErrorCode(), e.getErrorCode());
+        }
+        conn.close();
+    }
+
+    @Test
 public void testInvalidNextValueFor() throws Exception {
 Connection conn = DriverManager.getConnection(getUrl());
 conn.createStatement().execute("CREATE SEQUENCE alpha.zeta");
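From the client's point of view the change is small: scalar VARBINARY columns stay legal, but declaring a VARBINARY ARRAY column now fails at DDL time with error code 519 (SQLState 42896). A minimal sketch, assuming a local Phoenix URL; the table names are illustrative:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;

    public class VarbinaryArrayExample {
        public static void main(String[] args) throws SQLException {
            try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost")) {
                // Still allowed: a scalar VARBINARY column
                conn.createStatement().execute(
                        "CREATE TABLE IF NOT EXISTS t_ok (k VARCHAR PRIMARY KEY, v VARBINARY)");
                try {
                    // Rejected after this change: VARBINARY ARRAY
                    conn.createStatement().execute(
                            "CREATE TABLE t_bad (k VARCHAR PRIMARY KEY, a VARBINARY[10])");
                } catch (SQLException e) {
                    System.out.println(e.getErrorCode()); // 519, VARBINARY_ARRAY_NOT_SUPPORTED
                }
            }
        }
    }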



git commit: PHOENIX-1330 Flag VARBINARY VARBINARY ARRAY declaration in DDL as an error

2014-10-15 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.0 fe2812955 -> 6f2758c40


PHOENIX-1330 Flag VARBINARY VARBINARY ARRAY declaration in DDL as an error


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6f2758c4
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6f2758c4
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6f2758c4

Branch: refs/heads/4.0
Commit: 6f2758c4083110818a1206c498d8415cb9421206
Parents: fe28129
Author: James Taylor 
Authored: Wed Oct 15 16:41:32 2014 -0700
Committer: James Taylor 
Committed: Wed Oct 15 16:42:24 2014 -0700

--
 .../org/apache/phoenix/exception/SQLExceptionCode.java  |  4 +++-
 .../main/java/org/apache/phoenix/parse/ColumnDef.java   |  4 
 .../org/apache/phoenix/compile/QueryCompilerTest.java   | 12 
 3 files changed, 19 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6f2758c4/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java 
b/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
index 6de23d3..7782451 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
@@ -148,7 +148,9 @@ public enum SQLExceptionCode {
  */
     READ_ONLY_CONNECTION(518,"25502","Mutations are not permitted for a read-only connection."),
 
-    /**
+    VARBINARY_ARRAY_NOT_SUPPORTED(519, "42896", "VARBINARY ARRAY is not supported"),
+
+    /**
  * HBase and Phoenix specific implementation defined sub-classes.
  * Column family related exceptions.
  * 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/6f2758c4/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnDef.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnDef.java 
b/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnDef.java
index 3d13ee0..e9da47c 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnDef.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnDef.java
@@ -61,6 +61,10 @@ public class ColumnDef {
             localType = sqlTypeName == null ? null : PDataType.fromTypeId(PDataType.sqlArrayType(SchemaUtil.normalizeIdentifier(sqlTypeName)));
             this.dataType = sqlTypeName == null ? null : PDataType.fromSqlTypeName(SchemaUtil.normalizeIdentifier(sqlTypeName));
             this.arrSize = arrSize; // Can only be non negative based on parsing
+            if (this.dataType == PDataType.VARBINARY) {
+                throw new SQLExceptionInfo.Builder(SQLExceptionCode.VARBINARY_ARRAY_NOT_SUPPORTED)
+                        .setColumnName(columnDefName.getColumnName()).build().buildException();
+            }
         } else {
             this.dataType = sqlTypeName == null ? null : PDataType.fromSqlTypeName(SchemaUtil.normalizeIdentifier(sqlTypeName));
             this.arrSize = null;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/6f2758c4/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
index 9a84bac..4acb55c 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
@@ -1293,6 +1293,18 @@ public class QueryCompilerTest extends BaseConnectionlessQueryTest {
     }
 
     @Test
+    public void testVarbinaryArrayNotSupported() throws Exception {
+        Connection conn = DriverManager.getConnection(getUrl());
+        try {
+            conn.createStatement().execute("CREATE TABLE t (k VARCHAR PRIMARY KEY, a VARBINARY[10])");
+            fail();
+        } catch (SQLException e) {
+            assertEquals(SQLExceptionCode.VARBINARY_ARRAY_NOT_SUPPORTED.getErrorCode(), e.getErrorCode());
+        }
+        conn.close();
+    }
+
+    @Test
 public void testInvalidNextValueFor() throws Exception {
 Connection conn = DriverManager.getConnection(getUrl());
 conn.createStatement().execute("CREATE SEQUENCE alpha.zeta");



git commit: PHOENIX-1330 Flag VARBINARY VARBINARY ARRAY declaration in DDL as an error

2014-10-15 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/master d96685ceb -> 19ba98dca


PHOENIX-1330 Flag VARBINARY VARBINARY ARRAY declaration in DDL as an error


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/19ba98dc
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/19ba98dc
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/19ba98dc

Branch: refs/heads/master
Commit: 19ba98dca15ffc04399513f358732a1716d838af
Parents: d96685c
Author: James Taylor 
Authored: Wed Oct 15 16:41:32 2014 -0700
Committer: James Taylor 
Committed: Wed Oct 15 16:41:32 2014 -0700

--
 .../org/apache/phoenix/exception/SQLExceptionCode.java  |  4 +++-
 .../main/java/org/apache/phoenix/parse/ColumnDef.java   |  4 
 .../org/apache/phoenix/compile/QueryCompilerTest.java   | 12 
 3 files changed, 19 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/19ba98dc/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java 
b/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
index 6de23d3..7782451 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
@@ -148,7 +148,9 @@ public enum SQLExceptionCode {
  */
     READ_ONLY_CONNECTION(518,"25502","Mutations are not permitted for a read-only connection."),
 
-    /**
+    VARBINARY_ARRAY_NOT_SUPPORTED(519, "42896", "VARBINARY ARRAY is not supported"),
+
+    /**
  * HBase and Phoenix specific implementation defined sub-classes.
  * Column family related exceptions.
  * 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/19ba98dc/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnDef.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnDef.java 
b/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnDef.java
index 3d13ee0..e9da47c 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnDef.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnDef.java
@@ -61,6 +61,10 @@ public class ColumnDef {
             localType = sqlTypeName == null ? null : PDataType.fromTypeId(PDataType.sqlArrayType(SchemaUtil.normalizeIdentifier(sqlTypeName)));
             this.dataType = sqlTypeName == null ? null : PDataType.fromSqlTypeName(SchemaUtil.normalizeIdentifier(sqlTypeName));
             this.arrSize = arrSize; // Can only be non negative based on parsing
+            if (this.dataType == PDataType.VARBINARY) {
+                throw new SQLExceptionInfo.Builder(SQLExceptionCode.VARBINARY_ARRAY_NOT_SUPPORTED)
+                        .setColumnName(columnDefName.getColumnName()).build().buildException();
+            }
         } else {
             this.dataType = sqlTypeName == null ? null : PDataType.fromSqlTypeName(SchemaUtil.normalizeIdentifier(sqlTypeName));
             this.arrSize = null;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/19ba98dc/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
index 9a84bac..4acb55c 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
@@ -1293,6 +1293,18 @@ public class QueryCompilerTest extends BaseConnectionlessQueryTest {
     }
 
     @Test
+    public void testVarbinaryArrayNotSupported() throws Exception {
+        Connection conn = DriverManager.getConnection(getUrl());
+        try {
+            conn.createStatement().execute("CREATE TABLE t (k VARCHAR PRIMARY KEY, a VARBINARY[10])");
+            fail();
+        } catch (SQLException e) {
+            assertEquals(SQLExceptionCode.VARBINARY_ARRAY_NOT_SUPPORTED.getErrorCode(), e.getErrorCode());
+        }
+        conn.close();
+    }
+
+    @Test
 public void testInvalidNextValueFor() throws Exception {
 Connection conn = DriverManager.getConnection(getUrl());
 conn.createStatement().execute("CREATE SEQUENCE alpha.zeta");



Apache-Phoenix | 3.0 | Hadoop1 | Build Successful

2014-10-15 Thread Apache Jenkins Server
3.0 branch build status Successful
Source repository https://git-wip-us.apache.org/repos/asf/phoenix.git

Last Successful Compiled Artifacts https://builds.apache.org/job/Phoenix-3.0-hadoop1/lastSuccessfulBuild/artifact/

Last Complete Test Report https://builds.apache.org/job/Phoenix-3.0-hadoop1/lastCompletedBuild/testReport/

Changes
[jtaylor] Add missing commit in unit test

[jtaylor] Backport fixes for PHOENIX-1149 (view index ignored if no where clause) and PHOENIX-933 (IndexMaintainer fixes for view index)

[jtaylor] Correcting test annotation for SpillableGroupByIT



Apache-Phoenix | 4.0 | Build Successful

2014-10-15 Thread Apache Jenkins Server
4.0 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf/incubator-phoenix.git

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.0/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.0/lastCompletedBuild/testReport/

Changes
[jtaylor] Correcting test annotation for SpillableGroupByIT



Build failed in Jenkins: Phoenix | Master #426

2014-10-15 Thread Apache Jenkins Server
See 

Changes:

[jtaylor] Add missing commit in unit test

--
[...truncated 648 lines...]
at 
org.apache.phoenix.memory.GlobalMemoryManager.allocate(GlobalMemoryManager.java:100)
at 
org.apache.phoenix.memory.GlobalMemoryManager.allocate(GlobalMemoryManager.java:106)
at 
org.apache.phoenix.cache.aggcache.SpillableGroupByCache.(SpillableGroupByCache.java:150)
at 
org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver$GroupByCacheFactory.newCache(GroupedAggregateRegionObserver.java:365)
at 
org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver.scanUnordered(GroupedAggregateRegionObserver.java:400)
at 
org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver.doPostScannerOpen(GroupedAggregateRegionObserver.java:161)
at 
org.apache.phoenix.coprocessor.BaseScannerRegionObserver.postScannerOpen(BaseScannerRegionObserver.java:144)
... 8 more

at 
org.apache.phoenix.util.ServerUtil.parseServerException(ServerUtil.java:107)
at 
org.apache.phoenix.iterate.TableResultIterator.(TableResultIterator.java:57)
at 
org.apache.phoenix.iterate.ParallelIterators$2.call(ParallelIterators.java:627)
at 
org.apache.phoenix.iterate.ParallelIterators$2.call(ParallelIterators.java:622)
at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
at java.util.concurrent.FutureTask.run(FutureTask.java:166)
at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:724)
Caused by: org.apache.hadoop.hbase.DoNotRetryIOException: 
org.apache.hadoop.hbase.DoNotRetryIOException: 
_LOCAL_IDX_T,,1413406992039.c368902f0c61d676941e5914d0268e65.: Requested memory 
of 124116 bytes is larger than global pool of 4 bytes.
at 
org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:83)
at 
org.apache.phoenix.util.ServerUtil.throwIOException(ServerUtil.java:51)
at 
org.apache.phoenix.coprocessor.BaseScannerRegionObserver.postScannerOpen(BaseScannerRegionObserver.java:162)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.postScannerOpen(RegionCoprocessorHost.java:1845)
at 
org.apache.hadoop.hbase.regionserver.HRegionServer.scan(HRegionServer.java:3092)
at 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:29497)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2027)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:98)
at 
org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:114)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:94)
at java.lang.Thread.run(Thread.java:724)
Caused by: org.apache.phoenix.memory.InsufficientMemoryException: Requested 
memory of 124116 bytes is larger than global pool of 4 bytes.
at 
org.apache.phoenix.memory.GlobalMemoryManager.allocateBytes(GlobalMemoryManager.java:72)
at 
org.apache.phoenix.memory.GlobalMemoryManager.allocate(GlobalMemoryManager.java:100)
at 
org.apache.phoenix.memory.GlobalMemoryManager.allocate(GlobalMemoryManager.java:106)
at 
org.apache.phoenix.cache.aggcache.SpillableGroupByCache.(SpillableGroupByCache.java:150)
at 
org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver$GroupByCacheFactory.newCache(GroupedAggregateRegionObserver.java:365)
at 
org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver.scanUnordered(GroupedAggregateRegionObserver.java:400)
at 
org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver.doPostScannerOpen(GroupedAggregateRegionObserver.java:161)
at 
org.apache.phoenix.coprocessor.BaseScannerRegionObserver.postScannerOpen(BaseScannerRegionObserver.java:144)
... 8 more

at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at 
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
at 
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
at 
org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
at 
org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
at 
org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:285)
at 
org.apache.hadoop.hbase.client.ScannerCallable.openScanner(ScannerCallable.java:316)
at 
org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:164)
at 
org.apache.hadoop.hbase.client.Sca

git commit: Correcting test annotation for SpillableGroupByIT

2014-10-15 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/master 203da64df -> d96685ceb


Correcting test annotation for SpillableGroupByIT


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d96685ce
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d96685ce
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d96685ce

Branch: refs/heads/master
Commit: d96685ceb6fe51a53185a612ffc99de306fc81ce
Parents: 203da64
Author: James Taylor 
Authored: Wed Oct 15 15:58:05 2014 -0700
Committer: James Taylor 
Committed: Wed Oct 15 16:00:23 2014 -0700

--
 .../src/it/java/org/apache/phoenix/end2end/SpillableGroupByIT.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/d96685ce/phoenix-core/src/it/java/org/apache/phoenix/end2end/SpillableGroupByIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SpillableGroupByIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SpillableGroupByIT.java
index d3045f8..36bbca8 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SpillableGroupByIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SpillableGroupByIT.java
@@ -47,7 +47,7 @@ import com.google.common.collect.Maps;
  * and we wouldn't want that to be set for other tests sharing the same
  * cluster.
  */
-@Category(HBaseManagedTimeTest.class)
+@Category(NeedsOwnMiniClusterTest.class)
 public class SpillableGroupByIT extends BaseOwnClusterHBaseManagedTimeIT {
 
 private static final int NUM_ROWS_INSERTED = 1000;
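For readers less familiar with JUnit categories, the annotation swapped above is just a marker type that the build uses to decide which tests share a mini cluster and which get their own. A minimal, self-contained sketch of the mechanism; the interface and class names here are illustrative stand-ins, not Phoenix's own definitions:

    import org.junit.Test;
    import org.junit.experimental.categories.Category;

    // A category is nothing more than a marker type; runners (and the Maven
    // failsafe/surefire groups configuration) can include or exclude it.
    interface OwnClusterCategory {}

    @Category(OwnClusterCategory.class)
    public class ExampleOwnClusterIT {
        @Test
        public void runsOnlyWhenTheCategoryIsSelected() {
            // tests tagged this way can be given their own cluster configuration
        }
    }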



git commit: Correcting test annotation for SpillableGroupByIT

2014-10-15 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.0 c8d416942 -> fe2812955


Correcting test annotation for SpillableGroupByIT


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/fe281295
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/fe281295
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/fe281295

Branch: refs/heads/4.0
Commit: fe2812955d83ba62c184f2c2b70d4239e9f59926
Parents: c8d4169
Author: James Taylor 
Authored: Wed Oct 15 15:58:05 2014 -0700
Committer: James Taylor 
Committed: Wed Oct 15 16:00:04 2014 -0700

--
 .../src/it/java/org/apache/phoenix/end2end/SpillableGroupByIT.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/fe281295/phoenix-core/src/it/java/org/apache/phoenix/end2end/SpillableGroupByIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SpillableGroupByIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SpillableGroupByIT.java
index d3045f8..36bbca8 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SpillableGroupByIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SpillableGroupByIT.java
@@ -47,7 +47,7 @@ import com.google.common.collect.Maps;
  * and we wouldn't want that to be set for other tests sharing the same
  * cluster.
  */
-@Category(HBaseManagedTimeTest.class)
+@Category(NeedsOwnMiniClusterTest.class)
 public class SpillableGroupByIT extends BaseOwnClusterHBaseManagedTimeIT {
 
 private static final int NUM_ROWS_INSERTED = 1000;



[1/3] git commit: Add missing commit in unit test

2014-10-15 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/3.0 532b5d4bc -> 986566e50


Add missing commit in unit test


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/58426a38
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/58426a38
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/58426a38

Branch: refs/heads/3.0
Commit: 58426a3818ee4b7b9fb9d83207f34a68457d1251
Parents: 532b5d4
Author: James Taylor 
Authored: Wed Oct 15 12:56:31 2014 -0700
Committer: James Taylor 
Committed: Wed Oct 15 12:58:34 2014 -0700

--
 .../src/it/java/org/apache/phoenix/end2end/BaseViewIT.java | 1 +
 .../src/main/java/org/apache/phoenix/schema/MetaDataClient.java| 2 ++
 2 files changed, 3 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/58426a38/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
index 5e1cb9d..5a587aa 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
@@ -107,6 +107,7 @@ public class BaseViewIT extends BaseOwnClusterHBaseManagedTimeIT {
         Connection conn = DriverManager.getConnection(getUrl());
         conn.createStatement().execute("CREATE INDEX i1 on v(k3) include (s)");
         conn.createStatement().execute("UPSERT INTO v(k2,S,k3) VALUES(120,'foo',50.0)");
+        conn.commit();
 
 analyzeTable(conn, "v");
 List splits = getAllSplits(conn, "i1");

http://git-wip-us.apache.org/repos/asf/phoenix/blob/58426a38/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
index 6a852a2..9d50a07 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
@@ -478,6 +478,8 @@ public class MetaDataClient {
 
     public MutationState updateStatistics(UpdateStatisticsStatement updateStatisticsStmt)
             throws SQLException {
+        // Don't mistakenly commit pending rows
+        connection.rollback();
         // Check before updating the stats if we have reached the configured time to reupdate the stats once again
         ColumnResolver resolver = FromCompiler.getResolver(updateStatisticsStmt, connection);
         PTable table = resolver.getTables().get(0).getTable();
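Both hunks above trace back to the same fact: Phoenix connections default to autoCommit=false, so an UPSERT only buffers a mutation client-side until commit() is called, and the rollback() added to updateStatistics guards against committing a caller's pending rows as a side effect. A minimal sketch of the client-side half, assuming a local Phoenix URL and an existing table v (both illustrative):

    import java.sql.Connection;
    import java.sql.DriverManager;

    public class CommitExample {
        public static void main(String[] args) throws Exception {
            try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost")) {
                conn.createStatement().execute("UPSERT INTO v(k2, s, k3) VALUES (120, 'foo', 50.0)");
                conn.commit(); // without this call the pending mutation is never written
            }
        }
    }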



[3/3] git commit: Correcting test annotation for SpillableGroupByIT

2014-10-15 Thread jamestaylor
Correcting test annotation for SpillableGroupByIT


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/986566e5
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/986566e5
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/986566e5

Branch: refs/heads/3.0
Commit: 986566e50e856224946ba94684aedfc89202d6c3
Parents: abffd3f
Author: James Taylor 
Authored: Wed Oct 15 15:58:05 2014 -0700
Committer: James Taylor 
Committed: Wed Oct 15 15:58:05 2014 -0700

--
 .../src/it/java/org/apache/phoenix/end2end/SpillableGroupByIT.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/986566e5/phoenix-core/src/it/java/org/apache/phoenix/end2end/SpillableGroupByIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SpillableGroupByIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SpillableGroupByIT.java
index d3045f8..36bbca8 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SpillableGroupByIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SpillableGroupByIT.java
@@ -47,7 +47,7 @@ import com.google.common.collect.Maps;
  * and we wouldn't want that to be set for other tests sharing the same
  * cluster.
  */
-@Category(HBaseManagedTimeTest.class)
+@Category(NeedsOwnMiniClusterTest.class)
 public class SpillableGroupByIT extends BaseOwnClusterHBaseManagedTimeIT {
 
 private static final int NUM_ROWS_INSERTED = 1000;



[2/3] git commit: Backport fixes for PHOENIX-1149 (view index ignored if no where clause) and PHOENIX-933 (IndexMaintainer fixes for view index)

2014-10-15 Thread jamestaylor
Backport fixes for PHOENIX-1149 (view index ignored if no where clause) and 
PHOENIX-933 (IndexMaintainer fixes for view index)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/abffd3fc
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/abffd3fc
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/abffd3fc

Branch: refs/heads/3.0
Commit: abffd3fc7614ca70e040cdc4c752ee90e4b3b270
Parents: 58426a3
Author: James Taylor 
Authored: Wed Oct 15 15:57:23 2014 -0700
Committer: James Taylor 
Committed: Wed Oct 15 15:57:23 2014 -0700

--
 .../org/apache/phoenix/end2end/BaseViewIT.java  |  7 ++--
 .../apache/phoenix/compile/WhereOptimizer.java  |  5 +--
 .../apache/phoenix/index/IndexMaintainer.java   | 38 
 3 files changed, 38 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/abffd3fc/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
index 5a587aa..6466932 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
@@ -132,14 +132,13 @@ public class BaseViewIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 conn.createStatement().execute("CREATE INDEX i2 on v(s)");
 
 // new index hasn't been analyzed yet
-//splits = getAllSplits(conn, "i2");
-//assertEquals(saltBuckets == null ? 1 : 3, splits.size());
+splits = getAllSplits(conn, "i2");
+assertEquals(saltBuckets == null ? 1 : 3, splits.size());
 
 // analyze table should analyze all view data
 analyzeTable(conn, "t");
 splits = getAllSplits(conn, "i2");
-//assertEquals(saltBuckets == null ? 6 : 8, splits.size());
-assertEquals(saltBuckets == null ? 11 : 13, splits.size());
+assertEquals(saltBuckets == null ? 6 : 8, splits.size());
 
 query = "SELECT k1, k2, s FROM v WHERE s = 'foo'";
 rs = conn.createStatement().executeQuery(query);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/abffd3fc/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java
index 29ad6ee..94293a4 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java
@@ -105,7 +105,8 @@ public class WhereOptimizer {
 Expression whereClause, Set extractNodes) {
 PName tenantId = context.getConnection().getTenantId();
 PTable table = context.getCurrentTable().getTable();
-        if (whereClause == null && (tenantId == null || !table.isMultiTenant())) {
+        boolean isEverything = (tenantId == null || !table.isMultiTenant()) && table.getViewIndexId() == null;
+        if (whereClause == null && isEverything) {
             context.setScanRanges(ScanRanges.EVERYTHING);
             return whereClause;
         }
@@ -121,7 +122,7 @@ public class WhereOptimizer {
         // becomes consistent.
         keySlots = whereClause.accept(visitor);
 
-        if (keySlots == null && (tenantId == null || !table.isMultiTenant())) {
+        if (keySlots == null && isEverything) {
 context.setScanRanges(ScanRanges.EVERYTHING);
 return whereClause;
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/abffd3fc/phoenix-core/src/main/java/org/apache/phoenix/index/IndexMaintainer.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/index/IndexMaintainer.java 
b/phoenix-core/src/main/java/org/apache/phoenix/index/IndexMaintainer.java
index 5a3f7c8..eb9d15d 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/index/IndexMaintainer.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/index/IndexMaintainer.java
@@ -235,10 +235,20 @@ public class IndexMaintainer implements Writable, 
Iterable {
 byte[] indexTableName = index.getPhysicalName().getBytes();
 Integer nIndexSaltBuckets = index.getBucketNum();
 boolean indexWALDisabled = index.isWALDisabled();
-        int indexPosOffset = (index.getBucketNum() == null ? 0 : 1) + (this.isMultiTenant ? 1 : 0) + (this.viewIndexId == null ? 0 : 1);
+int indexPosOffset = (index.ge

Build failed in Jenkins: Phoenix | Master #425

2014-10-15 Thread Apache Jenkins Server
See 

Changes:

[jtaylor] PHOENIX-1313 Investigate why 
LocalIndexIT.testLocalIndexScanAfterRegionSplit() is failing (Rajeshbabu)

--
[...truncated 2536 lines...]
at 
org.apache.phoenix.iterate.ParallelIterators$2.call(ParallelIterators.java:627)
at 
org.apache.phoenix.iterate.ParallelIterators$2.call(ParallelIterators.java:622)
at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
at java.util.concurrent.FutureTask.run(FutureTask.java:166)
at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:724)

charDatatype(org.apache.phoenix.end2end.LastValueFunctionIT)  Time elapsed: 
0.104 sec  <<< ERROR!
org.apache.phoenix.exception.PhoenixIOException: 
org.apache.phoenix.exception.PhoenixIOException: 
org.apache.hadoop.hbase.DoNotRetryIOException: 
LAST_VALUE_TABLE,,1413399714017.8759e05831076837d5d64f0824119498.: Requested 
memory of 196116 bytes is larger than global pool of 4 bytes.
at 
org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:83)
at 
org.apache.phoenix.util.ServerUtil.throwIOException(ServerUtil.java:51)
at 
org.apache.phoenix.coprocessor.BaseScannerRegionObserver.postScannerOpen(BaseScannerRegionObserver.java:162)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.postScannerOpen(RegionCoprocessorHost.java:1845)
at 
org.apache.hadoop.hbase.regionserver.HRegionServer.scan(HRegionServer.java:3092)
at 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:29497)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2027)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:98)
at 
org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:114)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:94)
at java.lang.Thread.run(Thread.java:724)
Caused by: org.apache.phoenix.memory.InsufficientMemoryException: Requested 
memory of 196116 bytes is larger than global pool of 4 bytes.
at 
org.apache.phoenix.memory.GlobalMemoryManager.allocateBytes(GlobalMemoryManager.java:72)
at 
org.apache.phoenix.memory.GlobalMemoryManager.allocate(GlobalMemoryManager.java:100)
at 
org.apache.phoenix.memory.GlobalMemoryManager.allocate(GlobalMemoryManager.java:106)
at 
org.apache.phoenix.cache.aggcache.SpillableGroupByCache.(SpillableGroupByCache.java:150)
at 
org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver$GroupByCacheFactory.newCache(GroupedAggregateRegionObserver.java:365)
at 
org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver.scanUnordered(GroupedAggregateRegionObserver.java:400)
at 
org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver.doPostScannerOpen(GroupedAggregateRegionObserver.java:161)
at 
org.apache.phoenix.coprocessor.BaseScannerRegionObserver.postScannerOpen(BaseScannerRegionObserver.java:144)
... 8 more

at java.util.concurrent.FutureTask$Sync.innerGet(FutureTask.java:262)
at java.util.concurrent.FutureTask.get(FutureTask.java:119)
at 
org.apache.phoenix.iterate.ParallelIterators.getIterators(ParallelIterators.java:523)
at 
org.apache.phoenix.iterate.MergeSortResultIterator.getIterators(MergeSortResultIterator.java:48)
at 
org.apache.phoenix.iterate.MergeSortResultIterator.minIterator(MergeSortResultIterator.java:63)
at 
org.apache.phoenix.iterate.MergeSortResultIterator.next(MergeSortResultIterator.java:90)
at 
org.apache.phoenix.iterate.GroupedAggregatingResultIterator.next(GroupedAggregatingResultIterator.java:68)
at 
org.apache.phoenix.jdbc.PhoenixResultSet.next(PhoenixResultSet.java:734)
at 
org.apache.phoenix.end2end.LastValueFunctionIT.charDatatype(LastValueFunctionIT.java:176)
Caused by: org.apache.phoenix.exception.PhoenixIOException: 
org.apache.hadoop.hbase.DoNotRetryIOException: 
LAST_VALUE_TABLE,,1413399714017.8759e05831076837d5d64f0824119498.: Requested 
memory of 196116 bytes is larger than global pool of 4 bytes.
at 
org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:83)
at 
org.apache.phoenix.util.ServerUtil.throwIOException(ServerUtil.java:51)
at 
org.apache.phoenix.coprocessor.BaseScannerRegionObserver.postScannerOpen(BaseScannerRegionObserver.java:162)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.postScannerOpen(RegionCoprocessorHost.java:1845)
at 
org.apache.hadoop.hbase.regionserver.HRegionServer.scan(HRegionServer.java:3092)
at 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientServi

Apache-Phoenix | 4.0 | Build Successful

2014-10-15 Thread Apache Jenkins Server
4.0 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf/incubator-phoenix.git

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.0/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.0/lastCompletedBuild/testReport/

Changes
[jtaylor] Add missing commit in unit test



git commit: Add missing commit in unit test

2014-10-15 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/master 050643e78 -> 203da64df


Add missing commit in unit test


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/203da64d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/203da64d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/203da64d

Branch: refs/heads/master
Commit: 203da64df345ee38dbf9a6ef0727551e80eb6eb7
Parents: 050643e
Author: James Taylor 
Authored: Wed Oct 15 12:56:31 2014 -0700
Committer: James Taylor 
Committed: Wed Oct 15 12:57:20 2014 -0700

--
 .../src/it/java/org/apache/phoenix/end2end/BaseViewIT.java | 1 +
 .../src/main/java/org/apache/phoenix/schema/MetaDataClient.java| 2 ++
 2 files changed, 3 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/203da64d/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
index 2fe7fd1..c5a3b26 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
@@ -113,6 +113,7 @@ public abstract class BaseViewIT extends BaseOwnClusterHBaseManagedTimeIT {
             conn.createStatement().execute("CREATE INDEX i1 on v(k3) include (s)");
         }
         conn.createStatement().execute("UPSERT INTO v(k2,S,k3) VALUES(120,'foo',50.0)");
+        conn.commit();
 
 analyzeTable(conn, "v");
 List splits = getAllSplits(conn, "i1");

http://git-wip-us.apache.org/repos/asf/phoenix/blob/203da64d/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
index e342c6e..3c02456 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
@@ -489,6 +489,8 @@ public class MetaDataClient {
 
     public MutationState updateStatistics(UpdateStatisticsStatement updateStatisticsStmt)
             throws SQLException {
+        // Don't mistakenly commit pending rows
+        connection.rollback();
         // Check before updating the stats if we have reached the configured time to reupdate the stats once again
         ColumnResolver resolver = FromCompiler.getResolver(updateStatisticsStmt, connection);
         PTable table = resolver.getTables().get(0).getTable();



git commit: Add missing commit in unit test

2014-10-15 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.0 54deac35d -> c8d416942


Add missing commit in unit test


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c8d41694
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c8d41694
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c8d41694

Branch: refs/heads/4.0
Commit: c8d4169423d4a4ea48342bffbe808e695df3f2f9
Parents: 54deac3
Author: James Taylor 
Authored: Wed Oct 15 12:56:31 2014 -0700
Committer: James Taylor 
Committed: Wed Oct 15 12:56:31 2014 -0700

--
 .../src/it/java/org/apache/phoenix/end2end/BaseViewIT.java | 1 +
 .../src/main/java/org/apache/phoenix/schema/MetaDataClient.java| 2 ++
 2 files changed, 3 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c8d41694/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
index 2fe7fd1..c5a3b26 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
@@ -113,6 +113,7 @@ public abstract class BaseViewIT extends BaseOwnClusterHBaseManagedTimeIT {
             conn.createStatement().execute("CREATE INDEX i1 on v(k3) include (s)");
         }
         conn.createStatement().execute("UPSERT INTO v(k2,S,k3) VALUES(120,'foo',50.0)");
+        conn.commit();
 
 analyzeTable(conn, "v");
 List splits = getAllSplits(conn, "i1");

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c8d41694/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
index e342c6e..3c02456 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
@@ -489,6 +489,8 @@ public class MetaDataClient {
 
     public MutationState updateStatistics(UpdateStatisticsStatement updateStatisticsStmt)
             throws SQLException {
+        // Don't mistakenly commit pending rows
+        connection.rollback();
         // Check before updating the stats if we have reached the configured time to reupdate the stats once again
         ColumnResolver resolver = FromCompiler.getResolver(updateStatisticsStmt, connection);
         PTable table = resolver.getTables().get(0).getTable();



Apache-Phoenix | 4.0 | Build Successful

2014-10-15 Thread Apache Jenkins Server
4.0 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf/incubator-phoenix.git

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.0/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.0/lastCompletedBuild/testReport/

Changes
[jtaylor] PHOENIX-1313 Investigate why LocalIndexIT.testLocalIndexScanAfterRegionSplit() is failing (Rajeshbabu)



Apache-Phoenix | 3.0 | Hadoop1 | Build Successful

2014-10-15 Thread Apache Jenkins Server
3.0 branch build status Successful
Source repository https://git-wip-us.apache.org/repos/asf/phoenix.git

Last Successful Compiled Artifacts https://builds.apache.org/job/Phoenix-3.0-hadoop1/lastSuccessfulBuild/artifact/

Last Complete Test Report https://builds.apache.org/job/Phoenix-3.0-hadoop1/lastCompletedBuild/testReport/

Changes
[jtaylor] PHOENIX-1313 Investigate why LocalIndexIT.testLocalIndexScanAfterRegionSplit() is failing (Rajeshbabu)



git commit: PHOENIX-1313 Investigate why LocalIndexIT.testLocalIndexScanAfterRegionSplit() is failing (Rajeshbabu)

2014-10-15 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/3.0 a32b488bc -> 532b5d4bc


PHOENIX-1313 Investigate why LocalIndexIT.testLocalIndexScanAfterRegionSplit() 
is failing (Rajeshbabu)

Conflicts:
    phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
    phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java
    phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseScannerRegionObserver.java
    phoenix-core/src/main/java/org/apache/phoenix/iterate/ParallelIterators.java


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/532b5d4b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/532b5d4b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/532b5d4b

Branch: refs/heads/3.0
Commit: 532b5d4bc80bb7e51a10efe6940d1c9cd01bf5f7
Parents: a32b488
Author: James Taylor 
Authored: Wed Oct 15 11:43:47 2014 -0700
Committer: James Taylor 
Committed: Wed Oct 15 11:56:40 2014 -0700

--
 .../org/apache/phoenix/compile/ScanRanges.java  |  8 +++-
 .../phoenix/iterate/ParallelIterators.java  | 47 +---
 2 files changed, 36 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/532b5d4b/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java
index d5d2280..d60a288 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java
@@ -347,8 +347,12 @@ public class ScanRanges {
 scanStopKey = prefixKey(scanStopKey, scanKeyOffset, prefixBytes, keyOffset);
 }
 }
-if (scanStopKey.length > 0 && Bytes.compareTo(scanStartKey, scanStopKey) >= 0) {
-return null;
+// Don't let the stopRow of the scan go beyond the originalStopKey
+if (originalStopKey.length > 0 && Bytes.compareTo(scanStopKey, originalStopKey) > 0) {
+scanStopKey = originalStopKey;
+}
+if (scanStopKey.length > 0 && Bytes.compareTo(scanStartKey, scanStopKey) >= 0) {
+return null;
 }
 newScan.setStartRow(scanStartKey);
 newScan.setStopRow(scanStopKey);
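
As a standalone restatement of the new guard (a hedged sketch, not the Phoenix implementation), the computed stop key is clamped to the original stop key so a scan never reaches past its intended boundary; Bytes is HBase's org.apache.hadoop.hbase.util.Bytes, and the example keys are made up:

import org.apache.hadoop.hbase.util.Bytes;

public final class StopKeyClamp {
    // Returns the stop key to use: never later than originalStopKey.
    static byte[] clampStopKey(byte[] scanStopKey, byte[] originalStopKey) {
        // An empty originalStopKey means "unbounded", so there is nothing to clamp to.
        if (originalStopKey.length > 0 && Bytes.compareTo(scanStopKey, originalStopKey) > 0) {
            return originalStopKey;
        }
        return scanStopKey;
    }

    public static void main(String[] args) {
        byte[] computedStop = Bytes.toBytes("m");
        byte[] originalStop = Bytes.toBytes("j");
        // Prints "j": the scan is trimmed back to the original boundary.
        System.out.println(Bytes.toString(clampStopKey(computedStop, originalStop)));
    }
}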

http://git-wip-us.apache.org/repos/asf/phoenix/blob/532b5d4b/phoenix-core/src/main/java/org/apache/phoenix/iterate/ParallelIterators.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/ParallelIterators.java 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/ParallelIterators.java
index 9fbee29..43fa4f5 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/ParallelIterators.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/ParallelIterators.java
@@ -17,6 +17,8 @@
  */
 package org.apache.phoenix.iterate;
 
+import static org.apache.phoenix.util.ByteUtil.EMPTY_BYTE_ARRAY;
+
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -62,7 +64,6 @@ import org.apache.phoenix.schema.StaleRegionBoundaryCacheException;
 import org.apache.phoenix.schema.TableRef;
 import org.apache.phoenix.schema.stats.GuidePostsInfo;
 import org.apache.phoenix.schema.stats.PTableStats;
-import org.apache.phoenix.util.ByteUtil;
 import org.apache.phoenix.util.ReadOnlyProps;
 import org.apache.phoenix.util.SQLCloseables;
 import org.apache.phoenix.util.ScanUtil;
@@ -156,7 +157,7 @@ public class ParallelIterators extends ExplainTable implements ResultIterators {
 doColumnProjectionOptimization(context, scan, table, statement);
 
 this.iteratorFactory = iteratorFactory;
-this.scans = getParallelScans(context.getScan());
+this.scans = getParallelScans();
 List<KeyRange> splitRanges = Lists.newArrayListWithExpectedSize(scans.size() * ESTIMATED_GUIDEPOSTS_PER_REGION);
 for (List<Scan> scanList : scans) {
 for (Scan aScan : scanList) {
@@ -374,6 +375,11 @@ public class ParallelIterators extends ExplainTable implements ResultIterators {
 }
 return scans;
 }
+
+private List<List<Scan>> getParallelScans() throws SQLException {
+return getParallelScans(EMPTY_BYTE_ARRAY, EMPTY_BYTE_ARRAY);
+}
+
 /**
  * Compute the list of parallel scans to run for a given query. The inner scans
  * may be concatenated together directly, while the other ones may need to be
@@ -381,9 +387,11 @@ public class ParallelIterators extends ExplainTable implements ResultIterators {
  * @return list of parallel scans to run for 
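
The new no-argument getParallelScans() is a thin overload that delegates with empty start and stop keys, i.e. the full key range. A shape-only sketch of that pattern (an assumption-level skeleton, not the Phoenix implementation, which walks guideposts and region boundaries):

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.client.Scan;

public class ParallelScansSketch {
    private static final byte[] EMPTY_BYTE_ARRAY = new byte[0];

    // Empty start/stop keys mean "scan everything".
    List<List<Scan>> getParallelScans() {
        return getParallelScans(EMPTY_BYTE_ARRAY, EMPTY_BYTE_ARRAY);
    }

    List<List<Scan>> getParallelScans(byte[] startKey, byte[] stopKey) {
        // The real method builds one inner list per contiguous key range between
        // startKey and stopKey; here we only return an empty structure of the same shape.
        return new ArrayList<>();
    }
}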

git commit: PHOENIX-1313 Investigate why LocalIndexIT.testLocalIndexScanAfterRegionSplit() is failing (Rajeshbabu)

2014-10-15 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/master 8ece1a74a -> 050643e78


PHOENIX-1313 Investigate why LocalIndexIT.testLocalIndexScanAfterRegionSplit() 
is failing (Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/050643e7
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/050643e7
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/050643e7

Branch: refs/heads/master
Commit: 050643e78d6e7cae7cdd0131353d87b0a7bb4507
Parents: 8ece1a7
Author: James Taylor 
Authored: Wed Oct 15 11:43:47 2014 -0700
Committer: James Taylor 
Committed: Wed Oct 15 11:45:05 2014 -0700

--
 .../phoenix/end2end/index/LocalIndexIT.java | 11 ++-
 .../org/apache/phoenix/compile/ScanRanges.java  |  4 ++
 .../coprocessor/BaseScannerRegionObserver.java  | 20 +++---
 .../phoenix/iterate/ParallelIterators.java  | 73 +---
 4 files changed, 68 insertions(+), 40 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/050643e7/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
index 376590a..ef3dc77 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
@@ -59,7 +59,6 @@ import org.apache.phoenix.util.QueryUtil;
 import org.apache.phoenix.util.ReadOnlyProps;
 import org.apache.phoenix.util.TestUtil;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 
 import com.google.common.collect.Maps;
@@ -633,7 +632,6 @@ public class LocalIndexIT extends BaseIndexIT {
 }
 
 @Test
-@Ignore // TODO: ask Rajeshbabu to take a look
 public void testLocalIndexScanAfterRegionSplit() throws Exception {
 createBaseTable(DATA_TABLE_NAME, null, "('e','j','o')");
 Connection conn1 = DriverManager.getConnection(getUrl());
@@ -690,10 +688,11 @@ public class LocalIndexIT extends BaseIndexIT {
 query = "SELECT t_id,k1,k3 FROM " + DATA_TABLE_NAME;
 rs = conn1.createStatement().executeQuery("EXPLAIN "+query);
 assertEquals(
-"CLIENT PARALLEL " + (4+i) + "-WAY RANGE SCAN OVER "
-+ MetaDataUtil.getLocalIndexTableName(DATA_TABLE_NAME)+" [-32767]\n"+
-"CLIENT MERGE SORT",
-QueryUtil.getExplainPlan(rs));
+"CLIENT PARALLEL "
++ ((strings[3 * i].compareTo("j") < 0) ? (4 + i) : (4 + i - 1))
++ "-WAY RANGE SCAN OVER "
++ MetaDataUtil.getLocalIndexTableName(DATA_TABLE_NAME) + " [-32767]\n"
++ "CLIENT MERGE SORT", QueryUtil.getExplainPlan(rs));
 rs = conn1.createStatement().executeQuery(query);
 Thread.sleep(1000);
 for (int j = 0; j < 26; j++) {
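
One way to read the revised expectation (inferred from the diff, not copied from the test): when the split point chosen in iteration i sorts before the pre-existing boundary 'j', the local index ends up spread over one more region than otherwise, so the EXPLAIN plan reports one more parallel chunk. A tiny sketch of that arithmetic:

public class ExpectedWays {
    // Mirrors the ternary in the assertion above: splitPoint stands in for strings[3 * i].
    static int expectedWays(String splitPoint, int i) {
        return (splitPoint.compareTo("j") < 0) ? (4 + i) : (4 + i - 1);
    }

    public static void main(String[] args) {
        System.out.println(expectedWays("c", 0)); // split before "j" -> 4-WAY
        System.out.println(expectedWays("q", 0)); // split at/after "j" -> 3-WAY
    }
}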

http://git-wip-us.apache.org/repos/asf/phoenix/blob/050643e7/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java
index 923bcf3..0ab6368 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java
@@ -347,6 +347,10 @@ public class ScanRanges {
 scanStopKey = prefixKey(scanStopKey, scanKeyOffset, prefixBytes, keyOffset);
 }
 }
+// Don't let the stopRow of the scan go beyond the originalStopKey
+if (originalStopKey.length > 0 && Bytes.compareTo(scanStopKey, originalStopKey) > 0) {
+scanStopKey = originalStopKey;
+}
 if (scanStopKey.length > 0 && Bytes.compareTo(scanStartKey, scanStopKey) >= 0) {
 return null;
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/050643e7/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseScannerRegionObserver.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseScannerRegionObserver.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseScannerRegionObserver.java
index d65beee..68fa3d1 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseScannerRegionObserver.java
+++ 
b/phoenix-core

git commit: PHOENIX-1313 Investigate why LocalIndexIT.testLocalIndexScanAfterRegionSplit() is failing (Rajeshbabu)

2014-10-15 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.0 b85f99abd -> 54deac35d


PHOENIX-1313 Investigate why LocalIndexIT.testLocalIndexScanAfterRegionSplit() 
is failing (Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/54deac35
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/54deac35
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/54deac35

Branch: refs/heads/4.0
Commit: 54deac35d864a0885d8ac3624c4f4e6dd643ba89
Parents: b85f99a
Author: James Taylor 
Authored: Wed Oct 15 11:43:47 2014 -0700
Committer: James Taylor 
Committed: Wed Oct 15 11:43:47 2014 -0700

--
 .../phoenix/end2end/index/LocalIndexIT.java | 11 ++-
 .../org/apache/phoenix/compile/ScanRanges.java  |  4 ++
 .../coprocessor/BaseScannerRegionObserver.java  | 20 +++---
 .../phoenix/iterate/ParallelIterators.java  | 73 +---
 4 files changed, 68 insertions(+), 40 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/54deac35/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
index 376590a..ef3dc77 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
@@ -59,7 +59,6 @@ import org.apache.phoenix.util.QueryUtil;
 import org.apache.phoenix.util.ReadOnlyProps;
 import org.apache.phoenix.util.TestUtil;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 
 import com.google.common.collect.Maps;
@@ -633,7 +632,6 @@ public class LocalIndexIT extends BaseIndexIT {
 }
 
 @Test
-@Ignore // TODO: ask Rajeshbabu to take a look
 public void testLocalIndexScanAfterRegionSplit() throws Exception {
 createBaseTable(DATA_TABLE_NAME, null, "('e','j','o')");
 Connection conn1 = DriverManager.getConnection(getUrl());
@@ -690,10 +688,11 @@ public class LocalIndexIT extends BaseIndexIT {
 query = "SELECT t_id,k1,k3 FROM " + DATA_TABLE_NAME;
 rs = conn1.createStatement().executeQuery("EXPLAIN "+query);
 assertEquals(
-"CLIENT PARALLEL " + (4+i) + "-WAY RANGE SCAN OVER "
-+ MetaDataUtil.getLocalIndexTableName(DATA_TABLE_NAME)+" [-32767]\n"+
-"CLIENT MERGE SORT",
-QueryUtil.getExplainPlan(rs));
+"CLIENT PARALLEL "
++ ((strings[3 * i].compareTo("j") < 0) ? (4 + i) : (4 + i - 1))
++ "-WAY RANGE SCAN OVER "
++ MetaDataUtil.getLocalIndexTableName(DATA_TABLE_NAME) + " [-32767]\n"
++ "CLIENT MERGE SORT", QueryUtil.getExplainPlan(rs));
 rs = conn1.createStatement().executeQuery(query);
 Thread.sleep(1000);
 for (int j = 0; j < 26; j++) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/54deac35/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java
index 923bcf3..0ab6368 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java
@@ -347,6 +347,10 @@ public class ScanRanges {
 scanStopKey = prefixKey(scanStopKey, scanKeyOffset, prefixBytes, keyOffset);
 }
 }
+// Don't let the stopRow of the scan go beyond the originalStopKey
+if (originalStopKey.length > 0 && Bytes.compareTo(scanStopKey, originalStopKey) > 0) {
+scanStopKey = originalStopKey;
+}
 if (scanStopKey.length > 0 && Bytes.compareTo(scanStartKey, scanStopKey) >= 0) {
 return null;
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/54deac35/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseScannerRegionObserver.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseScannerRegionObserver.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseScannerRegionObserver.java
index d65beee..68fa3d1 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseScannerRegionObserver.java
+++ 
b/phoenix-core/src/m

Jenkins build is back to normal : Phoenix | 4.0 #384

2014-10-15 Thread Apache Jenkins Server
See 



Apache-Phoenix | 4.0 | Build Successful

2014-10-15 Thread Apache Jenkins Server
4.0 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf/incubator-phoenix.git

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.0/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.0/lastCompletedBuild/testReport/

Changes