phoenix git commit: PHOENIX-1684 Functional Index using REGEXP_SUBSTR doesn't work correctly

2015-03-24 Thread tdsilva
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 af0d65a0a -> c992c8694


PHOENIX-1684 Functional Index using REGEXP_SUBSTR doesn't work correctly
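For context, the functional index this fix targets is one whose indexed expression is a function call such as REGEXP_SUBSTR rather than a plain column. A minimal, illustrative JDBC sketch follows; the table, index name, and connection URL are invented for illustration and are not taken from the patch:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;

public class RegexpSubstrIndexExample {
    public static void main(String[] args) throws Exception {
        // Assumed connection URL; replace with your cluster's quorum.
        Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
        conn.createStatement().execute(
            "CREATE TABLE IF NOT EXISTS LOG_EVENTS (ID INTEGER PRIMARY KEY, MSG VARCHAR)");
        // Functional index over the first run of digits in MSG.
        conn.createStatement().execute(
            "CREATE INDEX IF NOT EXISTS IDX_MSG_NUM ON LOG_EVENTS (REGEXP_SUBSTR(MSG, '[0-9]+'))");
        conn.createStatement().execute("UPSERT INTO LOG_EVENTS VALUES (1, 'order 12345 shipped')");
        conn.commit();
        // A query that repeats the indexed expression should be able to use IDX_MSG_NUM.
        ResultSet rs = conn.createStatement().executeQuery(
            "SELECT ID FROM LOG_EVENTS WHERE REGEXP_SUBSTR(MSG, '[0-9]+') = '12345'");
        while (rs.next()) {
            System.out.println(rs.getInt(1));
        }
        conn.close();
    }
}

Queries of this shape, where the WHERE clause repeats the indexed expression, are the case the fix below addresses.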


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c992c869
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c992c869
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c992c869

Branch: refs/heads/4.x-HBase-0.98
Commit: c992c86941798f960948df12ae40ee2d4a0ae820
Parents: af0d65a
Author: Thomas tdsi...@salesforce.com
Authored: Thu Mar 19 13:57:27 2015 -0700
Committer: Thomas tdsi...@salesforce.com
Committed: Tue Mar 24 15:49:49 2015 -0700

--
 .../end2end/index/IndexExpressionIT.java| 161 ++-
 .../phoenix/compile/PostIndexDDLCompiler.java   |   4 +-
 .../parse/IndexExpressionParseNodeRewriter.java |  30 +---
 .../apache/phoenix/schema/MetaDataClient.java   |   4 +-
 .../org/apache/phoenix/util/StringUtil.java |   5 +
 .../phoenix/compile/QueryCompilerTest.java  |  22 ++-
 6 files changed, 153 insertions(+), 73 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c992c869/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexExpressionIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexExpressionIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexExpressionIT.java
index 1e3733b..0203e35 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexExpressionIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexExpressionIT.java
@@ -1202,54 +1202,60 @@ public class IndexExpressionIT extends BaseHBaseManagedTimeIT {
 
     @Test
     public void testViewUsesTableIndex() throws Exception {
-        ResultSet rs;
         Connection conn = DriverManager.getConnection(getUrl());
-        String ddl = "CREATE TABLE t (k1 INTEGER NOT NULL, k2 INTEGER NOT NULL, s1 VARCHAR, s2 VARCHAR, s3 VARCHAR, s4 VARCHAR CONSTRAINT pk PRIMARY KEY (k1, k2))";
-        conn.createStatement().execute(ddl);
-        conn.createStatement().execute("CREATE INDEX i1 ON t(k2, s2, s3, s1)");
-        conn.createStatement().execute("CREATE INDEX i2 ON t(k2, s2||'_'||s3, s1, s4)");
-
-        ddl = "CREATE VIEW v AS SELECT * FROM t WHERE s1 = 'foo'";
-        conn.createStatement().execute(ddl);
-        conn.createStatement().execute("UPSERT INTO t VALUES(1,1,'foo','abc','cab')");
-        conn.createStatement().execute("UPSERT INTO t VALUES(2,2,'bar','xyz','zyx')");
-        conn.commit();
-
-        rs = conn.createStatement().executeQuery("SELECT count(*) FROM v");
-        assertTrue(rs.next());
-        assertEquals(1, rs.getLong(1));
-        assertFalse(rs.next());
-
-        //i2 should be used since it contains s3||'_'||s4 i
-        String query = "SELECT s2||'_'||s3 FROM v WHERE k2=1 AND (s2||'_'||s3)='abc_cab'";
-        rs = conn.createStatement().executeQuery("EXPLAIN " + query);
-        String queryPlan = QueryUtil.getExplainPlan(rs);
-        assertEquals(
-                "CLIENT PARALLEL 1-WAY RANGE SCAN OVER I2 [1,'abc_cab','foo']\n" + 
-                "    SERVER FILTER BY FIRST KEY ONLY", queryPlan);
-        rs = conn.createStatement().executeQuery(query);
-        assertTrue(rs.next());
-        assertEquals("abc_cab", rs.getString(1));
-        assertFalse(rs.next());
-
-        conn.createStatement().execute("ALTER VIEW v DROP COLUMN s4");
-        //i2 cannot be used since s4 has been dropped from the view, so i1 will be used 
-        rs = conn.createStatement().executeQuery("EXPLAIN " + query);
-        queryPlan = QueryUtil.getExplainPlan(rs);
-        assertEquals(
-                "CLIENT PARALLEL 1-WAY RANGE SCAN OVER I1 [1]\n" + 
-                "    SERVER FILTER BY FIRST KEY ONLY AND ((\"S2\" || '_' || \"S3\") = 'abc_cab' AND \"S1\" = 'foo')", queryPlan);
-        rs = conn.createStatement().executeQuery(query);
-        assertTrue(rs.next());
-        assertEquals("abc_cab", rs.getString(1));
-        assertFalse(rs.next());
+        try 
+        {
+            ResultSet rs;
+            String ddl = "CREATE TABLE t (k1 INTEGER NOT NULL, k2 INTEGER NOT NULL, s1 VARCHAR, s2 VARCHAR, s3 VARCHAR, s4 VARCHAR CONSTRAINT pk PRIMARY KEY (k1, k2))";
+            conn.createStatement().execute(ddl);
+            conn.createStatement().execute("CREATE INDEX i1 ON t(k2, s2, s3, s1)");
+            conn.createStatement().execute("CREATE INDEX i2 ON t(k2, s2||'_'||s3, s1, s4)");
+
+            ddl = "CREATE VIEW v AS SELECT * FROM t WHERE s1 = 'foo'";
+            conn.createStatement().execute(ddl);
+            conn.createStatement().execute("UPSERT INTO t 

phoenix git commit: PHOENIX-1684 Functional Index using REGEXP_SUBSTR doesn't work correctly

2015-03-24 Thread tdsilva
Repository: phoenix
Updated Branches:
  refs/heads/master ab9c9283e -> a94a6f419


PHOENIX-1684 Functional Index using REGEXP_SUBSTR doesn't work correctly


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a94a6f41
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a94a6f41
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a94a6f41

Branch: refs/heads/master
Commit: a94a6f4195af2867379803f19c90045eb3943c2d
Parents: ab9c928
Author: Thomas tdsi...@salesforce.com
Authored: Thu Mar 19 13:57:27 2015 -0700
Committer: Thomas tdsi...@salesforce.com
Committed: Tue Mar 24 15:58:52 2015 -0700

--
 .../end2end/index/IndexExpressionIT.java| 161 ++-
 .../phoenix/compile/PostIndexDDLCompiler.java   |   4 +-
 .../parse/IndexExpressionParseNodeRewriter.java |  30 +---
 .../apache/phoenix/schema/MetaDataClient.java   |   4 +-
 .../org/apache/phoenix/util/StringUtil.java |   5 +
 .../phoenix/compile/QueryCompilerTest.java  |  22 ++-
 6 files changed, 153 insertions(+), 73 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a94a6f41/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexExpressionIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexExpressionIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexExpressionIT.java
index 1e3733b..0203e35 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexExpressionIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexExpressionIT.java
@@ -1202,54 +1202,60 @@ public class IndexExpressionIT extends BaseHBaseManagedTimeIT {
 
     @Test
    public void testViewUsesTableIndex() throws Exception {
-        ResultSet rs;
         Connection conn = DriverManager.getConnection(getUrl());
-        String ddl = "CREATE TABLE t (k1 INTEGER NOT NULL, k2 INTEGER NOT NULL, s1 VARCHAR, s2 VARCHAR, s3 VARCHAR, s4 VARCHAR CONSTRAINT pk PRIMARY KEY (k1, k2))";
-        conn.createStatement().execute(ddl);
-        conn.createStatement().execute("CREATE INDEX i1 ON t(k2, s2, s3, s1)");
-        conn.createStatement().execute("CREATE INDEX i2 ON t(k2, s2||'_'||s3, s1, s4)");
-
-        ddl = "CREATE VIEW v AS SELECT * FROM t WHERE s1 = 'foo'";
-        conn.createStatement().execute(ddl);
-        conn.createStatement().execute("UPSERT INTO t VALUES(1,1,'foo','abc','cab')");
-        conn.createStatement().execute("UPSERT INTO t VALUES(2,2,'bar','xyz','zyx')");
-        conn.commit();
-
-        rs = conn.createStatement().executeQuery("SELECT count(*) FROM v");
-        assertTrue(rs.next());
-        assertEquals(1, rs.getLong(1));
-        assertFalse(rs.next());
-
-        //i2 should be used since it contains s3||'_'||s4 i
-        String query = "SELECT s2||'_'||s3 FROM v WHERE k2=1 AND (s2||'_'||s3)='abc_cab'";
-        rs = conn.createStatement().executeQuery("EXPLAIN " + query);
-        String queryPlan = QueryUtil.getExplainPlan(rs);
-        assertEquals(
-                "CLIENT PARALLEL 1-WAY RANGE SCAN OVER I2 [1,'abc_cab','foo']\n" + 
-                "    SERVER FILTER BY FIRST KEY ONLY", queryPlan);
-        rs = conn.createStatement().executeQuery(query);
-        assertTrue(rs.next());
-        assertEquals("abc_cab", rs.getString(1));
-        assertFalse(rs.next());
-
-        conn.createStatement().execute("ALTER VIEW v DROP COLUMN s4");
-        //i2 cannot be used since s4 has been dropped from the view, so i1 will be used 
-        rs = conn.createStatement().executeQuery("EXPLAIN " + query);
-        queryPlan = QueryUtil.getExplainPlan(rs);
-        assertEquals(
-                "CLIENT PARALLEL 1-WAY RANGE SCAN OVER I1 [1]\n" + 
-                "    SERVER FILTER BY FIRST KEY ONLY AND ((\"S2\" || '_' || \"S3\") = 'abc_cab' AND \"S1\" = 'foo')", queryPlan);
-        rs = conn.createStatement().executeQuery(query);
-        assertTrue(rs.next());
-        assertEquals("abc_cab", rs.getString(1));
-        assertFalse(rs.next());
+        try 
+        {
+            ResultSet rs;
+            String ddl = "CREATE TABLE t (k1 INTEGER NOT NULL, k2 INTEGER NOT NULL, s1 VARCHAR, s2 VARCHAR, s3 VARCHAR, s4 VARCHAR CONSTRAINT pk PRIMARY KEY (k1, k2))";
+            conn.createStatement().execute(ddl);
+            conn.createStatement().execute("CREATE INDEX i1 ON t(k2, s2, s3, s1)");
+            conn.createStatement().execute("CREATE INDEX i2 ON t(k2, s2||'_'||s3, s1, s4)");
+
+            ddl = "CREATE VIEW v AS SELECT * FROM t WHERE s1 = 'foo'";
+            conn.createStatement().execute(ddl);
+            conn.createStatement().execute("UPSERT INTO t 

Build failed in Jenkins: Phoenix-4.x-HBase-1.x #5

2015-03-24 Thread Apache Jenkins Server
See https://builds.apache.org/job/Phoenix-4.x-HBase-1.x/5/changes

Changes:

[gabrielr] PHOENIX-1653 Support separate clusters for MR jobs

--
[...truncated 127681 lines...]
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testClientCacheUpdatedOnChangingPhoenixTableProperties(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: 
AlterTableIT.testClientCacheUpdatedOnChangingPhoenixTableProperties:1964 ? 
PhoenixIO
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testDisableWAL(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testDisableWAL:728 ? PhoenixIO 
org.apache.hadoop.hbase.DoNotRetry...
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testDisallowAddingNotNullableColumnNotPartOfPkForExistingTable(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: 
AlterTableIT.testDisallowAddingNotNullableColumnNotPartOfPkForExistingTable:697 
? PhoenixIO
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testDropColumnFromSaltedTable(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testDropColumnFromSaltedTable:546 ? PhoenixIO 
org.apache.hadoop.h...
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testDropColumnsWithImutability(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testDropColumnsWithImutability:823 ? PhoenixIO 
org.apache.hadoop
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testDropCoveredColumn(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testDropCoveredColumn:319 ? PhoenixIO 
org.apache.hadoop.hbase.DoN...
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO

  
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO
org.apache.phoenix.end2end.AlterTableIT.testDropVarCols(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testDropVarCols:673 ? PhoenixIO 
org.apache.hadoop.hbase.DoNotRetr...
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testNewColumnFamilyInheritsTTLOfEmptyCF(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testNewColumnFamilyInheritsTTLOfEmptyCF:1302 ? PhoenixIO 
org.apac...
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO

  
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO
org.apache.phoenix.end2end.AlterTableIT.testSetHColumnOrHTablePropertiesOnViewsNotAllowed(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testSetHColumnOrHTablePropertiesOnViewsNotAllowed:1213 ? 
PhoenixIO
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO

  
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO
org.apache.phoenix.end2end.AlterTableIT.testSetHColumnPropertyAndAddColumnForDefaultCFForTableWithOnlyPKCols(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: 
AlterTableIT.testSetHColumnPropertyAndAddColumnForDefaultCFForTableWithOnlyPKCols:1711
 ? PhoenixIO
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO

  

[2/2] phoenix git commit: PHOENIX-1691 Allow setting sampling rate while enabling tracing. (Rajeshbabu)

2015-03-24 Thread jamestaylor
PHOENIX-1691 Allow setting sampling rate while enabling tracing. (Rajeshbabu)
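
For reference, the feature adds an optional sampling clause to the TRACE ON statement, as exercised by the updated test below. A minimal, illustrative JDBC sketch; the connection URL is assumed and is not part of the patch:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class TraceSamplingExample {
    public static void main(String[] args) throws Exception {
        // Assumed connection URL; replace with your cluster's quorum.
        Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
        Statement stmt = conn.createStatement();
        // Trace roughly half of the requests on this connection.
        ResultSet rs = stmt.executeQuery("TRACE ON WITH SAMPLING 0.5");
        if (rs.next()) {
            System.out.println("trace_id = " + rs.getLong("trace_id"));
        }
        // ... run traced queries here ...
        stmt.executeQuery("TRACE OFF"); // stop tracing on this connection
        conn.close();
    }
}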

Conflicts:

phoenix-core/src/main/antlr3/PhoenixSQL.g


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f977ac6f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f977ac6f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f977ac6f

Branch: refs/heads/4.3
Commit: f977ac6f351716f4c4143fdef080e0beb938b1ba
Parents: 30ddd77
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Wed Mar 18 10:20:22 2015 +0530
Committer: James Taylor jamestay...@apache.org
Committed: Tue Mar 24 16:16:19 2015 -0700

--
 .../phoenix/trace/PhoenixTracingEndToEndIT.java | 39 +---
 phoenix-core/src/main/antlr3/PhoenixSQL.g   |  9 -
 .../apache/phoenix/compile/TraceQueryPlan.java  | 33 -
 .../apache/phoenix/jdbc/PhoenixStatement.java   |  8 ++--
 .../apache/phoenix/parse/ParseNodeFactory.java  |  4 +-
 .../apache/phoenix/parse/TraceStatement.java| 12 --
 6 files changed, 79 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f977ac6f/phoenix-core/src/it/java/org/apache/phoenix/trace/PhoenixTracingEndToEndIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/trace/PhoenixTracingEndToEndIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/trace/PhoenixTracingEndToEndIT.java
index 53d22c5..05d9e41 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/trace/PhoenixTracingEndToEndIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/trace/PhoenixTracingEndToEndIT.java
@@ -46,6 +46,7 @@ import org.cloudera.htrace.Span;
 import org.cloudera.htrace.SpanReceiver;
 import org.cloudera.htrace.Trace;
 import org.cloudera.htrace.TraceScope;
+import org.cloudera.htrace.impl.ProbabilitySampler;
 import org.junit.After;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -403,18 +404,44 @@ public class PhoenixTracingEndToEndIT extends BaseTracingTestIT {
             Statement statement = conn1.createStatement();
             ResultSet rs = statement.executeQuery("TRACE ON");
             assertTrue(rs.next());
-            long traceId = ((PhoenixConnection) conn1).getTraceScope().getSpan()
-                    .getTraceId();
-            assertEquals(rs.getLong(1), traceId);
-            assertEquals(rs.getLong("trace_id"), traceId);
+            PhoenixConnection pconn = (PhoenixConnection) conn1;
+            long traceId = pconn.getTraceScope().getSpan().getTraceId();
+            assertEquals(traceId, rs.getLong(1));
+            assertEquals(traceId, rs.getLong("trace_id"));
             assertFalse(rs.next());
+            assertEquals(Sampler.ALWAYS, pconn.getSampler());
 
             rs = statement.executeQuery("TRACE OFF");
             assertTrue(rs.next());
-            assertEquals(rs.getLong(1), traceId);
-            assertEquals(rs.getLong("trace_id"), traceId);
+            assertEquals(traceId, rs.getLong(1));
+            assertEquals(traceId, rs.getLong("trace_id"));
+            assertFalse(rs.next());
+            assertEquals(Sampler.NEVER, pconn.getSampler());
+
+            rs = statement.executeQuery("TRACE OFF");
             assertFalse(rs.next());
 
+            rs = statement.executeQuery("TRACE ON WITH SAMPLING 0.5");
+            rs.next();
+            assertTrue(((PhoenixConnection) conn1).getSampler() instanceof ProbabilitySampler);
+
+            rs = statement.executeQuery("TRACE ON WITH SAMPLING 1.0");
+            assertTrue(rs.next());
+            traceId = pconn.getTraceScope().getSpan()
+                    .getTraceId();
+            assertEquals(traceId, rs.getLong(1));
+            assertEquals(traceId, rs.getLong("trace_id"));
+            assertFalse(rs.next());
+            assertEquals(Sampler.ALWAYS, pconn.getSampler());
+
+            rs = statement.executeQuery("TRACE ON WITH SAMPLING 0.5");
+            rs.next();
+            assertTrue(((PhoenixConnection) conn1).getSampler() instanceof ProbabilitySampler);
+
+            rs = statement.executeQuery("TRACE ON WITH SAMPLING 0.0");
+            rs.next();
+            assertEquals(Sampler.NEVER, pconn.getSampler());
+
             rs = statement.executeQuery("TRACE OFF");
             assertFalse(rs.next());
         } finally {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f977ac6f/phoenix-core/src/main/antlr3/PhoenixSQL.g
--
diff --git a/phoenix-core/src/main/antlr3/PhoenixSQL.g 
b/phoenix-core/src/main/antlr3/PhoenixSQL.g
index a007308..b72578c 100644
--- a/phoenix-core/src/main/antlr3/PhoenixSQL.g
+++ b/phoenix-core/src/main/antlr3/PhoenixSQL.g
@@ -109,6 +109,7 @@ tokens
 STATISTICS='statistics';
 

Build failed in Jenkins: Phoenix-4.x-HBase-1.x #6

2015-03-24 Thread Apache Jenkins Server
See https://builds.apache.org/job/Phoenix-4.x-HBase-1.x/6/changes

Changes:

[thomas] PHOENIX-1744 Allow Integer, UnsignedInt and UnsignedLong to be Cast to 
TIMESTAMP (Dave Hacker)

--
[...truncated 127169 lines...]
  Run 1: AlterTableIT.testAlterStoreNulls:1860 ? PhoenixIO 
org.apache.phoenix.exception...
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testClientCacheUpdatedOnChangingPhoenixTableProperties(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: 
AlterTableIT.testClientCacheUpdatedOnChangingPhoenixTableProperties:1964 ? 
PhoenixIO
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testDisableWAL(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testDisableWAL:728 ? PhoenixIO 
org.apache.hadoop.hbase.DoNotRetry...
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testDisallowAddingNotNullableColumnNotPartOfPkForExistingTable(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: 
AlterTableIT.testDisallowAddingNotNullableColumnNotPartOfPkForExistingTable:697 
? PhoenixIO
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testDropColumnFromSaltedTable(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testDropColumnFromSaltedTable:546 ? PhoenixIO 
org.apache.hadoop.h...
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testDropColumnsWithImutability(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testDropColumnsWithImutability:823 ? PhoenixIO 
org.apache.hadoop
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testDropCoveredColumn(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testDropCoveredColumn:319 ? PhoenixIO 
org.apache.hadoop.hbase.DoN...
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO

  
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO
org.apache.phoenix.end2end.AlterTableIT.testDropVarCols(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testDropVarCols:673 ? PhoenixIO 
org.apache.hadoop.hbase.DoNotRetr...
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testNewColumnFamilyInheritsTTLOfEmptyCF(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testNewColumnFamilyInheritsTTLOfEmptyCF:1302 ? PhoenixIO 
org.apac...
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO

  
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO
org.apache.phoenix.end2end.AlterTableIT.testSetHColumnOrHTablePropertiesOnViewsNotAllowed(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testSetHColumnOrHTablePropertiesOnViewsNotAllowed:1213 ? 
PhoenixIO
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO

  
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 ? PhoenixIO
org.apache.phoenix.end2end.AlterTableIT.testSetHColumnPropertyAndAddColumnForDefaultCFForTableWithOnlyPKCols(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: 
AlterTableIT.testSetHColumnPropertyAndAddColumnForDefaultCFForTableWithOnlyPKCols:1711
 ? PhoenixIO
  Run 2: 

Build failed in Jenkins: Phoenix-4.x-HBase-1.x #7

2015-03-24 Thread Apache Jenkins Server
See https://builds.apache.org/job/Phoenix-4.x-HBase-1.x/7/changes

Changes:

[thomas] PHOENIX-1684 Functional Index using REGEXP_SUBSTR doesn't work 
correctly

--
[...truncated 129547 lines...]
  Run 1: AlterTableIT.testAlterStoreNulls:1860 » PhoenixIO 
org.apache.phoenix.exception...
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testClientCacheUpdatedOnChangingPhoenixTableProperties(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: 
AlterTableIT.testClientCacheUpdatedOnChangingPhoenixTableProperties:1964 » 
PhoenixIO
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testDisableWAL(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testDisableWAL:728 » PhoenixIO 
org.apache.hadoop.hbase.DoNotRetry...
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testDisallowAddingNotNullableColumnNotPartOfPkForExistingTable(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: 
AlterTableIT.testDisallowAddingNotNullableColumnNotPartOfPkForExistingTable:697 
» PhoenixIO
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testDropColumnFromSaltedTable(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testDropColumnFromSaltedTable:546 » PhoenixIO 
org.apache.hadoop.h...
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testDropColumnsWithImutability(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testDropColumnsWithImutability:823 » PhoenixIO 
org.apache.hadoop
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testDropCoveredColumn(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testDropCoveredColumn:319 » PhoenixIO 
org.apache.hadoop.hbase.DoN...
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO

  
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO
org.apache.phoenix.end2end.AlterTableIT.testDropVarCols(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testDropVarCols:673 » PhoenixIO 
org.apache.hadoop.hbase.DoNotRetr...
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testNewColumnFamilyInheritsTTLOfEmptyCF(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testNewColumnFamilyInheritsTTLOfEmptyCF:1302 » PhoenixIO 
org.apac...
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO

  
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO
org.apache.phoenix.end2end.AlterTableIT.testSetHColumnOrHTablePropertiesOnViewsNotAllowed(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testSetHColumnOrHTablePropertiesOnViewsNotAllowed:1213 » 
PhoenixIO
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO

  
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO
org.apache.phoenix.end2end.AlterTableIT.testSetHColumnPropertyAndAddColumnForDefaultCFForTableWithOnlyPKCols(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: 
AlterTableIT.testSetHColumnPropertyAndAddColumnForDefaultCFForTableWithOnlyPKCols:1711
 » PhoenixIO
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » 

Apache-Phoenix | 4.x-HBase-0.98 | Build Successful

2015-03-24 Thread Apache Jenkins Server
4.x-HBase-0.98 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf/incubator-phoenix.git

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.x-HBase-0.98/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.x-HBase-0.98/lastCompletedBuild/testReport/

Changes
[samarth.jain] PHOENIX-1744 CAST from UNSIGNED_LONG (_INT) to * TIMESTAMP is not supported (Dave Hacker)





phoenix git commit: PHOENIX-1744 CAST from UNSIGNED_LONG (_INT) to * TIMESTAMP is not supported (Dave Hacker)

2015-03-24 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/4.3 236f40785 -> 362e37264


PHOENIX-1744 CAST from UNSIGNED_LONG (_INT) to * TIMESTAMP is not supported 
(Dave Hacker)
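
In SQL terms, the change lets an UNSIGNED_LONG (or UNSIGNED_INT) value holding epoch milliseconds be cast to TIMESTAMP. A minimal, illustrative sketch in the spirit of the test below; the table, column names, and connection URL are invented for illustration:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;

public class UnsignedLongCastExample {
    public static void main(String[] args) throws Exception {
        // Assumed connection URL; replace with your cluster's quorum.
        Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
        conn.createStatement().execute(
            "CREATE TABLE IF NOT EXISTS EVENTS (ID UNSIGNED_INT NOT NULL PRIMARY KEY, TS UNSIGNED_LONG)");
        conn.createStatement().execute("UPSERT INTO EVENTS VALUES (1, 1426188807198)");
        conn.commit();
        // With this change, an UNSIGNED_LONG holding epoch millis can be cast to TIMESTAMP.
        ResultSet rs = conn.createStatement().executeQuery(
            "SELECT CAST(TS AS TIMESTAMP) FROM EVENTS WHERE ID = 1");
        while (rs.next()) {
            System.out.println(rs.getTimestamp(1));
        }
        conn.close();
    }
}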


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/362e3726
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/362e3726
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/362e3726

Branch: refs/heads/4.3
Commit: 362e37264d9998928b2051ec917e7251ad48456d
Parents: 236f407
Author: Samarth samarth.j...@salesforce.com
Authored: Tue Mar 24 09:56:30 2015 -0700
Committer: Samarth samarth.j...@salesforce.com
Committed: Tue Mar 24 09:56:30 2015 -0700

--
 .../phoenix/end2end/ToDateFunctionIT.java   | 57 
 .../phoenix/schema/types/PUnsignedLong.java |  5 ++
 2 files changed, 62 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/362e3726/phoenix-core/src/it/java/org/apache/phoenix/end2end/ToDateFunctionIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ToDateFunctionIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ToDateFunctionIT.java
index bda4ea5..8de39b7 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ToDateFunctionIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ToDateFunctionIT.java
@@ -33,6 +33,7 @@ import java.sql.Timestamp;
 import java.util.Properties;
 
 import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.schema.TypeMismatchException;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -176,4 +177,60 @@ public class ToDateFunctionIT extends BaseHBaseManagedTimeIT {
         callToDateFunction(
                 customTimeZoneConn, "TO_DATE('1970-01-01', 'yyyy-MM-dd')").getTime());
     }
+
+    @Test
+    public void testTimestampCast() throws SQLException {
+        Properties props = new Properties();
+        props.setProperty(QueryServices.DATE_FORMAT_TIMEZONE_ATTRIB, "GMT+1");
+        Connection customTimeZoneConn = DriverManager.getConnection(getUrl(), props);
+
+        assertEquals(
+                1426188807198L,
+                callToDateFunction(
+                        customTimeZoneConn, "CAST(1426188807198 AS TIMESTAMP)").getTime());
+
+
+        try {
+            callToDateFunction(
+                    customTimeZoneConn, "CAST(22005 AS TIMESTAMP)");
+            fail();
+        } catch (TypeMismatchException e) {
+
+        }
+    }
+
+    @Test
+    public void testUnsignedLongToTimestampCast() throws SQLException {
+        Properties props = new Properties();
+        props.setProperty(QueryServices.DATE_FORMAT_TIMEZONE_ATTRIB, "GMT+1");
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        conn.setAutoCommit(false);
+        try {
+            conn.prepareStatement(
+                    "create table TT("
+                            + " a unsigned_int not null, "
+                            + " b unsigned_int not null, "
+                            + " ts unsigned_long not null "
+                            + " constraint PK primary key (a, b, ts))").execute();
+            conn.commit();
+
+            conn.prepareStatement("upsert into TT values (0, 22120, 1426188807198)").execute();
+            conn.commit();
+
+            ResultSet rs = conn.prepareStatement("select a, b, ts, CAST(ts AS TIMESTAMP) from TT").executeQuery();
+            assertTrue(rs.next());
+            assertEquals(new Date(1426188807198L), rs.getObject(4));
+            rs.close();
+
+            try {
+                rs = conn.prepareStatement("select a, b, ts, CAST(b AS TIMESTAMP) from TT").executeQuery();
+                fail();
+            } catch (TypeMismatchException e) {
+
+            }
+
+        } finally {
+            conn.close();
+        }
+    }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/362e3726/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PUnsignedLong.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PUnsignedLong.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PUnsignedLong.java
index a0ead11..005e2c4 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PUnsignedLong.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PUnsignedLong.java
@@ -94,6 +94,11 @@ public class PUnsignedLong extends PDataType<Long> {
   }
 
   @Override
+  public boolean isCastableTo(PDataType targetType) {
+    return super.isCastableTo(targetType) || targetType.isCoercibleTo(PTimestamp.INSTANCE);
+  }
+
+  @Override
   public boolean isCoercibleTo(PDataType targetType) {

Build failed in Jenkins: Phoenix | Master #633

2015-03-24 Thread Apache Jenkins Server
See https://builds.apache.org/job/Phoenix-master/633/changes

Changes:

[samarth.jain] PHOENIX-1744 CAST from UNSIGNED_LONG (_INT) to * TIMESTAMP is 
not supported (Dave Hacker)

--
Started by an SCM change
Building remotely on ubuntu-5 (docker Ubuntu ubuntu5 ubuntu) in workspace 
https://builds.apache.org/job/Phoenix-master/ws/
  git rev-parse --is-inside-work-tree # timeout=10
Fetching changes from the remote Git repository
  git config remote.origin.url 
  https://git-wip-us.apache.org/repos/asf/phoenix.git # timeout=10
Fetching upstream changes from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
  git --version # timeout=10
  git fetch --tags --progress 
  https://git-wip-us.apache.org/repos/asf/phoenix.git 
  +refs/heads/*:refs/remotes/origin/*
  git rev-parse origin/master^{commit} # timeout=10
Checking out Revision 673847109206ea1e73ae2a18258da304678a110f (origin/master)
  git config core.sparsecheckout # timeout=10
  git checkout -f 673847109206ea1e73ae2a18258da304678a110f
  git rev-list 898c7912149a81a0e287a4d82e5faa7163de6ce1 # timeout=10
No emails were triggered.
[Phoenix-master] $ /bin/bash -xe /tmp/hudson923732664369810261.sh
[Phoenix-master] $ /home/jenkins/tools/maven/apache-maven-3.0.4/bin/mvn clean 
install
[INFO] Scanning for projects...
[WARNING] 
[WARNING] Some problems were encountered while building the effective model for 
org.apache.phoenix:phoenix-core:jar:5.0.0-SNAPSHOT
[WARNING] 'build.plugins.plugin.(groupId:artifactId)' must be unique but found 
duplicate declaration of plugin 
org.apache.maven.plugins:maven-dependency-plugin @ line 204, column 15
[WARNING] 
[WARNING] It is highly recommended to fix these problems because they threaten 
the stability of your build.
[WARNING] 
[WARNING] For this reason, future Maven versions might no longer support 
building such malformed projects.
[WARNING] 
[INFO] 
[INFO] Reactor Build Order:
[INFO] 
[INFO] Apache Phoenix
[INFO] Phoenix Core
[INFO] Phoenix - Flume
[INFO] Phoenix - Pig
[INFO] Phoenix Assembly
[INFO] Phoenix - Pherf
[INFO] 
[INFO] 
[INFO] Building Apache Phoenix 5.0.0-SNAPSHOT
[INFO] 
[INFO] 
[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ phoenix ---
[INFO] Deleting https://builds.apache.org/job/Phoenix-master/ws/target
[INFO] 
[INFO] --- maven-remote-resources-plugin:1.5:process (default) @ phoenix ---
[INFO] 
[INFO] --- maven-source-plugin:2.2.1:jar-no-fork (attach-sources) @ phoenix ---
[INFO] 
[INFO] --- maven-jar-plugin:2.4:test-jar (default) @ phoenix ---
[WARNING] JAR will be empty - no content was marked for inclusion!
[INFO] Building jar: 
https://builds.apache.org/job/Phoenix-master/633/artifact/target/phoenix-5.0.0-SNAPSHOT-tests.jar
[INFO] 
[INFO] --- maven-site-plugin:3.2:attach-descriptor (attach-descriptor) @ 
phoenix ---
[INFO] 
[INFO] --- maven-install-plugin:2.5.1:install (default-install) @ phoenix ---
[INFO] Installing https://builds.apache.org/job/Phoenix-master/ws/pom.xml to 
/home/jenkins/.m2/repository/org/apache/phoenix/phoenix/5.0.0-SNAPSHOT/phoenix-5.0.0-SNAPSHOT.pom
[INFO] Installing 
https://builds.apache.org/job/Phoenix-master/633/artifact/target/phoenix-5.0.0-SNAPSHOT-tests.jar
 to 
/home/jenkins/.m2/repository/org/apache/phoenix/phoenix/5.0.0-SNAPSHOT/phoenix-5.0.0-SNAPSHOT-tests.jar
[INFO] 
[INFO] 
[INFO] Building Phoenix Core 5.0.0-SNAPSHOT
[INFO] 
Downloading: 
http://people.apache.org/~garyh/mvn/org/apache/hbase/hbase-protocol/1.0.1-SNAPSHOT/maven-metadata.xml

[INFO] 
[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ phoenix-core ---
[INFO] Deleting 
https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target
[INFO] 
[INFO] --- build-helper-maven-plugin:1.9.1:add-test-source (add-test-source) @ 
phoenix-core ---
[INFO] Test Source directory: 
https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/src/it/java 
added.
[INFO] 
[INFO] --- build-helper-maven-plugin:1.9.1:add-test-resource 
(add-test-resource) @ phoenix-core ---
[INFO] 
[INFO] --- build-helper-maven-plugin:1.9.1:add-source (add-source) @ 
phoenix-core ---
[INFO] Source directory: 
https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/target/generated-sources/antlr3
 added.
[INFO] Source directory: 
https://builds.apache.org/job/Phoenix-master/ws/phoenix-core/src/main/antlr3 
added.
[INFO] 
[INFO] --- antlr3-maven-plugin:3.5:antlr (default) @ phoenix-core ---
[INFO] ANTLR: Processing source directory 

phoenix git commit: PHOENIX-1653 Support separate clusters for MR jobs

2015-03-24 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/master 673847109 -> ab9c9283e


PHOENIX-1653 Support separate clusters for MR jobs

Add support for the input and output formats of a Phoenix MapReduce job to
point to separate clusters using override configuration settings. Defaults to
existing behavior (HConstants.ZOOKEEPER_QUORUM)
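
A minimal sketch of how a job might use the new override settings. The helper names setInputCluster/setOutputCluster and the ZooKeeper quorums below are assumptions made for illustration in line with this description; they are not quoted from the diff:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;

public class SeparateClustersJobSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "phoenix-mr-two-clusters");
        // Read from one cluster and write to another; if these overrides are not set,
        // both sides fall back to HConstants.ZOOKEEPER_QUORUM as before.
        // setInputCluster/setOutputCluster are assumed to match the helpers this patch adds.
        PhoenixConfigurationUtil.setInputCluster(job.getConfiguration(), "zk-source:2181");
        PhoenixConfigurationUtil.setOutputCluster(job.getConfiguration(), "zk-target:2181");
        // ... configure PhoenixMapReduceUtil input/output and the mapper/reducer as usual ...
    }
}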


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ab9c9283
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ab9c9283
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ab9c9283

Branch: refs/heads/master
Commit: ab9c9283eca692b6f3ef03a598f4c012e63ef83a
Parents: 6738471
Author: gjacoby gjac...@salesforce.com
Authored: Fri Feb 27 16:49:14 2015 -0800
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Tue Mar 24 20:08:08 2015 +0100

--
 .../phoenix/mapreduce/PhoenixInputFormat.java   | 15 ++--
 .../phoenix/mapreduce/PhoenixRecordWriter.java  |  2 +-
 .../phoenix/mapreduce/index/IndexTool.java  |  2 +-
 .../index/PhoenixIndexImportMapper.java |  2 +-
 .../phoenix/mapreduce/util/ConnectionUtil.java  | 88 ++--
 .../util/PhoenixConfigurationUtil.java  | 72 ++--
 .../mapreduce/util/PhoenixMapReduceUtil.java| 22 -
 .../util/PhoenixConfigurationUtilTest.java  | 60 -
 .../pig/util/QuerySchemaParserFunction.java |  2 +-
 .../pig/util/SqlQueryToColumnInfoFunction.java  |  2 +-
 10 files changed, 219 insertions(+), 48 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ab9c9283/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
index a83b9ae..31759b4 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
@@ -98,15 +98,16 @@ public class PhoenixInputFormat<T extends DBWritable> extends InputFormat<NullWr
      * @throws IOException
      * @throws SQLException
      */
-    private QueryPlan getQueryPlan(final JobContext context,final Configuration configuration) throws IOException {
+    private QueryPlan getQueryPlan(final JobContext context, final Configuration configuration)
+            throws IOException {
         Preconditions.checkNotNull(context);
-        try{
+        try {
             final String currentScnValue = configuration.get(PhoenixConfigurationUtil.CURRENT_SCN_VALUE);
             final Properties overridingProps = new Properties();
             if(currentScnValue != null) {
                 overridingProps.put(PhoenixRuntime.CURRENT_SCN_ATTRIB, currentScnValue);
             }
-            final Connection connection = ConnectionUtil.getConnection(configuration,overridingProps);
+            final Connection connection = ConnectionUtil.getInputConnection(configuration, overridingProps);
             final String selectStatement = PhoenixConfigurationUtil.getSelectStatement(configuration);
             Preconditions.checkNotNull(selectStatement);
             final Statement statement = connection.createStatement();
@@ -116,9 +117,11 @@ public class PhoenixInputFormat<T extends DBWritable> extends InputFormat<NullWr
             // Initialize the query plan so it sets up the parallel scans
             queryPlan.iterator();
             return queryPlan;
-        } catch(Exception exception) {
-            LOG.error(String.format("Failed to get the query plan with error [%s]",exception.getMessage()));
+        } catch (Exception exception) {
+            LOG.error(String.format("Failed to get the query plan with error [%s]",
+                exception.getMessage()));
             throw new RuntimeException(exception);
         }
-   }
+    }
+
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ab9c9283/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
index 4d26bf4..5843076 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
@@ -46,7 +46,7 @@ public class PhoenixRecordWriter<T extends DBWritable> extends RecordWriter<Nul
 private long numRecords = 0;
 
 public PhoenixRecordWriter(final Configuration configuration) throws 

phoenix git commit: PHOENIX-1653 Support separate clusters for MR jobs

2015-03-24 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.x f4180fa40 -> 7de8ee1e9


PHOENIX-1653 Support separate clusters for MR jobs

Add support for the input and output formats of a Phoenix MapReduce job to
point to separate clusters using override configuration settings. Defaults to
existing behavior (HConstants.ZOOKEEPER_QUORUM)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/7de8ee1e
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/7de8ee1e
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/7de8ee1e

Branch: refs/heads/4.x-HBase-1.x
Commit: 7de8ee1e914f5e0008ca9d983869757e4ca92b78
Parents: f4180fa
Author: gjacoby gjac...@salesforce.com
Authored: Fri Feb 27 16:49:14 2015 -0800
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Tue Mar 24 20:07:52 2015 +0100

--
 .../phoenix/mapreduce/PhoenixInputFormat.java   | 15 ++--
 .../phoenix/mapreduce/PhoenixRecordWriter.java  |  2 +-
 .../phoenix/mapreduce/index/IndexTool.java  |  2 +-
 .../index/PhoenixIndexImportMapper.java |  2 +-
 .../phoenix/mapreduce/util/ConnectionUtil.java  | 88 ++--
 .../util/PhoenixConfigurationUtil.java  | 72 ++--
 .../mapreduce/util/PhoenixMapReduceUtil.java| 22 -
 .../util/PhoenixConfigurationUtilTest.java  | 60 -
 .../pig/util/QuerySchemaParserFunction.java |  2 +-
 .../pig/util/SqlQueryToColumnInfoFunction.java  |  2 +-
 10 files changed, 219 insertions(+), 48 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/7de8ee1e/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
index a83b9ae..31759b4 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
@@ -98,15 +98,16 @@ public class PhoenixInputFormat<T extends DBWritable> extends InputFormat<NullWr
      * @throws IOException
      * @throws SQLException
      */
-    private QueryPlan getQueryPlan(final JobContext context,final Configuration configuration) throws IOException {
+    private QueryPlan getQueryPlan(final JobContext context, final Configuration configuration)
+            throws IOException {
         Preconditions.checkNotNull(context);
-        try{
+        try {
             final String currentScnValue = configuration.get(PhoenixConfigurationUtil.CURRENT_SCN_VALUE);
             final Properties overridingProps = new Properties();
             if(currentScnValue != null) {
                 overridingProps.put(PhoenixRuntime.CURRENT_SCN_ATTRIB, currentScnValue);
             }
-            final Connection connection = ConnectionUtil.getConnection(configuration,overridingProps);
+            final Connection connection = ConnectionUtil.getInputConnection(configuration, overridingProps);
             final String selectStatement = PhoenixConfigurationUtil.getSelectStatement(configuration);
             Preconditions.checkNotNull(selectStatement);
             final Statement statement = connection.createStatement();
@@ -116,9 +117,11 @@ public class PhoenixInputFormat<T extends DBWritable> extends InputFormat<NullWr
             // Initialize the query plan so it sets up the parallel scans
             queryPlan.iterator();
             return queryPlan;
-        } catch(Exception exception) {
-            LOG.error(String.format("Failed to get the query plan with error [%s]",exception.getMessage()));
+        } catch (Exception exception) {
+            LOG.error(String.format("Failed to get the query plan with error [%s]",
+                exception.getMessage()));
             throw new RuntimeException(exception);
         }
-   }
+    }
+
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/7de8ee1e/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
index 4d26bf4..5843076 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
@@ -46,7 +46,7 @@ public class PhoenixRecordWriter<T extends DBWritable> extends RecordWriter<Nul
 private long numRecords = 0;
 
 public PhoenixRecordWriter(final Configuration configuration) 

phoenix git commit: PHOENIX-1653 Support separate clusters for MR jobs

2015-03-24 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 c92de2749 -> af0d65a0a


PHOENIX-1653 Support separate clusters for MR jobs

Add support for the input and output formats of a Phoenix MapReduce job to
point to separate clusters using override configuration settings. Defaults to
existing behavior (HConstants.ZOOKEEPER_QUORUM)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/af0d65a0
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/af0d65a0
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/af0d65a0

Branch: refs/heads/4.x-HBase-0.98
Commit: af0d65a0abdbb8dc73d766a15e3ffa0e6d854d13
Parents: c92de27
Author: gjacoby gjac...@salesforce.com
Authored: Fri Feb 27 16:49:14 2015 -0800
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Tue Mar 24 19:40:21 2015 +0100

--
 .../phoenix/mapreduce/PhoenixInputFormat.java   | 15 ++--
 .../phoenix/mapreduce/PhoenixRecordWriter.java  |  2 +-
 .../phoenix/mapreduce/index/IndexTool.java  |  2 +-
 .../index/PhoenixIndexImportMapper.java |  2 +-
 .../phoenix/mapreduce/util/ConnectionUtil.java  | 88 ++--
 .../util/PhoenixConfigurationUtil.java  | 72 ++--
 .../mapreduce/util/PhoenixMapReduceUtil.java| 22 -
 .../util/PhoenixConfigurationUtilTest.java  | 60 -
 .../pig/util/QuerySchemaParserFunction.java |  2 +-
 .../pig/util/SqlQueryToColumnInfoFunction.java  |  2 +-
 10 files changed, 219 insertions(+), 48 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/af0d65a0/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
index a83b9ae..31759b4 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
@@ -98,15 +98,16 @@ public class PhoenixInputFormat<T extends DBWritable> extends InputFormat<NullWr
      * @throws IOException
      * @throws SQLException
      */
-    private QueryPlan getQueryPlan(final JobContext context,final Configuration configuration) throws IOException {
+    private QueryPlan getQueryPlan(final JobContext context, final Configuration configuration)
+            throws IOException {
         Preconditions.checkNotNull(context);
-        try{
+        try {
             final String currentScnValue = configuration.get(PhoenixConfigurationUtil.CURRENT_SCN_VALUE);
             final Properties overridingProps = new Properties();
             if(currentScnValue != null) {
                 overridingProps.put(PhoenixRuntime.CURRENT_SCN_ATTRIB, currentScnValue);
             }
-            final Connection connection = ConnectionUtil.getConnection(configuration,overridingProps);
+            final Connection connection = ConnectionUtil.getInputConnection(configuration, overridingProps);
             final String selectStatement = PhoenixConfigurationUtil.getSelectStatement(configuration);
             Preconditions.checkNotNull(selectStatement);
             final Statement statement = connection.createStatement();
@@ -116,9 +117,11 @@ public class PhoenixInputFormat<T extends DBWritable> extends InputFormat<NullWr
             // Initialize the query plan so it sets up the parallel scans
             queryPlan.iterator();
             return queryPlan;
-        } catch(Exception exception) {
-            LOG.error(String.format("Failed to get the query plan with error [%s]",exception.getMessage()));
+        } catch (Exception exception) {
+            LOG.error(String.format("Failed to get the query plan with error [%s]",
+                exception.getMessage()));
             throw new RuntimeException(exception);
         }
-   }
+    }
+
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/af0d65a0/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
index 4d26bf4..5843076 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
@@ -46,7 +46,7 @@ public class PhoenixRecordWriter<T extends DBWritable> extends RecordWriter<Nul
 private long numRecords = 0;
 
 public PhoenixRecordWriter(final Configuration configuration) 

Apache-Phoenix | 4.x-HBase-0.98 | Build Successful

2015-03-24 Thread Apache Jenkins Server
4.x-HBase-0.98 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf/incubator-phoenix.git

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.x-HBase-0.98/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.x-HBase-0.98/lastCompletedBuild/testReport/

Changes
[thomas] PHOENIX-1676 Set priority of Index Updates correctly






phoenix git commit: PHOENIX-1676 Set priority of Index Updates correctly

2015-03-24 Thread tdsilva
Repository: phoenix
Updated Branches:
  refs/heads/master a784bc050 -> 898c79121


PHOENIX-1676 Set priority of Index Updates correctly


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/898c7912
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/898c7912
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/898c7912

Branch: refs/heads/master
Commit: 898c7912149a81a0e287a4d82e5faa7163de6ce1
Parents: a784bc0
Author: Thomas tdsi...@salesforce.com
Authored: Mon Mar 23 22:17:16 2015 -0700
Committer: Thomas tdsi...@salesforce.com
Committed: Mon Mar 23 23:07:23 2015 -0700

--
 .../phoenix/end2end/index/IndexQosIT.java   | 243 +++
 .../hbase/ipc/PhoenixIndexRpcScheduler.java |   3 +
 .../phoenix/hbase/index/IndexQosCompat.java |  98 
 .../index/IndexQosRpcControllerFactory.java |  12 +-
 .../index/table/CoprocessorHTableFactory.java   |  20 --
 .../phoenix/jdbc/PhoenixDatabaseMetaData.java   |   7 +-
 .../org/apache/phoenix/util/SchemaUtil.java |   7 +
 7 files changed, 263 insertions(+), 127 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/898c7912/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexQosIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexQosIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexQosIT.java
new file mode 100644
index 000..bab8f38
--- /dev/null
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexQosIT.java
@@ -0,0 +1,243 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE
+ * file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
+ * License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by
+ * applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language
+ * governing permissions and limitations under the License.
+ */
+package org.apache.phoenix.end2end.index;
+
+import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL;
+import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR;
+import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_TERMINATOR;
+import static org.apache.phoenix.util.PhoenixRuntime.PHOENIX_TEST_DRIVER_URL_PARAM;
+import static org.apache.phoenix.util.TestUtil.LOCALHOST;
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Abortable;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.ipc.BalancedQueueRpcExecutor;
+import org.apache.hadoop.hbase.ipc.CallRunner;
+import org.apache.hadoop.hbase.ipc.PhoenixIndexRpcScheduler;
+import org.apache.hadoop.hbase.ipc.PriorityFunction;
+import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
+import org.apache.hadoop.hbase.ipc.RpcExecutor;
+import org.apache.hadoop.hbase.ipc.RpcScheduler;
+import org.apache.hadoop.hbase.master.AssignmentManager;
+import org.apache.hadoop.hbase.master.HMaster;
+import org.apache.hadoop.hbase.regionserver.HRegionServer;
+import org.apache.hadoop.hbase.regionserver.RSRpcServices;
+import org.apache.hadoop.hbase.regionserver.RegionServerServices;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
+import org.apache.phoenix.hbase.index.IndexQosRpcControllerFactory;
+import org.apache.phoenix.hbase.index.ipc.PhoenixIndexRpcSchedulerFactory;
+import org.apache.phoenix.jdbc.PhoenixTestDriver;
+import org.apache.phoenix.query.BaseTest;
+import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.QueryUtil;
+import 

Build failed in Jenkins: Phoenix | Master #634

2015-03-24 Thread Apache Jenkins Server
See https://builds.apache.org/job/Phoenix-master/634/changes

Changes:

[gabrielr] PHOENIX-1653 Support separate clusters for MR jobs

--
[...truncated 127396 lines...]

org.apache.phoenix.end2end.AlterTableIT.testDropColumnsWithImutability(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testDropColumnsWithImutability:823 » PhoenixIO 
org.apache.hadoop
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testDropCoveredColumn(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testDropCoveredColumn:319 » PhoenixIO 
org.apache.hadoop.hbase.DoN...
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO

  
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO
org.apache.phoenix.end2end.AlterTableIT.testDropVarCols(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testDropVarCols:673 » PhoenixIO 
org.apache.hadoop.hbase.DoNotRetr...
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testNewColumnFamilyInheritsTTLOfEmptyCF(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testNewColumnFamilyInheritsTTLOfEmptyCF:1302 » PhoenixIO 
org.apac...
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO

  
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO
org.apache.phoenix.end2end.AlterTableIT.testSetHColumnOrHTablePropertiesOnViewsNotAllowed(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testSetHColumnOrHTablePropertiesOnViewsNotAllowed:1213 » 
PhoenixIO
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO

  
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO
org.apache.phoenix.end2end.AlterTableIT.testSetHColumnPropertyAndAddColumnForDefaultCFForTableWithOnlyPKCols(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: 
AlterTableIT.testSetHColumnPropertyAndAddColumnForDefaultCFForTableWithOnlyPKCols:1711
 » PhoenixIO
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO

  
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO
org.apache.phoenix.end2end.AlterTableIT.testSetHTableHColumnAndPhoenixTableProperties(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: AlterTableIT.testSetHTableHColumnAndPhoenixTableProperties:1063 » 
PhoenixIO or...
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO

  
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO
  
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO
org.apache.phoenix.end2end.AlterTableIT.testSetPropertyAndAddColumnForDifferentColumnFamilies(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: 
AlterTableIT.testSetPropertyAndAddColumnForDifferentColumnFamilies:1427 » 
PhoenixIO
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO

org.apache.phoenix.end2end.AlterTableIT.testSetPropertyAndAddColumnForNewAndExistingColumnFamily(org.apache.phoenix.end2end.AlterTableIT)
  Run 1: 
AlterTableIT.testSetPropertyAndAddColumnForNewAndExistingColumnFamily:1351 » 
PhoenixIO
  Run 2: 
AlterTableITBaseOwnClusterHBaseManagedTimeIT.cleanUpAfterTest:29-BaseTest.deletePriorTables:766-BaseTest.deletePriorTables:777-BaseTest.deletePriorTables:794
 » PhoenixIO