phoenix git commit: PHOENIX-3497 Provide a work around for HBASE-17122

2016-11-17 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.1 940fd36b5 -> b1e290295


PHOENIX-3497 Provide a work around for HBASE-17122


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b1e29029
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b1e29029
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b1e29029

Branch: refs/heads/4.x-HBase-1.1
Commit: b1e2902956e539b407b09f3103d1288339506795
Parents: 940fd36
Author: Samarth 
Authored: Thu Nov 17 23:38:18 2016 -0800
Committer: Samarth 
Committed: Thu Nov 17 23:38:18 2016 -0800

--
 .../apache/phoenix/end2end/AlterTableIT.java| 24 
 .../phoenix/iterate/BaseResultIterators.java| 16 +
 .../org/apache/phoenix/util/ServerUtil.java |  5 
 3 files changed, 45 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b1e29029/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
index 48f4217..5da0ee7 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
@@ -40,12 +40,14 @@ import java.util.Properties;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeepDeletedCells;
+import org.apache.hadoop.hbase.TableNotEnabledException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.exception.PhoenixIOException;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
@@ -2207,5 +2209,27 @@ public class AlterTableIT extends 
ParallelStatsDisabledIT {
}
}

+   @Test
+public void testQueryingDisabledTable() throws Exception {
+try (Connection conn = DriverManager.getConnection(getUrl())) {
+String tableName = generateUniqueName();
+conn.createStatement().execute(
+"CREATE TABLE " + tableName
++ " (k1 VARCHAR NOT NULL, k2 VARCHAR, CONSTRAINT PK 
PRIMARY KEY(K1,K2)) ");
+try (HBaseAdmin admin = 
conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+admin.disableTable(Bytes.toBytes(tableName));
+}
+String query = "SELECT * FROM " + tableName + " WHERE 1=1";
+try (Connection conn2 = DriverManager.getConnection(getUrl())) {
+try (ResultSet rs = 
conn2.createStatement().executeQuery(query)) {
+assertFalse(rs.next());
+fail();
+} catch (PhoenixIOException ioe) {
+assertTrue(ioe.getCause() instanceof 
TableNotEnabledException);
+}
+}
+}
+}
+   
 }
  

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b1e29029/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
index 25f3bec..940dc56 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
@@ -52,6 +52,8 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.TableNotEnabledException;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
 import org.apache.hadoop.hbase.filter.PageFilter;
@@ -751,6 +753,20 @@ public abstract class BaseResultIterators extends 
ExplainTable implements Result
 try { // Rethrow as SQLException
 throw ServerUtil.parseServerException(e);
 } catch (StaleRegionBoundaryCacheException e2) {
+   /*
+* Note that a StaleRegionBoundaryCacheException

phoenix git commit: PHOENIX-3497 Provide a work around for HBASE-17122

2016-11-17 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/master 404b4ede7 -> 3aa9c43fd


PHOENIX-3497 Provide a work around for HBASE-17122


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/3aa9c43f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/3aa9c43f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/3aa9c43f

Branch: refs/heads/master
Commit: 3aa9c43fd5a8612b63edc42507b057ba8215dac4
Parents: 404b4ed
Author: Samarth 
Authored: Thu Nov 17 23:38:47 2016 -0800
Committer: Samarth 
Committed: Thu Nov 17 23:38:47 2016 -0800

--
 .../apache/phoenix/end2end/AlterTableIT.java| 24 
 .../phoenix/iterate/BaseResultIterators.java| 16 +
 .../org/apache/phoenix/util/ServerUtil.java |  5 
 3 files changed, 45 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/3aa9c43f/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
index 48f4217..5da0ee7 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
@@ -40,12 +40,14 @@ import java.util.Properties;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeepDeletedCells;
+import org.apache.hadoop.hbase.TableNotEnabledException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.exception.PhoenixIOException;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
@@ -2207,5 +2209,27 @@ public class AlterTableIT extends 
ParallelStatsDisabledIT {
}
}

+   @Test
+public void testQueryingDisabledTable() throws Exception {
+try (Connection conn = DriverManager.getConnection(getUrl())) {
+String tableName = generateUniqueName();
+conn.createStatement().execute(
+"CREATE TABLE " + tableName
++ " (k1 VARCHAR NOT NULL, k2 VARCHAR, CONSTRAINT PK 
PRIMARY KEY(K1,K2)) ");
+try (HBaseAdmin admin = 
conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+admin.disableTable(Bytes.toBytes(tableName));
+}
+String query = "SELECT * FROM " + tableName + " WHERE 1=1";
+try (Connection conn2 = DriverManager.getConnection(getUrl())) {
+try (ResultSet rs = 
conn2.createStatement().executeQuery(query)) {
+assertFalse(rs.next());
+fail();
+} catch (PhoenixIOException ioe) {
+assertTrue(ioe.getCause() instanceof 
TableNotEnabledException);
+}
+}
+}
+}
+   
 }
  

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3aa9c43f/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
index 25f3bec..940dc56 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
@@ -52,6 +52,8 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.TableNotEnabledException;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
 import org.apache.hadoop.hbase.filter.PageFilter;
@@ -751,6 +753,20 @@ public abstract class BaseResultIterators extends 
ExplainTable implements Result
 try { // Rethrow as SQLException
 throw ServerUtil.parseServerException(e);
 } catch (StaleRegionBoundaryCacheException e2) {
+   /*
+* Note that a StaleRegionBoundaryCacheException 
could be 

phoenix git commit: PHOENIX-3497 Provide a work around for HBASE-17122

2016-11-17 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 6e514f8e6 -> f4f2e949d


PHOENIX-3497 Provide a work around for HBASE-17122


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f4f2e949
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f4f2e949
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f4f2e949

Branch: refs/heads/4.x-HBase-0.98
Commit: f4f2e949d0dd7e553e97df53b59d207bade774c2
Parents: 6e514f8
Author: Samarth 
Authored: Thu Nov 17 23:37:14 2016 -0800
Committer: Samarth 
Committed: Thu Nov 17 23:37:14 2016 -0800

--
 .../apache/phoenix/end2end/AlterTableIT.java| 24 
 .../phoenix/iterate/BaseResultIterators.java| 16 +
 .../org/apache/phoenix/util/ServerUtil.java |  5 
 3 files changed, 45 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f4f2e949/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
index 48f4217..5da0ee7 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
@@ -40,12 +40,14 @@ import java.util.Properties;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeepDeletedCells;
+import org.apache.hadoop.hbase.TableNotEnabledException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.exception.PhoenixIOException;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
@@ -2207,5 +2209,27 @@ public class AlterTableIT extends 
ParallelStatsDisabledIT {
}
}

+   @Test
+public void testQueryingDisabledTable() throws Exception {
+try (Connection conn = DriverManager.getConnection(getUrl())) {
+String tableName = generateUniqueName();
+conn.createStatement().execute(
+"CREATE TABLE " + tableName
++ " (k1 VARCHAR NOT NULL, k2 VARCHAR, CONSTRAINT PK 
PRIMARY KEY(K1,K2)) ");
+try (HBaseAdmin admin = 
conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+admin.disableTable(Bytes.toBytes(tableName));
+}
+String query = "SELECT * FROM " + tableName + " WHERE 1=1";
+try (Connection conn2 = DriverManager.getConnection(getUrl())) {
+try (ResultSet rs = 
conn2.createStatement().executeQuery(query)) {
+assertFalse(rs.next());
+fail();
+} catch (PhoenixIOException ioe) {
+assertTrue(ioe.getCause() instanceof 
TableNotEnabledException);
+}
+}
+}
+}
+   
 }
  

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f4f2e949/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
index 25f3bec..940dc56 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
@@ -52,6 +52,8 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.TableNotEnabledException;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
 import org.apache.hadoop.hbase.filter.PageFilter;
@@ -751,6 +753,20 @@ public abstract class BaseResultIterators extends 
ExplainTable implements Result
 try { // Rethrow as SQLException
 throw ServerUtil.parseServerException(e);
 } catch (StaleRegionBoundaryCacheException e2) {
+   /*
+* Note that a StaleRegionBoundaryCacheException

phoenix git commit: PHOENIX-3497 Provide a work around for HBASE-17122

2016-11-17 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/4.8-HBase-1.1 28fbb13f0 -> f6161a483


PHOENIX-3497 Provide a work around for HBASE-17122


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f6161a48
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f6161a48
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f6161a48

Branch: refs/heads/4.8-HBase-1.1
Commit: f6161a4832570e82bb712cde8e206478e6cd20ed
Parents: 28fbb13
Author: Samarth 
Authored: Thu Nov 17 23:36:48 2016 -0800
Committer: Samarth 
Committed: Thu Nov 17 23:36:48 2016 -0800

--
 .../apache/phoenix/end2end/AlterTableIT.java| 24 
 .../phoenix/iterate/BaseResultIterators.java| 16 +
 .../org/apache/phoenix/util/ServerUtil.java |  5 
 3 files changed, 45 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f6161a48/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
index e09dcea..7b3fc47 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
@@ -42,6 +42,7 @@ import java.util.Properties;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeepDeletedCells;
+import org.apache.hadoop.hbase.TableNotEnabledException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
@@ -49,6 +50,7 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.coprocessor.MetaDataProtocol;
+import org.apache.phoenix.exception.PhoenixIOException;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
@@ -2216,5 +2218,27 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
}
}

+   @Test
+public void testQueryingDisabledTable() throws Exception {
+try (Connection conn = DriverManager.getConnection(getUrl())) {
+String tableName = generateRandomString();
+conn.createStatement().execute(
+"CREATE TABLE " + tableName
++ " (k1 VARCHAR NOT NULL, k2 VARCHAR, CONSTRAINT PK 
PRIMARY KEY(K1,K2)) ");
+try (HBaseAdmin admin = 
conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+admin.disableTable(Bytes.toBytes(tableName));
+}
+String query = "SELECT * FROM " + tableName + " WHERE 1=1";
+try (Connection conn2 = DriverManager.getConnection(getUrl())) {
+try (ResultSet rs = 
conn2.createStatement().executeQuery(query)) {
+assertFalse(rs.next());
+fail();
+} catch (PhoenixIOException ioe) {
+assertTrue(ioe.getCause() instanceof 
TableNotEnabledException);
+}
+}
+}
+}
+   
 }
  

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f6161a48/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
index 581e0cd..3bb6463 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
@@ -52,6 +52,8 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.TableNotEnabledException;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
 import org.apache.hadoop.hbase.filter.PageFilter;
@@ -777,6 +779,20 @@ public abstract class BaseResultIterators extends 
ExplainTable implements Result
 try { // Rethrow as SQLException
 throw ServerUtil.parseServerException(e);
 } catch (StaleRegionBoundaryCacheException e2) {
+

phoenix git commit: PHOENIX-3497 Provide a work around for HBASE-17122

2016-11-17 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/4.8-HBase-1.0 a80043e27 -> 01ccd0964


PHOENIX-3497 Provide a work around for HBASE-17122


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/01ccd096
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/01ccd096
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/01ccd096

Branch: refs/heads/4.8-HBase-1.0
Commit: 01ccd09640f40b3fb0ffc7c6f3877a69d70ba987
Parents: a80043e
Author: Samarth 
Authored: Thu Nov 17 23:36:23 2016 -0800
Committer: Samarth 
Committed: Thu Nov 17 23:36:23 2016 -0800

--
 .../apache/phoenix/end2end/AlterTableIT.java| 24 
 .../phoenix/iterate/BaseResultIterators.java| 16 +
 .../org/apache/phoenix/util/ServerUtil.java |  5 
 3 files changed, 45 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/01ccd096/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
index e09dcea..7b3fc47 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
@@ -42,6 +42,7 @@ import java.util.Properties;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeepDeletedCells;
+import org.apache.hadoop.hbase.TableNotEnabledException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
@@ -49,6 +50,7 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.coprocessor.MetaDataProtocol;
+import org.apache.phoenix.exception.PhoenixIOException;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
@@ -2216,5 +2218,27 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
}
}

+   @Test
+public void testQueryingDisabledTable() throws Exception {
+try (Connection conn = DriverManager.getConnection(getUrl())) {
+String tableName = generateRandomString();
+conn.createStatement().execute(
+"CREATE TABLE " + tableName
++ " (k1 VARCHAR NOT NULL, k2 VARCHAR, CONSTRAINT PK 
PRIMARY KEY(K1,K2)) ");
+try (HBaseAdmin admin = 
conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+admin.disableTable(Bytes.toBytes(tableName));
+}
+String query = "SELECT * FROM " + tableName + " WHERE 1=1";
+try (Connection conn2 = DriverManager.getConnection(getUrl())) {
+try (ResultSet rs = 
conn2.createStatement().executeQuery(query)) {
+assertFalse(rs.next());
+fail();
+} catch (PhoenixIOException ioe) {
+assertTrue(ioe.getCause() instanceof 
TableNotEnabledException);
+}
+}
+}
+}
+   
 }
  

http://git-wip-us.apache.org/repos/asf/phoenix/blob/01ccd096/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
index 581e0cd..3bb6463 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
@@ -52,6 +52,8 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.TableNotEnabledException;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
 import org.apache.hadoop.hbase.filter.PageFilter;
@@ -777,6 +779,20 @@ public abstract class BaseResultIterators extends 
ExplainTable implements Result
 try { // Rethrow as SQLException
 throw ServerUtil.parseServerException(e);
 } catch (StaleRegionBoundaryCacheException e2) {
+

phoenix git commit: PHOENIX-3497 Provide a work around for HBASE-17122

2016-11-17 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/4.8-HBase-0.98 2c4c96c71 -> fbd41f9dc


PHOENIX-3497 Provide a work around for HBASE-17122


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/fbd41f9d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/fbd41f9d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/fbd41f9d

Branch: refs/heads/4.8-HBase-0.98
Commit: fbd41f9dc73248386298d40015abcd0e03bacdf6
Parents: 2c4c96c
Author: Samarth 
Authored: Thu Nov 17 23:35:55 2016 -0800
Committer: Samarth 
Committed: Thu Nov 17 23:35:55 2016 -0800

--
 .../apache/phoenix/end2end/AlterTableIT.java| 24 
 .../phoenix/iterate/BaseResultIterators.java| 16 +
 .../org/apache/phoenix/util/ServerUtil.java |  5 
 3 files changed, 45 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/fbd41f9d/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
index e09dcea..7b3fc47 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
@@ -42,6 +42,7 @@ import java.util.Properties;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeepDeletedCells;
+import org.apache.hadoop.hbase.TableNotEnabledException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
@@ -49,6 +50,7 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.coprocessor.MetaDataProtocol;
+import org.apache.phoenix.exception.PhoenixIOException;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
@@ -2216,5 +2218,27 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
}
}

+   @Test
+public void testQueryingDisabledTable() throws Exception {
+try (Connection conn = DriverManager.getConnection(getUrl())) {
+String tableName = generateRandomString();
+conn.createStatement().execute(
+"CREATE TABLE " + tableName
++ " (k1 VARCHAR NOT NULL, k2 VARCHAR, CONSTRAINT PK 
PRIMARY KEY(K1,K2)) ");
+try (HBaseAdmin admin = 
conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+admin.disableTable(Bytes.toBytes(tableName));
+}
+String query = "SELECT * FROM " + tableName + " WHERE 1=1";
+try (Connection conn2 = DriverManager.getConnection(getUrl())) {
+try (ResultSet rs = 
conn2.createStatement().executeQuery(query)) {
+assertFalse(rs.next());
+fail();
+} catch (PhoenixIOException ioe) {
+assertTrue(ioe.getCause() instanceof 
TableNotEnabledException);
+}
+}
+}
+}
+   
 }
  

http://git-wip-us.apache.org/repos/asf/phoenix/blob/fbd41f9d/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
index 581e0cd..3bb6463 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
@@ -52,6 +52,8 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.TableNotEnabledException;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
 import org.apache.hadoop.hbase.filter.PageFilter;
@@ -777,6 +779,20 @@ public abstract class BaseResultIterators extends 
ExplainTable implements Result
 try { // Rethrow as SQLException
 throw ServerUtil.parseServerException(e);
 } catch (StaleRegionBoundaryCacheException e2) {
+  

phoenix git commit: PHOENIX-3497 Provide a work around for HBASE-17122

2016-11-17 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/4.8-HBase-1.2 16795aef6 -> cd48969c9


PHOENIX-3497 Provide a work around for HBASE-17122


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/cd48969c
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/cd48969c
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/cd48969c

Branch: refs/heads/4.8-HBase-1.2
Commit: cd48969c9ef6784f6448c884981e78519eebf1dd
Parents: 16795ae
Author: Samarth 
Authored: Thu Nov 17 23:35:11 2016 -0800
Committer: Samarth 
Committed: Thu Nov 17 23:35:11 2016 -0800

--
 .../apache/phoenix/end2end/AlterTableIT.java| 24 
 .../phoenix/iterate/BaseResultIterators.java| 16 +
 .../org/apache/phoenix/util/ServerUtil.java |  5 
 3 files changed, 45 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/cd48969c/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
index e09dcea..7b3fc47 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
@@ -42,6 +42,7 @@ import java.util.Properties;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeepDeletedCells;
+import org.apache.hadoop.hbase.TableNotEnabledException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
@@ -49,6 +50,7 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.coprocessor.MetaDataProtocol;
+import org.apache.phoenix.exception.PhoenixIOException;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
@@ -2216,5 +2218,27 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
}
}

+   @Test
+public void testQueryingDisabledTable() throws Exception {
+try (Connection conn = DriverManager.getConnection(getUrl())) {
+String tableName = generateRandomString();
+conn.createStatement().execute(
+"CREATE TABLE " + tableName
++ " (k1 VARCHAR NOT NULL, k2 VARCHAR, CONSTRAINT PK 
PRIMARY KEY(K1,K2)) ");
+try (HBaseAdmin admin = 
conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+admin.disableTable(Bytes.toBytes(tableName));
+}
+String query = "SELECT * FROM " + tableName + " WHERE 1=1";
+try (Connection conn2 = DriverManager.getConnection(getUrl())) {
+try (ResultSet rs = 
conn2.createStatement().executeQuery(query)) {
+assertFalse(rs.next());
+fail();
+} catch (PhoenixIOException ioe) {
+assertTrue(ioe.getCause() instanceof 
TableNotEnabledException);
+}
+}
+}
+}
+   
 }
  

http://git-wip-us.apache.org/repos/asf/phoenix/blob/cd48969c/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
index 581e0cd..3bb6463 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
@@ -52,6 +52,8 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.TableNotEnabledException;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
 import org.apache.hadoop.hbase.filter.PageFilter;
@@ -777,6 +779,20 @@ public abstract class BaseResultIterators extends 
ExplainTable implements Result
 try { // Rethrow as SQLException
 throw ServerUtil.parseServerException(e);
 } catch (StaleRegionBoundaryCacheException e2) {
+

Build failed in Jenkins: Phoenix-4.8-HBase-1.2 #60

2016-11-17 Thread Apache Jenkins Server
See 

Changes:

[jamestaylor] PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key

--
[...truncated 737 lines...]
Running org.apache.phoenix.tx.TxCheckpointIT
Tests run: 38, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 715.136 sec - 
in org.apache.phoenix.end2end.index.LocalIndexIT
Tests run: 20, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 212.478 sec - 
in org.apache.phoenix.tx.TxCheckpointIT
Tests run: 21, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: 330.24 sec - 
in org.apache.phoenix.tx.TransactionIT
Tests run: 40, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1,010.637 sec 
- in org.apache.phoenix.end2end.index.MutableIndexIT
Tests run: 136, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1,967.927 sec 
- in org.apache.phoenix.end2end.index.IndexIT

Results :

Tests run: 1239, Failures: 0, Errors: 0, Skipped: 5

[INFO] 
[INFO] --- maven-failsafe-plugin:2.19.1:integration-test 
(HBaseManagedTimeTableReuseTest) @ phoenix-core ---

---
 T E S T S
---
Running org.apache.phoenix.end2end.ArithmeticQueryIT
Running org.apache.phoenix.end2end.AbsFunctionEnd2EndIT
Running org.apache.phoenix.end2end.ArrayFillFunctionIT
Running org.apache.phoenix.end2end.AlterSessionIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.125 sec - in 
org.apache.phoenix.end2end.AlterSessionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.318 sec - in 
org.apache.phoenix.end2end.AbsFunctionEnd2EndIT
Running org.apache.phoenix.end2end.ArraysWithNullsIT
Running org.apache.phoenix.end2end.ArrayToStringFunctionIT
Running org.apache.phoenix.end2end.AutoCommitIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.734 sec - in 
org.apache.phoenix.end2end.AutoCommitIT
Running org.apache.phoenix.end2end.CbrtFunctionEnd2EndIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.715 sec - in 
org.apache.phoenix.end2end.CbrtFunctionEnd2EndIT
Running org.apache.phoenix.end2end.ConvertTimezoneFunctionIT
Tests run: 26, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 32.583 sec - 
in org.apache.phoenix.end2end.ArrayFillFunctionIT
Running org.apache.phoenix.end2end.DecodeFunctionIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 27.611 sec - in 
org.apache.phoenix.end2end.ConvertTimezoneFunctionIT
Tests run: 36, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 46.852 sec - 
in org.apache.phoenix.end2end.ArrayToStringFunctionIT
Running org.apache.phoenix.end2end.DynamicFamilyIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.419 sec - in 
org.apache.phoenix.end2end.DynamicFamilyIT
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 27.201 sec - in 
org.apache.phoenix.end2end.DecodeFunctionIT
Running org.apache.phoenix.end2end.FirstValueFunctionIT
Running org.apache.phoenix.end2end.DynamicUpsertIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.166 sec - in 
org.apache.phoenix.end2end.DynamicUpsertIT
Running org.apache.phoenix.end2end.GetSetByteBitFunctionEnd2EndIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.257 sec - in 
org.apache.phoenix.end2end.GetSetByteBitFunctionEnd2EndIT
Tests run: 16, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 68.976 sec - 
in org.apache.phoenix.end2end.ArraysWithNullsIT
Running org.apache.phoenix.end2end.MD5FunctionIT
Running org.apache.phoenix.end2end.LikeExpressionIT
Running org.apache.phoenix.end2end.DistinctPrefixFilterIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.575 sec - in 
org.apache.phoenix.end2end.MD5FunctionIT
Running org.apache.phoenix.end2end.MinMaxAggregateFunctionIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 27.718 sec - in 
org.apache.phoenix.end2end.FirstValueFunctionIT
Running org.apache.phoenix.end2end.NthValueFunctionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.766 sec - in 
org.apache.phoenix.end2end.MinMaxAggregateFunctionIT
Running org.apache.phoenix.end2end.OctetLengthFunctionEnd2EndIT
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 20.676 sec - in 
org.apache.phoenix.end2end.LikeExpressionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.575 sec - in 
org.apache.phoenix.end2end.OctetLengthFunctionEnd2EndIT
Running org.apache.phoenix.end2end.PowerFunctionEnd2EndIT
Running org.apache.phoenix.end2end.PrimitiveTypeIT
Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.296 sec - in 
org.apache.phoenix.end2end.PrimitiveTypeIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.725 sec - in 
org.apache.phoenix.end2end.PowerFunctionEnd2EndIT
Running org.apache.phoenix.end2end.QueryMoreIT
Running org.apache.phoenix.end2end.RTrimFunctionIT
Tests run: 26, Failures: 0, Errors: 0, Skipped

Apache Phoenix - Timeout crawler - Build https://builds.apache.org/job/Phoenix-4.8-HBase-1.0/52/

2016-11-17 Thread Apache Jenkins Server
[...truncated 21 lines...]
Looking at the log, list of test(s) that timed-out:

Build:
https://builds.apache.org/job/Phoenix-4.8-HBase-1.0/52/


Affected test class(es):
Set(['org.apache.phoenix.end2end.AlterTableIT', 
'org.apache.phoenix.end2end.CsvBulkLoadToolIT', 
'org.apache.phoenix.end2end.QueryWithLimitIT', 
'org.apache.phoenix.end2end.MutableIndexToolIT'])


Build step 'Execute shell' marked build as failure
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any


phoenix git commit: PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key

2016-11-17 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.8-HBase-0.98 5e29f8306 -> 2c4c96c71


PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/2c4c96c7
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/2c4c96c7
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/2c4c96c7

Branch: refs/heads/4.8-HBase-0.98
Commit: 2c4c96c7181beb010442373e972818115026450a
Parents: 5e29f83
Author: James Taylor 
Authored: Thu Nov 17 16:45:51 2016 -0800
Committer: James Taylor 
Committed: Thu Nov 17 21:52:09 2016 -0800

--
 .../apache/phoenix/end2end/UpsertValuesIT.java  | 22 
 .../apache/phoenix/compile/UpsertCompiler.java  |  8 +++
 .../UngroupedAggregateRegionObserver.java   | 11 --
 .../function/ArrayConcatFunction.java   |  5 -
 .../function/ArrayModifierFunction.java | 10 -
 .../phoenix/schema/types/PArrayDataType.java|  8 +++
 .../apache/phoenix/schema/types/PBinary.java|  4 ++--
 .../org/apache/phoenix/schema/types/PChar.java  |  4 ++--
 .../apache/phoenix/schema/types/PDataType.java  | 22 +++-
 .../apache/phoenix/schema/types/PDecimal.java   |  6 +++---
 .../apache/phoenix/schema/types/PVarbinary.java |  4 ++--
 .../apache/phoenix/schema/types/PVarchar.java   |  4 ++--
 .../phoenix/schema/types/PDataTypeTest.java | 22 
 13 files changed, 79 insertions(+), 51 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/2c4c96c7/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
index 9bbe23e..cb41b81 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
@@ -164,6 +164,28 @@ public class UpsertValuesIT extends 
BaseClientManagedTimeIT {
 }
 
 @Test
+public void testUpsertValuesWithDescDecimal() throws Exception {
+long ts = nextTimestamp();
+Properties props = new Properties();
+props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(ts));
+Connection conn = DriverManager.getConnection(getUrl(), props);
+conn.createStatement().execute("create table UpsertDecimalDescTest (k 
DECIMAL(12,3) NOT NULL PRIMARY KEY DESC)");
+conn.close();
+
+props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(ts+5));
+conn = DriverManager.getConnection(getUrl(), props);
+conn.createStatement().execute("upsert into UpsertDecimalDescTest 
values (0.0)");
+conn.commit();
+conn.close();
+
+props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(ts+10));
+conn = DriverManager.getConnection(getUrl(), props);
+ResultSet rs = conn.createStatement().executeQuery("select k from 
UpsertDecimalDescTest");
+assertTrue(rs.next());
+assertEquals(0.0, rs.getDouble(1), 0.001);
+}
+
+@Test
 public void testUpsertRandomValues() throws Exception {
 long ts = nextTimestamp();
 Properties props = new Properties();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/2c4c96c7/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
index 0922de2..8512ec4 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
@@ -198,8 +198,8 @@ public class UpsertCompiler {
 Integer scale = rsScale == 0 ? null : rsScale;
 // We are guaranteed that the two column will have 
compatible types,
 // as we checked that before.
-if (!column.getDataType().isSizeCompatible(ptr, value, 
column.getDataType(), precision, scale,
-column.getMaxLength(), column.getScale())) { throw 
new SQLExceptionInfo.Builder(
+if (!column.getDataType().isSizeCompatible(ptr, value, 
column.getDataType(), SortOrder.getDefault(), precision,
+scale, column.getMaxLength(), column.getScale())) 
{ throw new SQLExceptionInfo.Builder(
 
SQLExceptionCode.DATA_EXCEEDS_MAX_CAPACITY).setColumnName(column.getName().getStri

phoenix git commit: PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key

2016-11-17 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.8-HBase-1.0 00722bd27 -> a80043e27


PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a80043e2
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a80043e2
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a80043e2

Branch: refs/heads/4.8-HBase-1.0
Commit: a80043e27d13b039b97aaf1896780041c0ed7e51
Parents: 00722bd
Author: James Taylor 
Authored: Thu Nov 17 16:45:51 2016 -0800
Committer: James Taylor 
Committed: Thu Nov 17 21:50:23 2016 -0800

--
 .../apache/phoenix/end2end/UpsertValuesIT.java  | 22 
 .../apache/phoenix/compile/UpsertCompiler.java  | 14 +
 .../UngroupedAggregateRegionObserver.java   | 11 --
 .../function/ArrayConcatFunction.java   |  5 -
 .../function/ArrayModifierFunction.java | 10 -
 .../phoenix/schema/types/PArrayDataType.java|  8 +++
 .../apache/phoenix/schema/types/PBinary.java|  4 ++--
 .../org/apache/phoenix/schema/types/PChar.java  |  4 ++--
 .../apache/phoenix/schema/types/PDataType.java  | 22 +++-
 .../apache/phoenix/schema/types/PDecimal.java   |  6 +++---
 .../apache/phoenix/schema/types/PVarbinary.java |  4 ++--
 .../apache/phoenix/schema/types/PVarchar.java   |  4 ++--
 .../phoenix/schema/types/PDataTypeTest.java | 22 
 13 files changed, 80 insertions(+), 56 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a80043e2/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
index 9bbe23e..cb41b81 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
@@ -164,6 +164,28 @@ public class UpsertValuesIT extends 
BaseClientManagedTimeIT {
 }
 
 @Test
+public void testUpsertValuesWithDescDecimal() throws Exception {
+long ts = nextTimestamp();
+Properties props = new Properties();
+props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(ts));
+Connection conn = DriverManager.getConnection(getUrl(), props);
+conn.createStatement().execute("create table UpsertDecimalDescTest (k 
DECIMAL(12,3) NOT NULL PRIMARY KEY DESC)");
+conn.close();
+
+props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(ts+5));
+conn = DriverManager.getConnection(getUrl(), props);
+conn.createStatement().execute("upsert into UpsertDecimalDescTest 
values (0.0)");
+conn.commit();
+conn.close();
+
+props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(ts+10));
+conn = DriverManager.getConnection(getUrl(), props);
+ResultSet rs = conn.createStatement().executeQuery("select k from 
UpsertDecimalDescTest");
+assertTrue(rs.next());
+assertEquals(0.0, rs.getDouble(1), 0.001);
+}
+
+@Test
 public void testUpsertRandomValues() throws Exception {
 long ts = nextTimestamp();
 Properties props = new Properties();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a80043e2/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
index 731ad58..8512ec4 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
@@ -33,9 +33,6 @@ import java.util.Set;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -86,14 +83,13 @@ import org.apache.phoenix.schema.PColumn;
 import org.apache.phoenix.schema.PColumnImpl;
 import org.apache.phoenix.schema.PName;
 import org.apache.phoenix.schema.PTable;
-import org.apache.phoenix.schema.PTableKey;
 import org.apache.phoenix.schema.PTable.IndexType;
 import org.apache.phoenix.schema.PTable.ViewType;
 import org.apache.phoenix.schema.PTableImpl;
+import org.apache.phoenix.s

phoenix git commit: PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key

2016-11-17 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.8-HBase-1.1 f92f7a661 -> 28fbb13f0


PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/28fbb13f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/28fbb13f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/28fbb13f

Branch: refs/heads/4.8-HBase-1.1
Commit: 28fbb13f0da2758e8d8d71b478f92f6ecc17406b
Parents: f92f7a6
Author: James Taylor 
Authored: Thu Nov 17 16:45:51 2016 -0800
Committer: James Taylor 
Committed: Thu Nov 17 21:44:48 2016 -0800

--
 .../apache/phoenix/end2end/UpsertValuesIT.java  | 22 
 .../apache/phoenix/compile/UpsertCompiler.java  | 15 +
 .../UngroupedAggregateRegionObserver.java   | 11 --
 .../function/ArrayConcatFunction.java   |  5 -
 .../function/ArrayModifierFunction.java | 10 -
 .../phoenix/schema/types/PArrayDataType.java|  8 +++
 .../apache/phoenix/schema/types/PBinary.java|  4 ++--
 .../org/apache/phoenix/schema/types/PChar.java  |  4 ++--
 .../apache/phoenix/schema/types/PDataType.java  | 22 +++-
 .../apache/phoenix/schema/types/PDecimal.java   |  6 +++---
 .../apache/phoenix/schema/types/PVarbinary.java |  4 ++--
 .../apache/phoenix/schema/types/PVarchar.java   |  4 ++--
 .../phoenix/schema/types/PDataTypeTest.java | 22 
 13 files changed, 80 insertions(+), 57 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/28fbb13f/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
index 9bbe23e..cb41b81 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
@@ -164,6 +164,28 @@ public class UpsertValuesIT extends 
BaseClientManagedTimeIT {
 }
 
 @Test
+public void testUpsertValuesWithDescDecimal() throws Exception {
+long ts = nextTimestamp();
+Properties props = new Properties();
+props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(ts));
+Connection conn = DriverManager.getConnection(getUrl(), props);
+conn.createStatement().execute("create table UpsertDecimalDescTest (k 
DECIMAL(12,3) NOT NULL PRIMARY KEY DESC)");
+conn.close();
+
+props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(ts+5));
+conn = DriverManager.getConnection(getUrl(), props);
+conn.createStatement().execute("upsert into UpsertDecimalDescTest 
values (0.0)");
+conn.commit();
+conn.close();
+
+props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(ts+10));
+conn = DriverManager.getConnection(getUrl(), props);
+ResultSet rs = conn.createStatement().executeQuery("select k from 
UpsertDecimalDescTest");
+assertTrue(rs.next());
+assertEquals(0.0, rs.getDouble(1), 0.001);
+}
+
+@Test
 public void testUpsertRandomValues() throws Exception {
 long ts = nextTimestamp();
 Properties props = new Properties();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/28fbb13f/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
index 26855aa..8512ec4 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
@@ -33,10 +33,6 @@ import java.util.Set;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.client.HRegionLocator;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -87,14 +83,13 @@ import org.apache.phoenix.schema.PColumn;
 import org.apache.phoenix.schema.PColumnImpl;
 import org.apache.phoenix.schema.PName;
 import org.apache.phoenix.schema.PTable;
-import org.apache.phoenix.schema.PTableKey;
 import org.apache.phoenix.schema.PTable.IndexType;
 import org.apache.phoenix.schema.PTable.ViewType;
 import org.apache

Build failed in Jenkins: Phoenix-4.8-HBase-1.2 #59

2016-11-17 Thread Apache Jenkins Server
See 

--
Started by an SCM change
[EnvInject] - Loading node environment variables.
Building remotely on H16 (ubuntu) in workspace 

 > git rev-parse --is-inside-work-tree # timeout=10
Fetching changes from the remote Git repository
 > git config remote.origin.url 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git # timeout=10
ERROR: Error fetching remote repo 'origin'
hudson.plugins.git.GitException: Failed to fetch from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
at hudson.plugins.git.GitSCM.fetchFrom(GitSCM.java:799)
at hudson.plugins.git.GitSCM.retrieveChanges(GitSCM.java:1055)
at hudson.plugins.git.GitSCM.checkout(GitSCM.java:1086)
at hudson.scm.SCM.checkout(SCM.java:485)
at hudson.model.AbstractProject.checkout(AbstractProject.java:1269)
at 
hudson.model.AbstractBuild$AbstractBuildExecution.defaultCheckout(AbstractBuild.java:604)
at jenkins.scm.SCMCheckoutStrategy.checkout(SCMCheckoutStrategy.java:86)
at 
hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:529)
at hudson.model.Run.execute(Run.java:1741)
at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
at hudson.model.ResourceController.execute(ResourceController.java:98)
at hudson.model.Executor.run(Executor.java:410)
Caused by: hudson.plugins.git.GitException: Command "git config 
remote.origin.url https://git-wip-us.apache.org/repos/asf/phoenix.git" returned 
status code 4:
stdout: 
stderr: error: failed to write new configuration file 


at 
org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandIn(CliGitAPIImpl.java:1723)
at 
org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandIn(CliGitAPIImpl.java:1699)
at 
org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandIn(CliGitAPIImpl.java:1695)
at 
org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommand(CliGitAPIImpl.java:1317)
at 
org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommand(CliGitAPIImpl.java:1329)
at 
org.jenkinsci.plugins.gitclient.CliGitAPIImpl.setRemoteUrl(CliGitAPIImpl.java:1031)
at hudson.plugins.git.GitAPI.setRemoteUrl(GitAPI.java:160)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at 
hudson.remoting.RemoteInvocationHandler$RPCRequest.perform(RemoteInvocationHandler.java:884)
at 
hudson.remoting.RemoteInvocationHandler$RPCRequest.call(RemoteInvocationHandler.java:859)
at 
hudson.remoting.RemoteInvocationHandler$RPCRequest.call(RemoteInvocationHandler.java:818)
at hudson.remoting.UserRequest.perform(UserRequest.java:153)
at hudson.remoting.UserRequest.perform(UserRequest.java:50)
at hudson.remoting.Request$2.run(Request.java:332)
at 
hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:68)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
at ..remote call to H16(Native Method)
at hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1416)
at hudson.remoting.UserResponse.retrieve(UserRequest.java:253)
at hudson.remoting.Channel.call(Channel.java:781)
at 
hudson.remoting.RemoteInvocationHandler.invoke(RemoteInvocationHandler.java:249)
at com.sun.proxy.$Proxy151.setRemoteUrl(Unknown Source)
at 
org.jenkinsci.plugins.gitclient.RemoteGitImpl.setRemoteUrl(RemoteGitImpl.java:298)
at hudson.plugins.git.GitSCM.fetchFrom(GitSCM.java:787)
... 11 more
ERROR: null
Retrying after 10 seconds
 > git rev-parse --is-inside-work-tree # timeout=10
Fetching changes from the remote Git repository
 > git config remote.origin.url 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git # timeout=10
ERROR: Error fetching remote repo 'origin'
hudson.plugins.git.GitException: Failed to fetch from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
at hudson.plugins.git.GitSCM.fetchFrom(GitSCM.java:799)
at hudson.plugins.git.GitSCM.retrieveChanges(GitSCM.java:1055)
at hudson.plugins.git.GitSCM.checkout(GitSCM.java:1086)
at hudson.scm.SCM.checkout(SCM.java:485)
at hudson.model.AbstractProject.checkout(AbstractProject.java:1269)
at 
hudson.mode

Apache Phoenix - Timeout crawler - Build https://builds.apache.org/job/Phoenix-4.8-HBase-1.2/58/

2016-11-17 Thread Apache Jenkins Server
[...truncated 61 lines...]


Build failed in Jenkins: Phoenix-4.8-HBase-1.2 #58

2016-11-17 Thread Apache Jenkins Server
See 

--
[...truncated 97 lines...]
remote: Compressing objects:  63% (27102/43018)   
remote: Compressing objects:  64% (27532/43018)   
remote: Compressing objects:  65% (27962/43018)   
remote: Compressing objects:  66% (28392/43018)   
remote: Compressing objects:  67% (28823/43018)   
remote: Compressing objects:  68% (29253/43018)   
remote: Compressing objects:  69% (29683/43018)   
remote: Compressing objects:  70% (30113/43018)   
remote: Compressing objects:  71% (30543/43018)   
remote: Compressing objects:  72% (30973/43018)   
remote: Compressing objects:  73% (31404/43018)   
remote: Compressing objects:  74% (31834/43018)   
remote: Compressing objects:  75% (32264/43018)   
remote: Compressing objects:  76% (32694/43018)   
remote: Compressing objects:  77% (33124/43018)   
remote: Compressing objects:  78% (33555/43018)   
remote: Compressing objects:  79% (33985/43018)   
remote: Compressing objects:  80% (34415/43018)   
remote: Compressing objects:  81% (34845/43018)   
remote: Compressing objects:  82% (35275/43018)   
remote: Compressing objects:  83% (35705/43018)   
remote: Compressing objects:  84% (36136/43018)   
remote: Compressing objects:  85% (36566/43018)   
remote: Compressing objects:  86% (36996/43018)   
remote: Compressing objects:  87% (37426/43018)   
remote: Compressing objects:  88% (37856/43018)   
remote: Compressing objects:  89% (38287/43018)   
remote: Compressing objects:  90% (38717/43018)   
remote: Compressing objects:  91% (39147/43018)   
remote: Compressing objects:  92% (39577/43018)   
remote: Compressing objects:  93% (40007/43018)   
remote: Compressing objects:  94% (40437/43018)   
remote: Compressing objects:  95% (40868/43018)   
remote: Compressing objects:  96% (41298/43018)   
remote: Compressing objects:  97% (41728/43018)   
remote: Compressing objects:  97% (42139/43018)   
remote: Compressing objects:  98% (42158/43018)   
remote: Compressing objects:  99% (42588/43018)   
remote: Compressing objects: 100% (43018/43018)   
remote: Compressing objects: 100% (43018/43018), done.
Receiving objects:   0% (1/104799)   
Receiving objects:   1% (1048/104799)   
Receiving objects:   2% (2096/104799)   
Receiving objects:   3% (3144/104799)   
Receiving objects:   4% (4192/104799)   
Receiving objects:   5% (5240/104799)   
Receiving objects:   6% (6288/104799)   
Receiving objects:   7% (7336/104799)   
Receiving objects:   8% (8384/104799)   
Receiving objects:   9% (9432/104799)   
Receiving objects:  10% (10480/104799)   
Receiving objects:  11% (11528/104799)   
Receiving objects:  12% (12576/104799)   
Receiving objects:  13% (13624/104799)   
Receiving objects:  14% (14672/104799)   
Receiving objects:  15% (15720/104799)   
Receiving objects:  16% (16768/104799)   
Receiving objects:  17% (17816/104799)   
Receiving objects:  18% (18864/104799)   
Receiving objects:  19% (19912/104799)   
Receiving objects:  20% (20960/104799)   
Receiving objects:  21% (22008/104799)   
Receiving objects:  22% (23056/104799)   
Receiving objects:  23% (24104/104799)   
Receiving objects:  24% (25152/104799)   
Receiving objects:  25% (26200/104799)   
Receiving objects:  26% (27248/104799)   
Receiving objects:  27% (28296/104799)   
Receiving objects:  28% (29344/104799)   
Receiving objects:  29% (30392/104799)   
Receiving objects:  30% (31440/104799)   
Receiving objects:  31% (32488/104799)   
Receiving objects:  32% (33536/104799)   
Receiving objects:  33% (34584/104799)   
Receiving objects:  34% (35632/104799)   
Receiving objects:  35% (36680/104799)   
Receiving objects:  36% (37728/104799)   
Receiving objects:  37% (38776/104799)   
Receiving objects:  38% (39824/104799)   
Receiving objects:  39% (40872/104799)   
Receiving objects:  40% (41920/104799)   
Receiving objects:  41% (42968/104799)   
Receiving objects:  42% (44016/104799)   
Receiving objects:  43% (45064/104799)   
Receiving objects:  44% (46112/104799)   
Receiving objects:  45% (47160/104799)   
Receiving objects:  46% (48208/104799)   
Receiving objects:  47% (49256/104799)   
Receiving objects:  48% (50304/104799)   
Receiving objects:  49% (51352/104799)   
Receiving objects:  50% (52400/104799)   
Receiving objects:  51% (53448/104799)   
Receiving objects:  52% (54496/104799)   
Receiving objects:  53% (55544/104799)   
Receiving objects:  54% (56592/104799)   
Receiving objects:  55% (57640/104799)   
Receiving objects:  56% (58688/104799)   
fatal: write error: No space left on device
fatal: index-pack failed

at 
org.jenkinsci.plugins.gitclient.CliGitA

Apache Phoenix - Timeout crawler - Build https://builds.apache.org/job/Phoenix-4.8-HBase-1.2/57/

2016-11-17 Thread Apache Jenkins Server
[...truncated 53 lines...]


Jenkins build is back to normal : Phoenix-4.8-HBase-1.2 #57

2016-11-17 Thread Apache Jenkins Server
See 



Jenkins build is back to normal : Phoenix-4.8-HBase-1.1 #54

2016-11-17 Thread Apache Jenkins Server
See 



Jenkins build is back to normal : Phoenix | Master #1500

2016-11-17 Thread Apache Jenkins Server
See 



Apache-Phoenix | 4.x-HBase-0.98 | Build Successful

2016-11-17 Thread Apache Jenkins Server
4.x-HBase-0.98 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf?p=phoenix.git;a=shortlog;h=refs/heads/4.x-HBase-0.98

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.x-HBase-0.98/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.x-HBase-0.98/lastCompletedBuild/testReport/

Changes
[jamestaylor] PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key

[samarth] Ignore LocalIndexIT#testLocalIndexRoundTrip till PHOENIX-3496 is fixed



Build times for last couple of runsLatest build time is the right most | Legend blue: normal, red: test failure, gray: timeout


Jenkins build is back to normal : Phoenix-4.8-HBase-0.98 #54

2016-11-17 Thread Apache Jenkins Server
See 



phoenix git commit: PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key

2016-11-17 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.8-HBase-1.2 5f7682bb9 -> 16795aef6


PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/16795aef
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/16795aef
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/16795aef

Branch: refs/heads/4.8-HBase-1.2
Commit: 16795aef6d0f22e96bf787ec50b9043d61c0c379
Parents: 5f7682b
Author: James Taylor 
Authored: Thu Nov 17 16:45:51 2016 -0800
Committer: James Taylor 
Committed: Thu Nov 17 18:15:29 2016 -0800

--
 .../apache/phoenix/end2end/UpsertValuesIT.java  | 22 
 .../apache/phoenix/compile/UpsertCompiler.java  | 15 +
 .../UngroupedAggregateRegionObserver.java   | 11 --
 .../function/ArrayConcatFunction.java   |  5 -
 .../function/ArrayModifierFunction.java | 10 -
 .../phoenix/schema/types/PArrayDataType.java|  8 +++
 .../apache/phoenix/schema/types/PBinary.java|  4 ++--
 .../org/apache/phoenix/schema/types/PChar.java  |  4 ++--
 .../apache/phoenix/schema/types/PDataType.java  | 22 +++-
 .../apache/phoenix/schema/types/PDecimal.java   |  6 +++---
 .../apache/phoenix/schema/types/PVarbinary.java |  4 ++--
 .../apache/phoenix/schema/types/PVarchar.java   |  4 ++--
 .../phoenix/schema/types/PDataTypeTest.java | 22 
 13 files changed, 80 insertions(+), 57 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/16795aef/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
index 9bbe23e..cb41b81 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
@@ -164,6 +164,28 @@ public class UpsertValuesIT extends 
BaseClientManagedTimeIT {
 }
 
 @Test
+public void testUpsertValuesWithDescDecimal() throws Exception {
+long ts = nextTimestamp();
+Properties props = new Properties();
+props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(ts));
+Connection conn = DriverManager.getConnection(getUrl(), props);
+conn.createStatement().execute("create table UpsertDecimalDescTest (k 
DECIMAL(12,3) NOT NULL PRIMARY KEY DESC)");
+conn.close();
+
+props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(ts+5));
+conn = DriverManager.getConnection(getUrl(), props);
+conn.createStatement().execute("upsert into UpsertDecimalDescTest 
values (0.0)");
+conn.commit();
+conn.close();
+
+props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(ts+10));
+conn = DriverManager.getConnection(getUrl(), props);
+ResultSet rs = conn.createStatement().executeQuery("select k from 
UpsertDecimalDescTest");
+assertTrue(rs.next());
+assertEquals(0.0, rs.getDouble(1), 0.001);
+}
+
+@Test
 public void testUpsertRandomValues() throws Exception {
 long ts = nextTimestamp();
 Properties props = new Properties();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/16795aef/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
index 26855aa..8512ec4 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
@@ -33,10 +33,6 @@ import java.util.Set;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.client.HRegionLocator;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -87,14 +83,13 @@ import org.apache.phoenix.schema.PColumn;
 import org.apache.phoenix.schema.PColumnImpl;
 import org.apache.phoenix.schema.PName;
 import org.apache.phoenix.schema.PTable;
-import org.apache.phoenix.schema.PTableKey;
 import org.apache.phoenix.schema.PTable.IndexType;
 import org.apache.phoenix.schema.PTable.ViewType;
 import org.apache

Apache-Phoenix | 4.x-HBase-1.0 | Build Successful

2016-11-17 Thread Apache Jenkins Server
4.x-HBase-1.0 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf?p=phoenix.git;a=shortlog;h=refs/heads/4.x-HBase-1.0

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.x-HBase-1.0/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.x-HBase-1.0/lastCompletedBuild/testReport/

Changes
[jamestaylor] PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key



Build times for last couple of runsLatest build time is the right most | Legend blue: normal, red: test failure, gray: timeout


Apache Phoenix - Timeout crawler - Build https://builds.apache.org/job/Phoenix-4.8-HBase-1.0/51/

2016-11-17 Thread Apache Jenkins Server
[...truncated 21 lines...]
Looking at the log, list of test(s) that timed-out:

Build:
https://builds.apache.org/job/Phoenix-4.8-HBase-1.0/51/


Affected test class(es):
Set(['org.apache.phoenix.end2end.AlterTableIT', 
'org.apache.phoenix.end2end.CsvBulkLoadToolIT', 
'org.apache.phoenix.end2end.QueryTimeoutIT', 
'org.apache.phoenix.end2end.MutableIndexToolIT'])


Build step 'Execute shell' marked build as failure
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any


phoenix git commit: PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key

2016-11-17 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.1 a90ee616c -> 940fd36b5


PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/940fd36b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/940fd36b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/940fd36b

Branch: refs/heads/4.x-HBase-1.1
Commit: 940fd36b597b8f1c3fb085367e44f99e9f0ff061
Parents: a90ee61
Author: James Taylor 
Authored: Thu Nov 17 17:02:17 2016 -0800
Committer: James Taylor 
Committed: Thu Nov 17 17:52:12 2016 -0800

--
 .../java/org/apache/phoenix/schema/types/PDataType.java | 12 ++--
 .../org/apache/phoenix/schema/types/PDataTypeTest.java  |  2 +-
 2 files changed, 7 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/940fd36b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
index 18956e8..de1e63f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
@@ -692,29 +692,29 @@ public abstract class PDataType<T> implements 
DataType<T>, Comparable<PDataType<?>> {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/940fd36b/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeTest.java
index d07364c..c28e5b1 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeTest.java
@@ -1671,7 +1671,7 @@ public class PDataTypeTest {
 }
 
 private void testReadDecimalPrecisionAndScaleFromRawBytes(BigDecimal bd, 
SortOrder sortOrder) {
-byte[] b = PDecimal.INSTANCE.toBytes(bd);
+byte[] b = PDecimal.INSTANCE.toBytes(bd, sortOrder);
 int[] v = PDataType.getDecimalPrecisionAndScale(b, 0, b.length, 
sortOrder);
 assertEquals(bd.toString(), bd.precision(), v[0]);
 assertEquals(bd.toString(), bd.scale(), v[1]);



Build failed in Jenkins: Phoenix | Master #1499

2016-11-17 Thread Apache Jenkins Server
See 

Changes:

[jamestaylor] PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key

--
[...truncated 891 lines...]
Running org.apache.phoenix.end2end.CsvBulkLoadToolIT
Running org.apache.phoenix.end2end.CountDistinctCompressionIT
Running org.apache.phoenix.end2end.ContextClassloaderIT
Running org.apache.phoenix.end2end.FlappingLocalIndexIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 46.541 sec - in 
org.apache.phoenix.end2end.ConnectionUtilIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.264 sec - in 
org.apache.phoenix.end2end.ContextClassloaderIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 15.522 sec - in 
org.apache.phoenix.end2end.CountDistinctCompressionIT
Running org.apache.phoenix.end2end.IndexExtendedIT
Running org.apache.phoenix.end2end.QueryWithLimitIT
Running org.apache.phoenix.end2end.QueryTimeoutIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 16.156 sec - in 
org.apache.phoenix.end2end.QueryWithLimitIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.14 sec - in 
org.apache.phoenix.end2end.QueryTimeoutIT
Running org.apache.phoenix.end2end.RenewLeaseIT
Running org.apache.phoenix.end2end.SpillableGroupByIT
Running org.apache.phoenix.end2end.StatsCollectorIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 16.996 sec - in 
org.apache.phoenix.end2end.RenewLeaseIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 18.085 sec - in 
org.apache.phoenix.end2end.SpillableGroupByIT
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 109.274 sec - 
in org.apache.phoenix.end2end.CsvBulkLoadToolIT
Running org.apache.phoenix.end2end.SysTableNamespaceMappedStatsCollectorIT
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 115.965 sec - 
in org.apache.phoenix.end2end.FlappingLocalIndexIT
Running org.apache.phoenix.end2end.UserDefinedFunctionsIT
Running org.apache.phoenix.end2end.index.ImmutableIndexIT
Running org.apache.phoenix.end2end.index.LocalIndexIT
Running org.apache.phoenix.end2end.index.MutableIndexFailureIT
Running org.apache.phoenix.end2end.index.MutableIndexReplicationIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 45.157 sec - in 
org.apache.phoenix.end2end.index.MutableIndexReplicationIT
Tests run: 14, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 93.27 sec - in 
org.apache.phoenix.end2end.UserDefinedFunctionsIT
Running org.apache.phoenix.end2end.index.ReadOnlyIndexFailureIT
Running org.apache.phoenix.end2end.index.txn.TxWriteFailureIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 36.317 sec - in 
org.apache.phoenix.end2end.index.ReadOnlyIndexFailureIT
Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 196.029 sec - 
in org.apache.phoenix.end2end.index.ImmutableIndexIT
Running org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
Tests run: 40, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 277.225 sec - 
in org.apache.phoenix.end2end.StatsCollectorIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 28.518 sec - in 
org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
Running 
org.apache.phoenix.hbase.index.covered.EndToEndCoveredColumnsIndexBuilderIT
Tests run: 40, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 274.722 sec - 
in org.apache.phoenix.end2end.SysTableNamespaceMappedStatsCollectorIT
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 92.177 sec - in 
org.apache.phoenix.end2end.index.txn.TxWriteFailureIT
Tests run: 80, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 383.886 sec - 
in org.apache.phoenix.end2end.IndexExtendedIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 25.948 sec - in 
org.apache.phoenix.hbase.index.covered.EndToEndCoveredColumnsIndexBuilderIT
Running org.apache.phoenix.execute.PartialCommitIT
Running org.apache.phoenix.hbase.index.covered.example.FailWithoutRetriesIT
Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.88 sec - in 
org.apache.phoenix.execute.PartialCommitIT
Running org.apache.phoenix.hbase.index.covered.example.EndToEndCoveredIndexingIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.968 sec - in 
org.apache.phoenix.hbase.index.covered.example.FailWithoutRetriesIT
Running 
org.apache.phoenix.hbase.index.covered.example.EndtoEndIndexingWithCompressionIT
Running org.apache.phoenix.iterate.RoundRobinResultIteratorWithStatsIT
Running org.apache.phoenix.iterate.ScannerLeaseRenewalIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 16.209 sec - in 
org.apache.phoenix.iterate.RoundRobinResultIteratorWithStatsIT
Running org.apache.phoenix.monitoring.PhoenixMetricsIT
Running org.apache.phoenix.rpc.PhoenixClientRpcIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.943 sec - in 
org.apache.phoenix.rpc.PhoenixClientRpcIT

Jenkins build is back to normal : Phoenix | 4.x-HBase-0.98 #1390

2016-11-17 Thread Apache Jenkins Server
See 



phoenix git commit: Ignore LocalIndexIT#testLocalIndexRoundTrip till PHOENIX-3496 is fixed

2016-11-17 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/4.8-HBase-1.1 c15a18a82 -> f92f7a661


Ignore LocalIndexIT#testLocalIndexRoundTrip till PHOENIX-3496 is fixed


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f92f7a66
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f92f7a66
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f92f7a66

Branch: refs/heads/4.8-HBase-1.1
Commit: f92f7a6614478c677e0cb9f92f3b3e1996121b38
Parents: c15a18a
Author: Samarth 
Authored: Thu Nov 17 17:18:43 2016 -0800
Committer: Samarth 
Committed: Thu Nov 17 17:18:43 2016 -0800

--
 .../it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java   | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f92f7a66/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
index b850536..7cc292c 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
@@ -67,6 +67,7 @@ import org.apache.phoenix.util.ReadOnlyProps;
 import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.TestUtil;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -123,7 +124,8 @@ public class LocalIndexIT extends BaseHBaseManagedTimeIT {
 return Arrays.asList(true, false);
 }
 
-@Test
+@Ignore
+//FIXME: PHOENIX-3496
 public void testLocalIndexRoundTrip() throws Exception {
 createBaseTable(tableName, null, null);
 Connection conn1 = DriverManager.getConnection(getUrl());



phoenix git commit: Ignore LocalIndexIT#testLocalIndexRoundTrip till PHOENIX-3496 is fixed

2016-11-17 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/4.8-HBase-1.2 9521c6cf1 -> 5f7682bb9


Ignore LocalIndexIT#testLocalIndexRoundTrip till PHOENIX-3496 is fixed


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/5f7682bb
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/5f7682bb
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/5f7682bb

Branch: refs/heads/4.8-HBase-1.2
Commit: 5f7682bb9e758c557ff8e82a1930bec69322350e
Parents: 9521c6c
Author: Samarth 
Authored: Thu Nov 17 17:19:11 2016 -0800
Committer: Samarth 
Committed: Thu Nov 17 17:19:11 2016 -0800

--
 .../it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java   | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/5f7682bb/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
index b850536..7cc292c 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
@@ -67,6 +67,7 @@ import org.apache.phoenix.util.ReadOnlyProps;
 import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.TestUtil;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -123,7 +124,8 @@ public class LocalIndexIT extends BaseHBaseManagedTimeIT {
 return Arrays.asList(true, false);
 }
 
-@Test
+@Ignore
+//FIXME: PHOENIX-3496
 public void testLocalIndexRoundTrip() throws Exception {
 createBaseTable(tableName, null, null);
 Connection conn1 = DriverManager.getConnection(getUrl());



phoenix git commit: Ignore LocalIndexIT#testLocalIndexRoundTrip till PHOENIX-3496 is fixed

2016-11-17 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/4.8-HBase-1.0 a06a76387 -> 00722bd27


Ignore LocalIndexIT#testLocalIndexRoundTrip till PHOENIX-3496 is fixed


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/00722bd2
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/00722bd2
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/00722bd2

Branch: refs/heads/4.8-HBase-1.0
Commit: 00722bd278c1247e10a0b09737ef16a1147249fd
Parents: a06a763
Author: Samarth 
Authored: Thu Nov 17 17:17:57 2016 -0800
Committer: Samarth 
Committed: Thu Nov 17 17:18:10 2016 -0800

--
 .../it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java   | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/00722bd2/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
index b850536..7cc292c 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
@@ -67,6 +67,7 @@ import org.apache.phoenix.util.ReadOnlyProps;
 import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.TestUtil;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -123,7 +124,8 @@ public class LocalIndexIT extends BaseHBaseManagedTimeIT {
 return Arrays.asList(true, false);
 }
 
-@Test
+@Ignore
+//FIXME: PHOENIX-3496
 public void testLocalIndexRoundTrip() throws Exception {
 createBaseTable(tableName, null, null);
 Connection conn1 = DriverManager.getConnection(getUrl());



phoenix git commit: Ignore LocalIndexIT#testLocalIndexRoundTrip till PHOENIX-3496 is fixed

2016-11-17 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/4.8-HBase-0.98 c246b021e -> 5e29f8306


Ignore LocalIndexIT#testLocalIndexRoundTrip till PHOENIX-3496 is fixed


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/5e29f830
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/5e29f830
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/5e29f830

Branch: refs/heads/4.8-HBase-0.98
Commit: 5e29f8306f2b78980f64e8f8910775dacc84a662
Parents: c246b02
Author: Samarth 
Authored: Thu Nov 17 17:17:27 2016 -0800
Committer: Samarth 
Committed: Thu Nov 17 17:17:27 2016 -0800

--
 .../it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java   | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/5e29f830/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
index 68bf45c..1b0fa17 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
@@ -68,6 +68,7 @@ import org.apache.phoenix.util.ReadOnlyProps;
 import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.TestUtil;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -124,7 +125,8 @@ public class LocalIndexIT extends BaseHBaseManagedTimeIT {
 return Arrays.asList(true, false);
 }
 
-@Test
+@Ignore
+//FIXME: PHOENIX-3496
 public void testLocalIndexRoundTrip() throws Exception {
 createBaseTable(tableName, null, null);
 Connection conn1 = DriverManager.getConnection(getUrl());



phoenix git commit: Ignore LocalIndexIT#testLocalIndexRoundTrip till PHOENIX-3496 is fixed

2016-11-17 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/master 29c025e25 -> 404b4ede7


Ignore LocalIndexIT#testLocalIndexRoundTrip till PHOENIX-3496 is fixed


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/404b4ede
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/404b4ede
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/404b4ede

Branch: refs/heads/master
Commit: 404b4ede75f0661acad02caf77af19494cc3fc0c
Parents: 29c025e
Author: Samarth 
Authored: Thu Nov 17 17:15:34 2016 -0800
Committer: Samarth 
Committed: Thu Nov 17 17:15:48 2016 -0800

--
 .../it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java   | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/404b4ede/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
index 4ef98a3..785a324 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
@@ -57,6 +57,7 @@ import org.apache.phoenix.schema.TableNotFoundException;
 import org.apache.phoenix.util.QueryUtil;
 import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.TestUtil;
+import org.junit.Ignore;
 import org.junit.Test;
 
 public class LocalIndexIT extends BaseLocalIndexIT {
@@ -64,7 +65,8 @@ public class LocalIndexIT extends BaseLocalIndexIT {
 super(isNamespaceMapped);
 }
 
-@Test
+@Ignore
+//FIXME: PHOENIX-3496 
 public void testLocalIndexRoundTrip() throws Exception {
 String tableName = schemaName + "." + generateUniqueName();
 String indexName = "IDX_" + generateUniqueName();



phoenix git commit: Ignore LocalIndexIT#testLocalIndexRoundTrip till PHOENIX-3496 is fixed

2016-11-17 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.1 d2ec99c39 -> a90ee616c


Ignore LocalIndexIT#testLocalIndexRoundTrip till PHOENIX-3496 is fixed


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a90ee616
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a90ee616
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a90ee616

Branch: refs/heads/4.x-HBase-1.1
Commit: a90ee616cc840eba0d4249d387a4cbad83037aa2
Parents: d2ec99c
Author: Samarth 
Authored: Thu Nov 17 17:16:12 2016 -0800
Committer: Samarth 
Committed: Thu Nov 17 17:16:12 2016 -0800

--
 .../it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java   | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a90ee616/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
index 4ef98a3..785a324 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
@@ -57,6 +57,7 @@ import org.apache.phoenix.schema.TableNotFoundException;
 import org.apache.phoenix.util.QueryUtil;
 import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.TestUtil;
+import org.junit.Ignore;
 import org.junit.Test;
 
 public class LocalIndexIT extends BaseLocalIndexIT {
@@ -64,7 +65,8 @@ public class LocalIndexIT extends BaseLocalIndexIT {
 super(isNamespaceMapped);
 }
 
-@Test
+@Ignore
+//FIXME: PHOENIX-3496 
 public void testLocalIndexRoundTrip() throws Exception {
 String tableName = schemaName + "." + generateUniqueName();
 String indexName = "IDX_" + generateUniqueName();



phoenix git commit: Ignore LocalIndexIT#testLocalIndexRoundTrip till PHOENIX-3496 is fixed

2016-11-17 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 6ca6bdeee -> 6e514f8e6


Ignore LocalIndexIT#testLocalIndexRoundTrip till PHOENIX-3496 is fixed


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6e514f8e
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6e514f8e
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6e514f8e

Branch: refs/heads/4.x-HBase-0.98
Commit: 6e514f8e6e3debf2b84f828e5bd99cdef2e19a27
Parents: 6ca6bde
Author: Samarth 
Authored: Thu Nov 17 17:14:39 2016 -0800
Committer: Samarth 
Committed: Thu Nov 17 17:15:05 2016 -0800

--
 .../it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java   | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6e514f8e/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
index 4ef98a3..785a324 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
@@ -57,6 +57,7 @@ import org.apache.phoenix.schema.TableNotFoundException;
 import org.apache.phoenix.util.QueryUtil;
 import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.TestUtil;
+import org.junit.Ignore;
 import org.junit.Test;
 
 public class LocalIndexIT extends BaseLocalIndexIT {
@@ -64,7 +65,8 @@ public class LocalIndexIT extends BaseLocalIndexIT {
 super(isNamespaceMapped);
 }
 
-@Test
+@Ignore
+//FIXME: PHOENIX-3496 
 public void testLocalIndexRoundTrip() throws Exception {
 String tableName = schemaName + "." + generateUniqueName();
 String indexName = "IDX_" + generateUniqueName();



phoenix git commit: PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key

2016-11-17 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 0a70cb8ac -> 6ca6bdeee


PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6ca6bdee
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6ca6bdee
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6ca6bdee

Branch: refs/heads/4.x-HBase-0.98
Commit: 6ca6bdeee65f40a5e7949c2c595d7bef9c9088a6
Parents: 0a70cb8
Author: James Taylor 
Authored: Thu Nov 17 17:02:17 2016 -0800
Committer: James Taylor 
Committed: Thu Nov 17 17:03:27 2016 -0800

--
 .../java/org/apache/phoenix/schema/types/PDataType.java | 12 ++--
 .../org/apache/phoenix/schema/types/PDataTypeTest.java  |  2 +-
 2 files changed, 7 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6ca6bdee/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
index 18956e8..de1e63f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
@@ -692,29 +692,29 @@ public abstract class PDataType<T> implements 
DataType<T>, Comparable<PDataType<?>> {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/6ca6bdee/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeTest.java
index d07364c..c28e5b1 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeTest.java
@@ -1671,7 +1671,7 @@ public class PDataTypeTest {
 }
 
 private void testReadDecimalPrecisionAndScaleFromRawBytes(BigDecimal bd, 
SortOrder sortOrder) {
-byte[] b = PDecimal.INSTANCE.toBytes(bd);
+byte[] b = PDecimal.INSTANCE.toBytes(bd, sortOrder);
 int[] v = PDataType.getDecimalPrecisionAndScale(b, 0, b.length, 
sortOrder);
 assertEquals(bd.toString(), bd.precision(), v[0]);
 assertEquals(bd.toString(), bd.scale(), v[1]);



phoenix git commit: PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key

2016-11-17 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/master 8e2f087bd -> 29c025e25


PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/29c025e2
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/29c025e2
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/29c025e2

Branch: refs/heads/master
Commit: 29c025e25430bb9705ec15896f6d02aa7a5e1131
Parents: 8e2f087
Author: James Taylor 
Authored: Thu Nov 17 17:02:17 2016 -0800
Committer: James Taylor 
Committed: Thu Nov 17 17:02:17 2016 -0800

--
 .../java/org/apache/phoenix/schema/types/PDataType.java | 12 ++--
 .../org/apache/phoenix/schema/types/PDataTypeTest.java  |  2 +-
 2 files changed, 7 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/29c025e2/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
index 18956e8..de1e63f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
@@ -692,29 +692,29 @@ public abstract class PDataType<T> implements 
DataType<T>, Comparable<PDataType<?>> {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/29c025e2/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeTest.java
index d07364c..c28e5b1 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeTest.java
@@ -1671,7 +1671,7 @@ public class PDataTypeTest {
 }
 
 private void testReadDecimalPrecisionAndScaleFromRawBytes(BigDecimal bd, 
SortOrder sortOrder) {
-byte[] b = PDecimal.INSTANCE.toBytes(bd);
+byte[] b = PDecimal.INSTANCE.toBytes(bd, sortOrder);
 int[] v = PDataType.getDecimalPrecisionAndScale(b, 0, b.length, 
sortOrder);
 assertEquals(bd.toString(), bd.precision(), v[0]);
 assertEquals(bd.toString(), bd.scale(), v[1]);



phoenix git commit: PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key

2016-11-17 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 af564008b -> 0a70cb8ac


PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/0a70cb8a
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/0a70cb8a
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/0a70cb8a

Branch: refs/heads/4.x-HBase-0.98
Commit: 0a70cb8ac2e46ee44c44bf6219565ffa43f45e5e
Parents: af56400
Author: James Taylor 
Authored: Thu Nov 17 16:12:44 2016 -0800
Committer: James Taylor 
Committed: Thu Nov 17 16:15:11 2016 -0800

--
 .../apache/phoenix/end2end/UpsertValuesIT.java  | 22 
 .../apache/phoenix/schema/types/PDataType.java  |  4 ++--
 .../apache/phoenix/schema/types/PDecimal.java   |  6 +++---
 .../phoenix/schema/types/PDataTypeTest.java | 18 +++-
 4 files changed, 40 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/0a70cb8a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
index 498c4a3..eb81ae3 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
@@ -162,6 +162,28 @@ public class UpsertValuesIT extends 
BaseClientManagedTimeIT {
 assertEquals("a", rs.getString(1));
 assertEquals("2013-06-08 00:00:00.000", rs.getString(2));
 }
+
+@Test
+public void testUpsertValuesWithDescDecimal() throws Exception {
+long ts = nextTimestamp();
+Properties props = new Properties();
+props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(ts));
+Connection conn = DriverManager.getConnection(getUrl(), props);
+conn.createStatement().execute("create table UpsertDecimalDescTest (k 
DECIMAL(12,3) NOT NULL PRIMARY KEY DESC)");
+conn.close();
+
+props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(ts+5));
+conn = DriverManager.getConnection(getUrl(), props);
+conn.createStatement().execute("upsert into UpsertDecimalDescTest 
values (0.0)");
+conn.commit();
+conn.close();
+
+props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(ts+10));
+conn = DriverManager.getConnection(getUrl(), props);
+ResultSet rs = conn.createStatement().executeQuery("select k from 
UpsertDecimalDescTest");
+assertTrue(rs.next());
+assertEquals(0.0, rs.getDouble(1), 0.001);
+}
 
 @Test
 public void testUpsertRandomValues() throws Exception {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/0a70cb8a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
index 58018ac..18956e8 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
@@ -689,9 +689,9 @@ public abstract class PDataType implements DataType, 
Comparable<PDataType> {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/0a70cb8a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDecimal.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDecimal.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDecimal.java
index 9fff730..b76febb 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDecimal.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDecimal.java
@@ -321,8 +321,8 @@ public class PDecimal extends PRealNumber {
 maxLength = v.precision();
 scale = v.scale();
 } else {
-this.coerceBytes(ptr, value, srcType, maxLength, scale, 
SortOrder.getDefault(), desiredMaxLength, desiredScale, sortOrder, true);
-int[] v = getDecimalPrecisionAndScale(ptr.get(), ptr.getOffset(), 
ptr.getLength());
+this.coerceBytes(ptr, value, srcType, maxLength, scale, sortOrder, 
desiredMaxLength, desiredScale, sortOrder, true);
+int[] v = getDecimalPrecisionAndScale(ptr.get(), ptr.getOffset(), 
ptr.getLength(), sortOrder);
 maxLength = v[0];
 scale = v[1];
 }
@@ -352,7 +352,7 @@ public class PDecimal extends PRealNumber {

phoenix git commit: PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key

2016-11-17 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.1 27c24682d -> d2ec99c39


PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d2ec99c3
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d2ec99c3
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d2ec99c3

Branch: refs/heads/4.x-HBase-1.1
Commit: d2ec99c39cf524d695889a0d0216a70101ff1d72
Parents: 27c2468
Author: James Taylor 
Authored: Thu Nov 17 16:12:44 2016 -0800
Committer: James Taylor 
Committed: Thu Nov 17 16:13:55 2016 -0800

--
 .../apache/phoenix/end2end/UpsertValuesIT.java  | 22 
 .../apache/phoenix/schema/types/PDataType.java  |  4 ++--
 .../apache/phoenix/schema/types/PDecimal.java   |  6 +++---
 .../phoenix/schema/types/PDataTypeTest.java | 18 +++-
 4 files changed, 40 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/d2ec99c3/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
index 498c4a3..eb81ae3 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
@@ -162,6 +162,28 @@ public class UpsertValuesIT extends 
BaseClientManagedTimeIT {
 assertEquals("a", rs.getString(1));
 assertEquals("2013-06-08 00:00:00.000", rs.getString(2));
 }
+
+@Test
+public void testUpsertValuesWithDescDecimal() throws Exception {
+long ts = nextTimestamp();
+Properties props = new Properties();
+props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(ts));
+Connection conn = DriverManager.getConnection(getUrl(), props);
+conn.createStatement().execute("create table UpsertDecimalDescTest (k 
DECIMAL(12,3) NOT NULL PRIMARY KEY DESC)");
+conn.close();
+
+props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(ts+5));
+conn = DriverManager.getConnection(getUrl(), props);
+conn.createStatement().execute("upsert into UpsertDecimalDescTest 
values (0.0)");
+conn.commit();
+conn.close();
+
+props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(ts+10));
+conn = DriverManager.getConnection(getUrl(), props);
+ResultSet rs = conn.createStatement().executeQuery("select k from 
UpsertDecimalDescTest");
+assertTrue(rs.next());
+assertEquals(0.0, rs.getDouble(1), 0.001);
+}
 
 @Test
 public void testUpsertRandomValues() throws Exception {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/d2ec99c3/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
index 58018ac..18956e8 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
@@ -689,9 +689,9 @@ public abstract class PDataType implements DataType, 
Comparable<PDataType> {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/d2ec99c3/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDecimal.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDecimal.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDecimal.java
index 9fff730..b76febb 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDecimal.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDecimal.java
@@ -321,8 +321,8 @@ public class PDecimal extends PRealNumber {
 maxLength = v.precision();
 scale = v.scale();
 } else {
-this.coerceBytes(ptr, value, srcType, maxLength, scale, 
SortOrder.getDefault(), desiredMaxLength, desiredScale, sortOrder, true);
-int[] v = getDecimalPrecisionAndScale(ptr.get(), ptr.getOffset(), 
ptr.getLength());
+this.coerceBytes(ptr, value, srcType, maxLength, scale, sortOrder, 
desiredMaxLength, desiredScale, sortOrder, true);
+int[] v = getDecimalPrecisionAndScale(ptr.get(), ptr.getOffset(), 
ptr.getLength(), sortOrder);
 maxLength = v[0];
 scale = v[1];
 }
@@ -352,7 +352,7 @@ public class PDecimal extends PRealNumber {

phoenix git commit: PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key

2016-11-17 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/master d59f0e539 -> 8e2f087bd


PHOENIX-3494 ArrayIndexOutOfBoundsException with decimal desc key


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8e2f087b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8e2f087b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8e2f087b

Branch: refs/heads/master
Commit: 8e2f087bdb3b86fcbdf2e7e00918b8abb2ac8752
Parents: d59f0e5
Author: James Taylor 
Authored: Thu Nov 17 16:12:44 2016 -0800
Committer: James Taylor 
Committed: Thu Nov 17 16:12:44 2016 -0800

--
 .../apache/phoenix/end2end/UpsertValuesIT.java  | 22 
 .../apache/phoenix/schema/types/PDataType.java  |  4 ++--
 .../apache/phoenix/schema/types/PDecimal.java   |  6 +++---
 .../phoenix/schema/types/PDataTypeTest.java | 18 +++-
 4 files changed, 40 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/8e2f087b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
index 498c4a3..eb81ae3 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpsertValuesIT.java
@@ -162,6 +162,28 @@ public class UpsertValuesIT extends 
BaseClientManagedTimeIT {
 assertEquals("a", rs.getString(1));
 assertEquals("2013-06-08 00:00:00.000", rs.getString(2));
 }
+
+@Test
+public void testUpsertValuesWithDescDecimal() throws Exception {
+long ts = nextTimestamp();
+Properties props = new Properties();
+props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(ts));
+Connection conn = DriverManager.getConnection(getUrl(), props);
+conn.createStatement().execute("create table UpsertDecimalDescTest (k 
DECIMAL(12,3) NOT NULL PRIMARY KEY DESC)");
+conn.close();
+
+props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(ts+5));
+conn = DriverManager.getConnection(getUrl(), props);
+conn.createStatement().execute("upsert into UpsertDecimalDescTest 
values (0.0)");
+conn.commit();
+conn.close();
+
+props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(ts+10));
+conn = DriverManager.getConnection(getUrl(), props);
+ResultSet rs = conn.createStatement().executeQuery("select k from 
UpsertDecimalDescTest");
+assertTrue(rs.next());
+assertEquals(0.0, rs.getDouble(1), 0.001);
+}
 
 @Test
 public void testUpsertRandomValues() throws Exception {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8e2f087b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
index 58018ac..18956e8 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
@@ -689,9 +689,9 @@ public abstract class PDataType implements DataType, 
Comparable<PDataType> {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8e2f087b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDecimal.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDecimal.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDecimal.java
index 9fff730..b76febb 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDecimal.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDecimal.java
@@ -321,8 +321,8 @@ public class PDecimal extends PRealNumber {
 maxLength = v.precision();
 scale = v.scale();
 } else {
-this.coerceBytes(ptr, value, srcType, maxLength, scale, 
SortOrder.getDefault(), desiredMaxLength, desiredScale, sortOrder, true);
-int[] v = getDecimalPrecisionAndScale(ptr.get(), ptr.getOffset(), 
ptr.getLength());
+this.coerceBytes(ptr, value, srcType, maxLength, scale, sortOrder, 
desiredMaxLength, desiredScale, sortOrder, true);
+int[] v = getDecimalPrecisionAndScale(ptr.get(), ptr.getOffset(), 
ptr.getLength(), sortOrder);
 maxLength = v[0];
 scale = v[1];
 }
@@ -352,7 +352,7 @@ public class PDecimal extends PRealNumber {

Build failed in Jenkins: Phoenix | Master #1498

2016-11-17 Thread Apache Jenkins Server
See 

Changes:

[ssa] PHOENIX-3391 Supporting Hive 2.1.0 in PhoenixStorageHandler

--
[...truncated 896 lines...]
Running org.apache.phoenix.end2end.CountDistinctCompressionIT
Running org.apache.phoenix.end2end.CsvBulkLoadToolIT
Running org.apache.phoenix.end2end.ContextClassloaderIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.421 sec - in 
org.apache.phoenix.end2end.CountDistinctCompressionIT
Running org.apache.phoenix.end2end.FlappingLocalIndexIT
Running org.apache.phoenix.end2end.IndexExtendedIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.926 sec - in 
org.apache.phoenix.end2end.ContextClassloaderIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 46.48 sec - in 
org.apache.phoenix.end2end.ConnectionUtilIT
Running org.apache.phoenix.end2end.QueryWithLimitIT
Running org.apache.phoenix.end2end.QueryTimeoutIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 20.138 sec - in 
org.apache.phoenix.end2end.QueryWithLimitIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.371 sec - in 
org.apache.phoenix.end2end.QueryTimeoutIT
Running org.apache.phoenix.end2end.RenewLeaseIT
Running org.apache.phoenix.end2end.SpillableGroupByIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.557 sec - in 
org.apache.phoenix.end2end.RenewLeaseIT
Running org.apache.phoenix.end2end.StatsCollectorIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.622 sec - in 
org.apache.phoenix.end2end.SpillableGroupByIT
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 109.206 sec - 
in org.apache.phoenix.end2end.CsvBulkLoadToolIT
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 108.801 sec - 
in org.apache.phoenix.end2end.FlappingLocalIndexIT
Running org.apache.phoenix.end2end.SysTableNamespaceMappedStatsCollectorIT
Running org.apache.phoenix.end2end.UserDefinedFunctionsIT
Running org.apache.phoenix.end2end.index.ImmutableIndexIT
Running org.apache.phoenix.end2end.index.LocalIndexIT
Running org.apache.phoenix.end2end.index.MutableIndexFailureIT
Running org.apache.phoenix.end2end.index.MutableIndexReplicationIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 47.929 sec - in 
org.apache.phoenix.end2end.index.MutableIndexReplicationIT
Tests run: 14, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 93.623 sec - 
in org.apache.phoenix.end2end.UserDefinedFunctionsIT
Running org.apache.phoenix.end2end.index.ReadOnlyIndexFailureIT
Running org.apache.phoenix.end2end.index.txn.TxWriteFailureIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 40.093 sec - in 
org.apache.phoenix.end2end.index.ReadOnlyIndexFailureIT
Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 198.655 sec - 
in org.apache.phoenix.end2end.index.ImmutableIndexIT
Running org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
Tests run: 40, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 278.057 sec - 
in org.apache.phoenix.end2end.StatsCollectorIT
Tests run: 80, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 369.971 sec - 
in org.apache.phoenix.end2end.IndexExtendedIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 36.807 sec - in 
org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
Tests run: 40, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 265.95 sec - 
in org.apache.phoenix.end2end.SysTableNamespaceMappedStatsCollectorIT
Running 
org.apache.phoenix.hbase.index.covered.EndToEndCoveredColumnsIndexBuilderIT
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 96.189 sec - in 
org.apache.phoenix.end2end.index.txn.TxWriteFailureIT
Running org.apache.phoenix.execute.PartialCommitIT
Running org.apache.phoenix.hbase.index.covered.example.FailWithoutRetriesIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 25.982 sec - in 
org.apache.phoenix.hbase.index.covered.EndToEndCoveredColumnsIndexBuilderIT
Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.853 sec - in 
org.apache.phoenix.execute.PartialCommitIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 15.239 sec - in 
org.apache.phoenix.hbase.index.covered.example.FailWithoutRetriesIT
Running org.apache.phoenix.hbase.index.covered.example.EndToEndCoveredIndexingIT
Running 
org.apache.phoenix.hbase.index.covered.example.EndtoEndIndexingWithCompressionIT
Running org.apache.phoenix.iterate.RoundRobinResultIteratorWithStatsIT
Running org.apache.phoenix.monitoring.PhoenixMetricsIT
Running org.apache.phoenix.iterate.ScannerLeaseRenewalIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 15.861 sec - in 
org.apache.phoenix.iterate.RoundRobinResultIteratorWithStatsIT
Running org.apache.phoenix.rpc.PhoenixClientRpcIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 18.975 sec - in 
org.apache.phoenix.rpc.PhoenixClientRpcIT

Apache-Phoenix | 4.x-HBase-1.0 | Build Successful

2016-11-17 Thread Apache Jenkins Server
4.x-HBase-1.0 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf?p=phoenix.git;a=shortlog;h=refs/heads/4.x-HBase-1.0

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.x-HBase-1.0/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.x-HBase-1.0/lastCompletedBuild/testReport/

Changes
[ssa] PHOENIX-3391 Supporting Hive 2.1.0 in PhoenixStorageHandler



Build times for last couple of runsLatest build time is the right most | Legend blue: normal, red: test failure, gray: timeout


Build failed in Jenkins: Phoenix-4.8-HBase-1.1 #53

2016-11-17 Thread Apache Jenkins Server
See 

--
[...truncated 1178 lines...]
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$35.call(RegionCoprocessorHost.java:1024)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$RegionOperation.call(RegionCoprocessorHost.java:1708)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1783)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1740)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.preBatchMutate(RegionCoprocessorHost.java:1020)
at 
org.apache.hadoop.hbase.regionserver.HRegion.doMiniBatchMutation(HRegion.java:3078)
at 
org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2853)
at 
org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2795)
at 
org.apache.hadoop.hbase.regionserver.RSRpcServices.doBatchOp(RSRpcServices.java:700)
at 
org.apache.hadoop.hbase.regionserver.RSRpcServices.doNonAtomicRegionMutation(RSRpcServices.java:662)
at 
org.apache.hadoop.hbase.regionserver.RSRpcServices.multi(RSRpcServices.java:2046)
at 
org.apache.hadoop.hbase.client.MultiServerCallable.call(MultiServerCallable.java:129)
at 
org.apache.hadoop.hbase.client.MultiServerCallable.call(MultiServerCallable.java:54)
at 
org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:200)
at 
org.apache.hadoop.hbase.client.AsyncProcess$AsyncRequestFutureImpl$SingleServerRequestRunnable.run(AsyncProcess.java:708)
at 
java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
: 2 times, 
at 
org.apache.hadoop.hbase.client.AsyncProcess$BatchErrors.makeException(AsyncProcess.java:228)
at 
org.apache.hadoop.hbase.client.AsyncProcess$BatchErrors.access$1700(AsyncProcess.java:208)
at 
org.apache.hadoop.hbase.client.AsyncProcess$AsyncRequestFutureImpl.getErrors(AsyncProcess.java:1599)
at org.apache.hadoop.hbase.client.HTable.batch(HTable.java:936)
at org.apache.hadoop.hbase.client.HTable.batch(HTable.java:950)
at 
org.apache.hadoop.hbase.client.HTableWrapper.batch(HTableWrapper.java:255)
at 
org.apache.phoenix.execute.DelegateHTable.batch(DelegateHTable.java:94)
at 
org.apache.phoenix.hbase.index.write.ParallelWriterIndexCommitter$1.call(ParallelWriterIndexCommitter.java:167)
at 
org.apache.phoenix.hbase.index.write.ParallelWriterIndexCommitter$1.call(ParallelWriterIndexCommitter.java:131)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
... 1 more
: 1 time, 
at 
org.apache.phoenix.end2end.index.MutableIndexFailureIT.helpTestWriteFailureDisablesIndex(MutableIndexFailureIT.java:225)
at 
org.apache.phoenix.end2end.index.MutableIndexFailureIT.testWriteFailureDisablesIndex(MutableIndexFailureIT.java:127)

testWriteFailureDisablesIndex[transactional = false, localIndex = true, 
isNamespaceMapped = 
false](org.apache.phoenix.end2end.index.MutableIndexFailureIT)  Time elapsed: 
25.746 sec  <<< ERROR!
org.apache.phoenix.exception.PhoenixIOException: 
org.apache.phoenix.exception.PhoenixIOException: 
org.apache.hadoop.hbase.DoNotRetryIOException
at 
org.apache.phoenix.end2end.index.MutableIndexFailureIT$FailingRegionObserver.preBatchMutate(MutableIndexFailureIT.java:400)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$35.call(RegionCoprocessorHost.java:1024)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$RegionOperation.call(RegionCoprocessorHost.java:1708)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1783)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1740)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.preBatchMutate(RegionCoprocessorHost.java:1020)
at 
org.apache.hadoop.hbase.regionserver.HRegion.doMiniBatchMutation(HRegion.java:3078)
at 
org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2853)
at 
org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2795)
at 
org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver.commitBatch(Ung

Build failed in Jenkins: Phoenix | 4.x-HBase-0.98 #1389

2016-11-17 Thread Apache Jenkins Server
See 

Changes:

[ssa] PHOENIX-3391 Supporting Hive 2.1.0 in PhoenixStorageHandler

--
[...truncated 724 lines...]
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 36.959 sec - in 
org.apache.phoenix.end2end.index.ViewIndexIT
Running org.apache.phoenix.end2end.index.txn.MutableRollbackIT
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 49.337 sec - in 
org.apache.phoenix.end2end.index.txn.MutableRollbackIT
Running org.apache.phoenix.end2end.index.txn.RollbackIT
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 39.795 sec - in 
org.apache.phoenix.end2end.index.txn.RollbackIT
Running org.apache.phoenix.end2end.salted.SaltedTableUpsertSelectIT
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 20.692 sec - in 
org.apache.phoenix.end2end.salted.SaltedTableUpsertSelectIT
Running org.apache.phoenix.end2end.salted.SaltedTableVarLengthRowKeyIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.489 sec - in 
org.apache.phoenix.end2end.salted.SaltedTableVarLengthRowKeyIT
Running org.apache.phoenix.iterate.PhoenixQueryTimeoutIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.429 sec - in 
org.apache.phoenix.iterate.PhoenixQueryTimeoutIT
Running org.apache.phoenix.iterate.RoundRobinResultIteratorIT
Tests run: 102, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 787.159 sec - 
in org.apache.phoenix.end2end.SortMergeJoinIT
Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 42.048 sec - in 
org.apache.phoenix.iterate.RoundRobinResultIteratorIT
Running org.apache.phoenix.rpc.UpdateCacheIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 19.162 sec - in 
org.apache.phoenix.rpc.UpdateCacheIT
Running org.apache.phoenix.trace.PhoenixTableMetricsWriterIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.936 sec - in 
org.apache.phoenix.trace.PhoenixTableMetricsWriterIT
Running org.apache.phoenix.trace.PhoenixTraceReaderIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.114 sec - in 
org.apache.phoenix.trace.PhoenixTraceReaderIT
Running org.apache.phoenix.trace.PhoenixTracingEndToEndIT
Tests run: 66, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 336.979 sec - 
in org.apache.phoenix.end2end.index.IndexExpressionIT
Running org.apache.phoenix.tx.FlappingTransactionIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.01 sec - in 
org.apache.phoenix.tx.FlappingTransactionIT
Running org.apache.phoenix.tx.TxCheckpointIT
Running org.apache.phoenix.tx.TransactionIT
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 82.453 sec - in 
org.apache.phoenix.trace.PhoenixTracingEndToEndIT
Tests run: 19, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: 68.309 sec - 
in org.apache.phoenix.tx.TransactionIT
Tests run: 20, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 111.075 sec - 
in org.apache.phoenix.tx.TxCheckpointIT
Tests run: 40, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 475.848 sec - 
in org.apache.phoenix.end2end.index.MutableIndexIT
Tests run: 144, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 672.448 sec - 
in org.apache.phoenix.end2end.index.IndexIT

Results :

Tests run: 1627, Failures: 0, Errors: 0, Skipped: 1

[INFO] 
[INFO] --- maven-failsafe-plugin:2.19.1:integration-test 
(ClientManagedTimeTests) @ phoenix-core ---

---
 T E S T S
---
Running org.apache.phoenix.end2end.ClientTimeArithmeticQueryIT
Running org.apache.phoenix.end2end.ArrayIT
Running org.apache.phoenix.end2end.CaseStatementIT
Running org.apache.phoenix.end2end.AggregateQueryIT
Running org.apache.phoenix.end2end.CastAndCoerceIT
Tests run: 63, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 47.186 sec - 
in org.apache.phoenix.end2end.CaseStatementIT
Running org.apache.phoenix.end2end.ColumnProjectionOptimizationIT
Tests run: 49, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 44.544 sec - 
in org.apache.phoenix.end2end.CastAndCoerceIT
Running org.apache.phoenix.end2end.CreateSchemaIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.34 sec - in 
org.apache.phoenix.end2end.CreateSchemaIT
Running org.apache.phoenix.end2end.CreateTableIT
Tests run: 49, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 64.732 sec - 
in org.apache.phoenix.end2end.AggregateQueryIT
Running org.apache.phoenix.end2end.CustomEntityDataIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 21.442 sec - in 
org.apache.phoenix.end2end.ColumnProjectionOptimizationIT
Running org.apache.phoenix.end2end.DerivedTableIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.537 sec - in 
org.apache.phoenix.end2end.CustomEntityDataIT
Running org.apache.phoenix.end2end.DistinctCountIT
Tests run: 79, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 78.766 sec 

Build failed in Jenkins: Phoenix | Master #1497

2016-11-17 Thread Apache Jenkins Server
See 

--
[...truncated 1067 lines...]

Caused by: org.apache.hadoop.hbase.ipc.RemoteWithExtrasException: 
org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException: Column family 
L#0 does not exist in region 
TEST_TABLET55,\x11\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00,1479420056954.b92637a09f4b36949d82698da039ab91.
 in table 'TEST_TABLET55', {TABLE_ATTRIBUTES => {coprocessor$1 => 
'|org.apache.phoenix.coprocessor.ScanRegionObserver|805306366|', coprocessor$2 
=> 
'|org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver|805306366|', 
coprocessor$3 => 
'|org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver|805306366|', 
coprocessor$4 => 
'|org.apache.phoenix.coprocessor.ServerCachingEndpointImpl|805306366|', 
coprocessor$5 => 
'|org.apache.phoenix.hbase.index.Indexer|805306366|index.builder=org.apache.phoenix.index.PhoenixIndexBuilder,org.apache.hadoop.hbase.index.codec.class=org.apache.phoenix.index.PhoenixIndexCodec'},
 {NAME => '0', DATA_BLOCK_ENCODING => 'FAST_DIFF', BLOOMFILTER => 'ROW', 
REPLICATION_SCOPE => '0', COMPRESSION => 'NONE', VERSIONS => '1', TTL => 
'FOREVER', MIN_VERSIONS => '0', KEEP_DELETED_CELLS => 'FALSE', BLOCKSIZE => 
'65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}
at 
org.apache.hadoop.hbase.regionserver.HRegion.checkFamily(HRegion.java:7649)
at 
org.apache.hadoop.hbase.regionserver.HRegion.getScanner(HRegion.java:2543)
at 
org.apache.hadoop.hbase.regionserver.HRegion.getScanner(HRegion.java:2527)
at 
org.apache.hadoop.hbase.regionserver.RSRpcServices.scan(RSRpcServices.java:2406)
at 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:33648)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2170)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:109)
at 
org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:133)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:108)
at java.lang.Thread.run(Thread.java:745)


Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 18.741 sec - in 
org.apache.phoenix.rpc.PhoenixServerRpcIT
Tests run: 8, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 366.505 sec <<< 
FAILURE! - in org.apache.phoenix.end2end.index.MutableIndexFailureIT
testWriteFailureDisablesIndex[MutableIndexFailureIT_transactional=false,localIndex=true,isNamespaceMapped=true](org.apache.phoenix.end2end.index.MutableIndexFailureIT)
  Time elapsed: 30.271 sec  <<< ERROR!
org.apache.phoenix.execute.CommitException: 
org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException: Failed 1 
action: org.apache.hadoop.hbase.DoNotRetryIOException: Failed 2 actions: 
org.apache.hadoop.hbase.DoNotRetryIOException
at 
org.apache.phoenix.end2end.index.MutableIndexFailureIT$FailingRegionObserver.preBatchMutate(MutableIndexFailureIT.java:405)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$35.call(RegionCoprocessorHost.java:1007)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$RegionOperation.call(RegionCoprocessorHost.java:1673)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1749)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1705)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.preBatchMutate(RegionCoprocessorHost.java:1003)
at 
org.apache.hadoop.hbase.regionserver.HRegion.doMiniBatchMutation(HRegion.java:3080)
at 
org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2867)
at 
org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2809)
at 
org.apache.hadoop.hbase.regionserver.RSRpcServices.doBatchOp(RSRpcServices.java:751)
at 
org.apache.hadoop.hbase.regionserver.RSRpcServices.doNonAtomicRegionMutation(RSRpcServices.java:713)
at 
org.apache.hadoop.hbase.regionserver.RSRpcServices.multi(RSRpcServices.java:2148)
at 
org.apache.hadoop.hbase.client.MultiServerCallable.call(MultiServerCallable.java:128)
at 
org.apache.hadoop.hbase.client.MultiServerCallable.call(MultiServerCallable.java:53)
at 
org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:200)
at 
org.apache.hadoop.hbase.client.AsyncProcess$AsyncRequestFutureImpl$SingleServerRequestRunnable.run(AsyncProcess.java:733)
at 
java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)

Apache-Phoenix | 4.x-HBase-1.0 | Build Successful

2016-11-17 Thread Apache Jenkins Server
4.x-HBase-1.0 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf?p=phoenix.git;a=shortlog;h=refs/heads/4.x-HBase-1.0

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.x-HBase-1.0/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.x-HBase-1.0/lastCompletedBuild/testReport/

Changes
[ssa] PHOENIX-3490 PhoenixStorageHandler doesn't handle hbase configuration



Build times for last couple of runs. Latest build time is the right-most. | Legend — blue: normal, red: test failure, gray: timeout


Build failed in Jenkins: Phoenix-4.8-HBase-0.98 #53

2016-11-17 Thread Apache Jenkins Server
See 

--
[...truncated 1058 lines...]
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.preBatchMutate(RegionCoprocessorHost.java:1009)
at 
org.apache.hadoop.hbase.regionserver.HRegion.doMiniBatchMutation(HRegion.java:2580)
at 
org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2359)
at 
org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2314)
at 
org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:2318)
at 
org.apache.hadoop.hbase.regionserver.HRegionServer.doBatchOp(HRegionServer.java:4678)
at 
org.apache.hadoop.hbase.regionserver.HRegionServer.doNonAtomicRegionMutation(HRegionServer.java:3835)
at 
org.apache.hadoop.hbase.regionserver.HRegionServer.multi(HRegionServer.java:3680)
at 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32500)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2195)
at 
org.apache.hadoop.hbase.client.CoprocessorHConnection$1.callBlockingMethod(CoprocessorHConnection.java:144)
at 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.multi(ClientProtos.java:32986)
at 
org.apache.hadoop.hbase.client.MultiServerCallable.call(MultiServerCallable.java:113)
at 
org.apache.hadoop.hbase.client.MultiServerCallable.call(MultiServerCallable.java:51)
at 
org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:193)
at 
org.apache.hadoop.hbase.client.AsyncProcess$1.run(AsyncProcess.java:622)
at 
java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
: 2 times, servers with issues: pietas.apache.org,40983,1479417962888, 
at 
org.apache.hadoop.hbase.client.AsyncProcess$BatchErrors.makeException(AsyncProcess.java:211)
at 
org.apache.hadoop.hbase.client.AsyncProcess$BatchErrors.access$500(AsyncProcess.java:195)
at 
org.apache.hadoop.hbase.client.AsyncProcess.getErrors(AsyncProcess.java:1082)
at 
org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.processBatchCallback(HConnectionManager.java:2479)
at org.apache.hadoop.hbase.client.HTable.batchCallback(HTable.java:898)
at org.apache.hadoop.hbase.client.HTable.batchCallback(HTable.java:913)
at org.apache.hadoop.hbase.client.HTable.batch(HTable.java:888)
at 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost$Environment$HTableWrapper.batch(CoprocessorHost.java:595)
at 
org.apache.phoenix.execute.DelegateHTable.batch(DelegateHTable.java:94)
at 
org.apache.phoenix.hbase.index.write.ParallelWriterIndexCommitter$1.call(ParallelWriterIndexCommitter.java:169)
at 
org.apache.phoenix.hbase.index.write.ParallelWriterIndexCommitter$1.call(ParallelWriterIndexCommitter.java:134)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
... 1 more
: 1 time, servers with issues: pietas.apache.org,40983,1479417962888, 
at 
org.apache.phoenix.end2end.index.MutableIndexFailureIT.helpTestWriteFailureDisablesIndex(MutableIndexFailureIT.java:225)
at 
org.apache.phoenix.end2end.index.MutableIndexFailureIT.testWriteFailureDisablesIndex(MutableIndexFailureIT.java:127)

testWriteFailureDisablesIndex[transactional = false, localIndex = true, 
isNamespaceMapped = 
false](org.apache.phoenix.end2end.index.MutableIndexFailureIT)  Time elapsed: 
12.65 sec  <<< ERROR!
org.apache.phoenix.exception.PhoenixIOException: 
org.apache.phoenix.exception.PhoenixIOException: 
org.apache.hadoop.hbase.DoNotRetryIOException
at 
org.apache.phoenix.end2end.index.MutableIndexFailureIT$FailingRegionObserver.preBatchMutate(MutableIndexFailureIT.java:400)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$35.call(RegionCoprocessorHost.java:1013)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$RegionOperation.call(RegionCoprocessorHost.java:1656)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1733)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1688)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.preBatchMutate(RegionCoprocessorHost

Build failed in Jenkins: Phoenix-4.8-HBase-1.0 #50

2016-11-17 Thread Apache Jenkins Server
See 

Changes:

[jamestaylor] PHOENIX-3449 Ignore hanging IndexToolIT tests until they can be

--
[...truncated 846 lines...]
Tests run: 21, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: 159.426 sec - 
in org.apache.phoenix.tx.TransactionIT
Tests run: 20, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 106.133 sec - 
in org.apache.phoenix.tx.TxCheckpointIT
Tests run: 40, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 602.042 sec - 
in org.apache.phoenix.end2end.index.MutableIndexIT
Tests run: 136, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 859.333 sec - 
in org.apache.phoenix.end2end.index.IndexIT

Results :

Tests in error: 
  LocalIndexIT.testLocalIndexRoundTrip:155 » PhoenixIO 
org.apache.phoenix.except...
  LocalIndexIT.testLocalIndexRoundTrip:155 » PhoenixIO 
org.apache.phoenix.except...

Tests run: 1241, Failures: 0, Errors: 2, Skipped: 5

[INFO] 
[INFO] --- maven-failsafe-plugin:2.19.1:integration-test 
(HBaseManagedTimeTableReuseTest) @ phoenix-core ---

---
 T E S T S
---
Running org.apache.phoenix.end2end.AbsFunctionEnd2EndIT
Running org.apache.phoenix.end2end.ArithmeticQueryIT
Running org.apache.phoenix.end2end.ArrayFillFunctionIT
Running org.apache.phoenix.end2end.AlterSessionIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.185 sec - in 
org.apache.phoenix.end2end.AlterSessionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.53 sec - in 
org.apache.phoenix.end2end.AbsFunctionEnd2EndIT
Running org.apache.phoenix.end2end.ArraysWithNullsIT
Running org.apache.phoenix.end2end.AutoCommitIT
Running org.apache.phoenix.end2end.ArrayToStringFunctionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.697 sec - in 
org.apache.phoenix.end2end.AutoCommitIT
Running org.apache.phoenix.end2end.CbrtFunctionEnd2EndIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.986 sec - in 
org.apache.phoenix.end2end.CbrtFunctionEnd2EndIT
Running org.apache.phoenix.end2end.ConvertTimezoneFunctionIT
Tests run: 26, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.015 sec - 
in org.apache.phoenix.end2end.ArrayFillFunctionIT
Running org.apache.phoenix.end2end.DecodeFunctionIT
Tests run: 36, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 16.78 sec - in 
org.apache.phoenix.end2end.ArrayToStringFunctionIT
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.062 sec - in 
org.apache.phoenix.end2end.DecodeFunctionIT
Running org.apache.phoenix.end2end.DynamicFamilyIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.035 sec - in 
org.apache.phoenix.end2end.ConvertTimezoneFunctionIT
Running org.apache.phoenix.end2end.DynamicUpsertIT
Tests run: 16, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 25.975 sec - 
in org.apache.phoenix.end2end.ArraysWithNullsIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.335 sec - in 
org.apache.phoenix.end2end.DynamicUpsertIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.923 sec - in 
org.apache.phoenix.end2end.DynamicFamilyIT
Running org.apache.phoenix.end2end.FirstValueFunctionIT
Running org.apache.phoenix.end2end.GetSetByteBitFunctionEnd2EndIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.225 sec - in 
org.apache.phoenix.end2end.GetSetByteBitFunctionEnd2EndIT
Running org.apache.phoenix.end2end.LikeExpressionIT
Running org.apache.phoenix.end2end.MD5FunctionIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.912 sec - in 
org.apache.phoenix.end2end.MD5FunctionIT
Running org.apache.phoenix.end2end.MinMaxAggregateFunctionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.389 sec - in 
org.apache.phoenix.end2end.MinMaxAggregateFunctionIT
Running org.apache.phoenix.end2end.NthValueFunctionIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.339 sec - in 
org.apache.phoenix.end2end.FirstValueFunctionIT
Running org.apache.phoenix.end2end.OctetLengthFunctionEnd2EndIT
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.959 sec - in 
org.apache.phoenix.end2end.LikeExpressionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.283 sec - in 
org.apache.phoenix.end2end.OctetLengthFunctionEnd2EndIT
Running org.apache.phoenix.end2end.PrimitiveTypeIT
Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.595 sec - in 
org.apache.phoenix.end2end.PrimitiveTypeIT
Running org.apache.phoenix.end2end.PowerFunctionEnd2EndIT
Running org.apache.phoenix.end2end.QueryMoreIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.917 sec - in 
org.apache.phoenix.end2end.PowerFunctionEnd2EndIT
Running org.apache.phoenix.end2end.RTrimFunctionIT
Tests run: 26, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 51.989 sec - 
in org.apache.phoeni

Apache-Phoenix | 4.x-HBase-0.98 | Build Successful

2016-11-17 Thread Apache Jenkins Server
4.x-HBase-0.98 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf?p=phoenix.git;a=shortlog;h=refs/heads/4.x-HBase-0.98

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.x-HBase-0.98/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.x-HBase-0.98/lastCompletedBuild/testReport/

Changes
[ssa] PHOENIX-3490 PhoenixStorageHandler doesn't handle hbase configuration



Build times for last couple of runs. Latest build time is the right-most. | Legend — blue: normal, red: test failure, gray: timeout


[2/3] phoenix git commit: PHOENIX-3391 Supporting Hive 2.1.0 in PhoenixStorageHandler

2016-11-17 Thread ssa
PHOENIX-3391 Supporting Hive 2.1.0 in PhoenixStorageHandler

Signed-off-by: Sergey Soldatov 


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/af564008
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/af564008
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/af564008

Branch: refs/heads/4.x-HBase-0.98
Commit: af564008bf8e4853cef1aea85a2e117d6756d8c0
Parents: 3f1be1a
Author: Jeongdae Kim 
Authored: Tue Nov 8 11:26:29 2016 +0900
Committer: Sergey Soldatov 
Committed: Thu Nov 17 12:51:10 2016 -0800

--
 .../phoenix/hive/mapreduce/PhoenixInputFormat.java  |  5 ++---
 .../phoenix/hive/util/PhoenixStorageHandlerUtil.java| 12 
 2 files changed, 14 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/af564008/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
index e3d0212..7e2f3d1 100644
--- 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
+++ 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
@@ -36,7 +36,6 @@ import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.*;
@@ -110,7 +109,7 @@ public class PhoenixInputFormat 
implements InputFormat columnTypeMap = 
PhoenixStorageHandlerUtil.createColumnTypeMap
 (jobConf);
@@ -120,7 +119,7 @@ public class PhoenixInputFormat 
implements InputFormathttp://git-wip-us.apache.org/repos/asf/phoenix/blob/af564008/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
index 0dd1134..18799a5 100644
--- 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
+++ 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hive.ql.io.AcidOutputFormat.Options;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.net.DNS;
 import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
@@ -46,7 +47,10 @@ import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.Collections;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Properties;
@@ -205,6 +209,14 @@ public class PhoenixStorageHandlerUtil {
 return columnTypeMap;
 }
 
+public static List getReadColumnNames(Configuration conf) {
+String colNames = 
conf.get(ColumnProjectionUtils.READ_COLUMN_NAMES_CONF_STR);
+if (colNames != null && !colNames.isEmpty()) {
+return 
Arrays.asList(colNames.split(PhoenixStorageHandlerConstants.COMMA));
+}
+return Collections.EMPTY_LIST;
+}
+
 public static boolean isTransactionalTable(Properties tableProperties) {
 String tableIsTransactional = 
tableProperties.getProperty(hive_metastoreConstants
 .TABLE_IS_TRANSACTIONAL);



[1/3] phoenix git commit: PHOENIX-3391 Supporting Hive 2.1.0 in PhoenixStorageHandler

2016-11-17 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 3f1be1a9c -> af564008b
  refs/heads/4.x-HBase-1.1 cd931c0b3 -> 27c24682d
  refs/heads/master 659671a41 -> d59f0e539


PHOENIX-3391 Supporting Hive 2.1.0 in PhoenixStorageHandler

Signed-off-by: Sergey Soldatov 


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d59f0e53
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d59f0e53
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d59f0e53

Branch: refs/heads/master
Commit: d59f0e539836adad3c38dd4450b15e2c5233ca29
Parents: 659671a
Author: Jeongdae Kim 
Authored: Tue Nov 8 11:26:29 2016 +0900
Committer: Sergey Soldatov 
Committed: Thu Nov 17 12:50:37 2016 -0800

--
 .../phoenix/hive/mapreduce/PhoenixInputFormat.java  |  5 ++---
 .../phoenix/hive/util/PhoenixStorageHandlerUtil.java| 12 
 2 files changed, 14 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/d59f0e53/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
index fd6a631..7eab317 100644
--- 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
+++ 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
@@ -36,7 +36,6 @@ import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.*;
@@ -110,7 +109,7 @@ public class PhoenixInputFormat 
implements InputFormat columnTypeMap = 
PhoenixStorageHandlerUtil.createColumnTypeMap
 (jobConf);
@@ -120,7 +119,7 @@ public class PhoenixInputFormat 
implements InputFormathttp://git-wip-us.apache.org/repos/asf/phoenix/blob/d59f0e53/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
index 0dd1134..18799a5 100644
--- 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
+++ 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hive.ql.io.AcidOutputFormat.Options;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.net.DNS;
 import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
@@ -46,7 +47,10 @@ import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.Collections;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Properties;
@@ -205,6 +209,14 @@ public class PhoenixStorageHandlerUtil {
 return columnTypeMap;
 }
 
+public static List getReadColumnNames(Configuration conf) {
+String colNames = 
conf.get(ColumnProjectionUtils.READ_COLUMN_NAMES_CONF_STR);
+if (colNames != null && !colNames.isEmpty()) {
+return 
Arrays.asList(colNames.split(PhoenixStorageHandlerConstants.COMMA));
+}
+return Collections.EMPTY_LIST;
+}
+
 public static boolean isTransactionalTable(Properties tableProperties) {
 String tableIsTransactional = 
tableProperties.getProperty(hive_metastoreConstants
 .TABLE_IS_TRANSACTIONAL);



[3/3] phoenix git commit: PHOENIX-3391 Supporting Hive 2.1.0 in PhoenixStorageHandler

2016-11-17 Thread ssa
PHOENIX-3391 Supporting Hive 2.1.0 in PhoenixStorageHandler

Signed-off-by: Sergey Soldatov 


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/27c24682
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/27c24682
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/27c24682

Branch: refs/heads/4.x-HBase-1.1
Commit: 27c24682da93349c6c3b2dc0f397bd71e8a90aec
Parents: cd931c0
Author: Jeongdae Kim 
Authored: Tue Nov 8 11:26:29 2016 +0900
Committer: Sergey Soldatov 
Committed: Thu Nov 17 12:51:23 2016 -0800

--
 .../phoenix/hive/mapreduce/PhoenixInputFormat.java  |  5 ++---
 .../phoenix/hive/util/PhoenixStorageHandlerUtil.java| 12 
 2 files changed, 14 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/27c24682/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
index fd6a631..7eab317 100644
--- 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
+++ 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
@@ -36,7 +36,6 @@ import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.*;
@@ -110,7 +109,7 @@ public class PhoenixInputFormat 
implements InputFormat columnTypeMap = 
PhoenixStorageHandlerUtil.createColumnTypeMap
 (jobConf);
@@ -120,7 +119,7 @@ public class PhoenixInputFormat 
implements InputFormathttp://git-wip-us.apache.org/repos/asf/phoenix/blob/27c24682/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
index 0dd1134..18799a5 100644
--- 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
+++ 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hive.ql.io.AcidOutputFormat.Options;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.net.DNS;
 import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
@@ -46,7 +47,10 @@ import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.Collections;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Properties;
@@ -205,6 +209,14 @@ public class PhoenixStorageHandlerUtil {
 return columnTypeMap;
 }
 
+public static List getReadColumnNames(Configuration conf) {
+String colNames = 
conf.get(ColumnProjectionUtils.READ_COLUMN_NAMES_CONF_STR);
+if (colNames != null && !colNames.isEmpty()) {
+return 
Arrays.asList(colNames.split(PhoenixStorageHandlerConstants.COMMA));
+}
+return Collections.EMPTY_LIST;
+}
+
 public static boolean isTransactionalTable(Properties tableProperties) {
 String tableIsTransactional = 
tableProperties.getProperty(hive_metastoreConstants
 .TABLE_IS_TRANSACTIONAL);



Apache Phoenix - Timeout crawler - Build https://builds.apache.org/job/Phoenix-master/1496/

2016-11-17 Thread Apache Jenkins Server
[...truncated 21 lines...]
Looking at the log, list of test(s) that timed-out:

Build:
https://builds.apache.org/job/Phoenix-master/1496/


Affected test class(es):
Set(['org.apache.phoenix.iterate.RoundRobinResultIteratorIT', 
'org.apache.phoenix.end2end.HashJoinIT', 
'org.apache.phoenix.trace.PhoenixTracingEndToEndIT', 
'org.apache.phoenix.end2end.index.MutableIndexIT', 
'org.apache.phoenix.rpc.UpdateCacheIT', 
'org.apache.phoenix.end2end.index.IndexIT', 
'org.apache.phoenix.end2end.index.IndexExpressionIT', 
'org.apache.phoenix.end2end.SortMergeJoinIT'])


Build step 'Execute shell' marked build as failure
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any


phoenix git commit: PHOENIX-3449 Ignore hanging IndexToolIT tests until they can be investigated

2016-11-17 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.8-HBase-1.0 0b69d24c2 -> a06a76387


PHOENIX-3449 Ignore hanging IndexToolIT tests until they can be investigated


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a06a7638
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a06a7638
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a06a7638

Branch: refs/heads/4.8-HBase-1.0
Commit: a06a76387d501cba616005ad33a84ec511f1b0be
Parents: 0b69d24
Author: James Taylor 
Authored: Thu Nov 17 12:02:45 2016 -0800
Committer: James Taylor 
Committed: Thu Nov 17 12:07:41 2016 -0800

--
 .../org/apache/phoenix/end2end/IndexToolIT.java | 28 +---
 1 file changed, 18 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a06a7638/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java
index 16db876..7f28322 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java
@@ -37,12 +37,14 @@ import java.util.UUID;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.phoenix.mapreduce.index.IndexTool;
+import org.apache.phoenix.query.BaseTest;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.query.QueryServicesOptions;
 import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.QueryUtil;
 import org.apache.phoenix.util.ReadOnlyProps;
 import org.apache.phoenix.util.SchemaUtil;
+import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -56,7 +58,7 @@ import com.google.common.collect.Maps;
  * Tests for the {@link IndexTool}
  */
 @RunWith(Parameterized.class)
-public class IndexToolIT extends BaseOwnClusterHBaseManagedTimeIT {
+public class IndexToolIT extends BaseTest {
 
 private final String schemaName;
 private final String dataTable;
@@ -66,9 +68,14 @@ public class IndexToolIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 private final boolean directApi;
 private final String tableDDLOptions;
 
-public IndexToolIT(boolean transactional, boolean localIndex, boolean 
mutable, boolean directApi) {
-this.schemaName = "S";
-this.dataTable = "T" + (transactional ? "_TXN" : "");
+@AfterClass
+public static void doTeardown() throws Exception {
+tearDownMiniCluster();
+}
+
+public IndexToolIT(boolean transactional, boolean mutable, boolean 
localIndex, boolean directApi) {
+this.schemaName = generateRandomString();
+this.dataTable = generateRandomString();
 this.localIndex = localIndex;
 this.transactional = transactional;
 this.directApi = directApi;
@@ -88,9 +95,7 @@ public class IndexToolIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 public static void doSetup() throws Exception {
 Map serverProps = Maps.newHashMapWithExpectedSize(1);
 serverProps.put(QueryServices.EXTRA_JDBC_ARGUMENTS_ATTRIB, 
QueryServicesOptions.DEFAULT_EXTRA_JDBC_ARGUMENTS);
-Map clientProps = Maps.newHashMapWithExpectedSize(1);
-clientProps.put(QueryServices.TRANSACTIONS_ENABLED, "true");
-setUpRealDriver(new ReadOnlyProps(serverProps.entrySet().iterator()), 
new ReadOnlyProps(clientProps.entrySet().iterator()));
+setUpRealDriver(new ReadOnlyProps(serverProps.entrySet().iterator()), 
ReadOnlyProps.EMPTY_PROPS);
 }
 
 @Parameters(name="transactional = {0} , mutable = {1} , localIndex = {2}, 
directApi = {3}")
@@ -98,8 +103,10 @@ public class IndexToolIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 return Arrays.asList(new Boolean[][] { 
  { false, false, false, false }, { false, false, false, true 
}, { false, false, true, false }, { false, false, true, true }, 
  { false, true, false, false }, { false, true, false, true }, 
{ false, true, true, false }, { false, true, true, true }, 
+ /* Commenting out due to potential issue in PHOENIX-3448 and 
general flappiness
  { true, false, false, false }, { true, false, false, true }, 
{ true, false, true, false }, { true, false, true, true }, 
- { true, true, false, false }, { true, true, false, true }, { 
true, true, true, false }, { true, true, true, true }
+ { true, true, false, false }, { true, true, false, true }, { 
true, true, true, false }, { true, true, true, true } 
+ */
});
 }

[2/3] phoenix git commit: PHOENIX-3490 PhoenixStorageHandler doesn't handler hbase configuration properly and don't work in secured env

2016-11-17 Thread ssa
PHOENIX-3490 PhoenixStorageHandler doesn't handle hbase configuration properly 
and doesn't work in a secured env


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/3f1be1a9
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/3f1be1a9
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/3f1be1a9

Branch: refs/heads/4.x-HBase-0.98
Commit: 3f1be1a9cbab3647d3e915d4935855d68180b808
Parents: b62ebe0
Author: Sergey Soldatov 
Authored: Wed Nov 16 17:07:08 2016 -0800
Committer: Sergey Soldatov 
Committed: Thu Nov 17 12:07:02 2016 -0800

--
 .../phoenix/hive/PhoenixStorageHandler.java | 55 
 1 file changed, 55 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/3f1be1a9/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
index bda2282..a425b7c 100644
--- 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
+++ 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
@@ -19,7 +19,10 @@ package org.apache.phoenix.hive;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.mapred.TableMapReduceUtil;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaHook;
@@ -34,6 +37,7 @@ import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormat;
@@ -43,6 +47,7 @@ import org.apache.phoenix.hive.mapreduce.PhoenixOutputFormat;
 import org.apache.phoenix.hive.ppd.PhoenixPredicateDecomposer;
 import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
 
+import java.io.IOException;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -54,6 +59,22 @@ import java.util.Properties;
 public class PhoenixStorageHandler extends DefaultStorageHandler implements
 HiveStoragePredicateHandler, InputEstimator {
 
+
+private Configuration jobConf;
+private Configuration hbaseConf;
+
+
+@Override
+public void setConf(Configuration conf) {
+jobConf = conf;
+hbaseConf = HBaseConfiguration.create(conf);
+}
+
+@Override
+public Configuration getConf() {
+return hbaseConf;
+}
+
 private static final Log LOG = 
LogFactory.getLog(PhoenixStorageHandler.class);
 
 public PhoenixStorageHandler() {
@@ -67,6 +88,22 @@ public class PhoenixStorageHandler extends 
DefaultStorageHandler implements
 return new PhoenixMetaHook();
 }
 
+@Override
+public void configureJobConf(TableDesc tableDesc, JobConf jobConf) {
+try {
+TableMapReduceUtil.addDependencyJars(jobConf);
+
org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.addDependencyJars(jobConf,
+PhoenixStorageHandler.class);
+JobConf hbaseJobConf = new JobConf(getConf());
+
org.apache.hadoop.hbase.mapred.TableMapReduceUtil.initCredentials(hbaseJobConf);
+ShimLoader.getHadoopShims().mergeCredentials(jobConf, 
hbaseJobConf);
+} catch (IOException e) {
+throw new RuntimeException(e);
+}
+
+
+}
+
 @SuppressWarnings("rawtypes")
 @Override
 public Class getOutputFormatClass() {
@@ -167,6 +204,24 @@ public class PhoenixStorageHandler extends 
DefaultStorageHandler implements
 (PhoenixStorageHandlerConstants.ZOOKEEPER_PORT));
 jobProperties.put(HConstants.ZOOKEEPER_ZNODE_PARENT, jobProperties.get
 (PhoenixStorageHandlerConstants.ZOOKEEPER_PARENT));
+addHBaseResources(jobConf, jobProperties);
+}
+
+/**
+ * Utility method to add hbase-default.xml and hbase-site.xml properties 
to a new map
+ * if they are not already present in the jobConf.
+ * @param jobConf Job configuration
+ * @param newJobProperties  Map to which new properties should be added
+ */
+private void addHBaseResources(Configuration jobConf,
+   Map newJobProperties) {
+Configuration conf = new Confi

[3/3] phoenix git commit: PHOENIX-3490 PhoenixStorageHandler doesn't handler hbase configuration properly and don't work in secured env

2016-11-17 Thread ssa
PHOENIX-3490 PhoenixStorageHandler doesn't handler hbase configuration properly 
and don't work in secured env


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/cd931c0b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/cd931c0b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/cd931c0b

Branch: refs/heads/4.x-HBase-1.1
Commit: cd931c0b304a54019c2c7ea66186005f4f5b5e3e
Parents: 5048af4
Author: Sergey Soldatov 
Authored: Wed Nov 16 17:07:08 2016 -0800
Committer: Sergey Soldatov 
Committed: Thu Nov 17 12:07:38 2016 -0800

--
 .../phoenix/hive/PhoenixStorageHandler.java | 55 
 1 file changed, 55 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/cd931c0b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
index bda2282..a425b7c 100644
--- 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
+++ 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
@@ -19,7 +19,10 @@ package org.apache.phoenix.hive;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.mapred.TableMapReduceUtil;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaHook;
@@ -34,6 +37,7 @@ import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormat;
@@ -43,6 +47,7 @@ import org.apache.phoenix.hive.mapreduce.PhoenixOutputFormat;
 import org.apache.phoenix.hive.ppd.PhoenixPredicateDecomposer;
 import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
 
+import java.io.IOException;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -54,6 +59,22 @@ import java.util.Properties;
 public class PhoenixStorageHandler extends DefaultStorageHandler implements
 HiveStoragePredicateHandler, InputEstimator {
 
+
+private Configuration jobConf;
+private Configuration hbaseConf;
+
+
+@Override
+public void setConf(Configuration conf) {
+jobConf = conf;
+hbaseConf = HBaseConfiguration.create(conf);
+}
+
+@Override
+public Configuration getConf() {
+return hbaseConf;
+}
+
 private static final Log LOG = 
LogFactory.getLog(PhoenixStorageHandler.class);
 
 public PhoenixStorageHandler() {
@@ -67,6 +88,22 @@ public class PhoenixStorageHandler extends 
DefaultStorageHandler implements
 return new PhoenixMetaHook();
 }
 
+@Override
+public void configureJobConf(TableDesc tableDesc, JobConf jobConf) {
+try {
+TableMapReduceUtil.addDependencyJars(jobConf);
+
org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.addDependencyJars(jobConf,
+PhoenixStorageHandler.class);
+JobConf hbaseJobConf = new JobConf(getConf());
+
org.apache.hadoop.hbase.mapred.TableMapReduceUtil.initCredentials(hbaseJobConf);
+ShimLoader.getHadoopShims().mergeCredentials(jobConf, 
hbaseJobConf);
+} catch (IOException e) {
+throw new RuntimeException(e);
+}
+
+
+}
+
 @SuppressWarnings("rawtypes")
 @Override
 public Class getOutputFormatClass() {
@@ -167,6 +204,24 @@ public class PhoenixStorageHandler extends 
DefaultStorageHandler implements
 (PhoenixStorageHandlerConstants.ZOOKEEPER_PORT));
 jobProperties.put(HConstants.ZOOKEEPER_ZNODE_PARENT, jobProperties.get
 (PhoenixStorageHandlerConstants.ZOOKEEPER_PARENT));
+addHBaseResources(jobConf, jobProperties);
+}
+
+/**
+ * Utility method to add hbase-default.xml and hbase-site.xml properties 
to a new map
+ * if they are not already present in the jobConf.
+ * @param jobConf Job configuration
+ * @param newJobProperties  Map to which new properties should be added
+ */
+private void addHBaseResources(Configuration jobConf,
+   Map newJobProperties) {
+Configuration conf = new Config

[1/3] phoenix git commit: PHOENIX-3490 PhoenixStorageHandler doesn't handler hbase configuration properly and don't work in secured env

2016-11-17 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 b62ebe0c1 -> 3f1be1a9c
  refs/heads/4.x-HBase-1.1 5048af43d -> cd931c0b3
  refs/heads/master f1ecd4f7d -> 659671a41


PHOENIX-3490 PhoenixStorageHandler doesn't handler hbase configuration properly 
and don't work in secured env


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/659671a4
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/659671a4
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/659671a4

Branch: refs/heads/master
Commit: 659671a411cdbf055ce107d2e83571e66024b4f1
Parents: f1ecd4f
Author: Sergey Soldatov 
Authored: Wed Nov 16 17:07:08 2016 -0800
Committer: Sergey Soldatov 
Committed: Thu Nov 17 12:06:34 2016 -0800

--
 .../phoenix/hive/PhoenixStorageHandler.java | 55 
 1 file changed, 55 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/659671a4/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
index bda2282..a425b7c 100644
--- 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
+++ 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
@@ -19,7 +19,10 @@ package org.apache.phoenix.hive;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.mapred.TableMapReduceUtil;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaHook;
@@ -34,6 +37,7 @@ import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormat;
@@ -43,6 +47,7 @@ import org.apache.phoenix.hive.mapreduce.PhoenixOutputFormat;
 import org.apache.phoenix.hive.ppd.PhoenixPredicateDecomposer;
 import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
 
+import java.io.IOException;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -54,6 +59,22 @@ import java.util.Properties;
 public class PhoenixStorageHandler extends DefaultStorageHandler implements
 HiveStoragePredicateHandler, InputEstimator {
 
+
+private Configuration jobConf;
+private Configuration hbaseConf;
+
+
+@Override
+public void setConf(Configuration conf) {
+jobConf = conf;
+hbaseConf = HBaseConfiguration.create(conf);
+}
+
+@Override
+public Configuration getConf() {
+return hbaseConf;
+}
+
 private static final Log LOG = 
LogFactory.getLog(PhoenixStorageHandler.class);
 
 public PhoenixStorageHandler() {
@@ -67,6 +88,22 @@ public class PhoenixStorageHandler extends 
DefaultStorageHandler implements
 return new PhoenixMetaHook();
 }
 
+@Override
+public void configureJobConf(TableDesc tableDesc, JobConf jobConf) {
+try {
+TableMapReduceUtil.addDependencyJars(jobConf);
+
org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.addDependencyJars(jobConf,
+PhoenixStorageHandler.class);
+JobConf hbaseJobConf = new JobConf(getConf());
+
org.apache.hadoop.hbase.mapred.TableMapReduceUtil.initCredentials(hbaseJobConf);
+ShimLoader.getHadoopShims().mergeCredentials(jobConf, 
hbaseJobConf);
+} catch (IOException e) {
+throw new RuntimeException(e);
+}
+
+
+}
+
 @SuppressWarnings("rawtypes")
 @Override
 public Class getOutputFormatClass() {
@@ -167,6 +204,24 @@ public class PhoenixStorageHandler extends 
DefaultStorageHandler implements
 (PhoenixStorageHandlerConstants.ZOOKEEPER_PORT));
 jobProperties.put(HConstants.ZOOKEEPER_ZNODE_PARENT, jobProperties.get
 (PhoenixStorageHandlerConstants.ZOOKEEPER_PARENT));
+addHBaseResources(jobConf, jobProperties);
+}
+
+/**
+ * Utility method to add hbase-default.xml and hbase-site.xml properties 
to a new map
+ * if they are not already present in the jobConf.
+ * @param jobConf Job configuration
+ * @param newJobProperties  Map to which new properties should 

phoenix git commit: PHOENIX-3449 Ignore hanging IndexToolIT tests until they can be investigated

2016-11-17 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.8-HBase-0.98 a083e616c -> c246b021e


PHOENIX-3449 Ignore hanging IndexToolIT tests until they can be investigated


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c246b021
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c246b021
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c246b021

Branch: refs/heads/4.8-HBase-0.98
Commit: c246b021edf386abc6854a6c2599f09cef5e68af
Parents: a083e61
Author: James Taylor 
Authored: Thu Nov 17 12:06:10 2016 -0800
Committer: James Taylor 
Committed: Thu Nov 17 12:06:10 2016 -0800

--
 .../src/it/java/org/apache/phoenix/end2end/IndexToolIT.java   | 3 ---
 1 file changed, 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c246b021/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java
index 3832ae8..7f28322 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java
@@ -112,9 +112,6 @@ public class IndexToolIT extends BaseTest {
 
 @Test
 public void testSecondaryIndex() throws Exception {
-//if (localIndex) { // FIXME: remove once this test works for local 
indexes
-//return;
-//}
 final String fullTableName = SchemaUtil.getTableName(schemaName, 
dataTable);
 final String indxTable = String.format("%s_%s", dataTable, "INDX");
 Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);



phoenix git commit: PHOENIX-3449 Ignore hanging IndexToolIT tests until they can be investigated

2016-11-17 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.8-HBase-0.98 ebe8724ad -> a083e616c


PHOENIX-3449 Ignore hanging IndexToolIT tests until they can be investigated


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a083e616
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a083e616
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a083e616

Branch: refs/heads/4.8-HBase-0.98
Commit: a083e616c40bc903ebfbae55fdb1fac2672b8e1c
Parents: ebe8724
Author: James Taylor 
Authored: Thu Nov 17 12:02:45 2016 -0800
Committer: James Taylor 
Committed: Thu Nov 17 12:02:45 2016 -0800

--
 .../org/apache/phoenix/end2end/IndexToolIT.java | 31 +---
 1 file changed, 21 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a083e616/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java
index 16db876..3832ae8 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java
@@ -37,12 +37,14 @@ import java.util.UUID;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.phoenix.mapreduce.index.IndexTool;
+import org.apache.phoenix.query.BaseTest;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.query.QueryServicesOptions;
 import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.QueryUtil;
 import org.apache.phoenix.util.ReadOnlyProps;
 import org.apache.phoenix.util.SchemaUtil;
+import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -56,7 +58,7 @@ import com.google.common.collect.Maps;
  * Tests for the {@link IndexTool}
  */
 @RunWith(Parameterized.class)
-public class IndexToolIT extends BaseOwnClusterHBaseManagedTimeIT {
+public class IndexToolIT extends BaseTest {
 
 private final String schemaName;
 private final String dataTable;
@@ -66,9 +68,14 @@ public class IndexToolIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 private final boolean directApi;
 private final String tableDDLOptions;
 
-public IndexToolIT(boolean transactional, boolean localIndex, boolean 
mutable, boolean directApi) {
-this.schemaName = "S";
-this.dataTable = "T" + (transactional ? "_TXN" : "");
+@AfterClass
+public static void doTeardown() throws Exception {
+tearDownMiniCluster();
+}
+
+public IndexToolIT(boolean transactional, boolean mutable, boolean 
localIndex, boolean directApi) {
+this.schemaName = generateRandomString();
+this.dataTable = generateRandomString();
 this.localIndex = localIndex;
 this.transactional = transactional;
 this.directApi = directApi;
@@ -88,9 +95,7 @@ public class IndexToolIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 public static void doSetup() throws Exception {
 Map serverProps = Maps.newHashMapWithExpectedSize(1);
 serverProps.put(QueryServices.EXTRA_JDBC_ARGUMENTS_ATTRIB, 
QueryServicesOptions.DEFAULT_EXTRA_JDBC_ARGUMENTS);
-Map clientProps = Maps.newHashMapWithExpectedSize(1);
-clientProps.put(QueryServices.TRANSACTIONS_ENABLED, "true");
-setUpRealDriver(new ReadOnlyProps(serverProps.entrySet().iterator()), 
new ReadOnlyProps(clientProps.entrySet().iterator()));
+setUpRealDriver(new ReadOnlyProps(serverProps.entrySet().iterator()), 
ReadOnlyProps.EMPTY_PROPS);
 }
 
 @Parameters(name="transactional = {0} , mutable = {1} , localIndex = {2}, 
directApi = {3}")
@@ -98,17 +103,21 @@ public class IndexToolIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 return Arrays.asList(new Boolean[][] { 
  { false, false, false, false }, { false, false, false, true 
}, { false, false, true, false }, { false, false, true, true }, 
  { false, true, false, false }, { false, true, false, true }, 
{ false, true, true, false }, { false, true, true, true }, 
+ /* Commenting out due to potential issue in PHOENIX-3448 and 
general flappiness
  { true, false, false, false }, { true, false, false, true }, 
{ true, false, true, false }, { true, false, true, true }, 
- { true, true, false, false }, { true, true, false, true }, { 
true, true, true, false }, { true, true, true, true }
+ { true, true, false, false }, { true, true, false, true }, { 
true, true, true, false }, { true, true, true, true } 
+ */
});
 }
 

Apache Phoenix - Timeout crawler - Build https://builds.apache.org/job/Phoenix-calcite/41/

2016-11-17 Thread Apache Jenkins Server
[...truncated 64 lines...]
Looking at the log, list of test(s) that timed-out:

Build:
https://builds.apache.org/job/Phoenix-calcite/41/


Affected test class(es):
Set(['org.apache.phoenix.end2end.ArrayIT', 
'org.apache.phoenix.end2end.QueryWithLimitIT', 
'org.apache.phoenix.end2end.QueryTimeoutIT', 
'org.apache.phoenix.end2end.AggregateQueryIT', 
'org.apache.phoenix.end2end.CastAndCoerceIT', 
'org.apache.phoenix.end2end.IndexExtendedIT', 
'org.apache.phoenix.end2end.RenewLeaseIT', 
'org.apache.phoenix.end2end.CsvBulkLoadToolIT', 
'org.apache.phoenix.end2end.ClientTimeArithmeticQueryIT', 
'org.apache.phoenix.end2end.SortMergeJoinIT', 
'org.apache.phoenix.end2end.CaseStatementIT'])


Build step 'Execute shell' marked build as failure
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any


Build failed in Jenkins: Phoenix-Calcite #41

2016-11-17 Thread Apache Jenkins Server
See 

Changes:

[gabrielr] Fix extension of SqlParserTest

--
[...truncated 53825 lines...]
"LOCALTIMESTAMP" ...
"LOWER" ...
"MAX" ...
"MIN" ...
"MOD" ...
"NULLIF" ...
"OCTET_LENGTH" ...
"PERCENT_RANK" ...
"POWER" ...
"RANK" ...
"REGR_SXX" ...
"REGR_SYY" ...
"ROW_NUMBER" ...
"SQRT" ...
"STDDEV_POP" ...
"STDDEV_SAMP" ...
"SUM" ...
"UPPER" ...
"VAR_POP" ...
"VAR_SAMP" ...
"CURRENT_CATALOG" ...
"CURRENT_DEFAULT_TRANSFORM_GROUP" ...
"CURRENT_PATH" ...
"CURRENT_ROLE" ...
"CURRENT_SCHEMA" ...
"CURRENT_USER" ...
"SESSION_USER" ...
"SYSTEM_USER" ...
"USER" ...
"NEW" ...
"CASE" ...
"NEXT" ...
"CURRENT" ...
"CURSOR" ...
"ROW" ...
"(" ...
"SELECT" ...
"VALUES" ...
"TABLE" ...
"EXPLAIN" ...
"DESCRIBE" ...
"INSERT" ...
"UPSERT" ...
"DELETE" ...
"UPDATE" ...
"MERGE" ...
"CALL" ...
"COMMIT" ...
"CREATE" "VIEW" ...
"CREATE" "TABLE" ...
"CREATE" "LOCAL" ...
"CREATE" "INDEX" ...
"CREATE" "SEQUENCE" ...
"DROP" ...
"CREATE" "OR" ...
"CREATE" "TEMPORARY" ...
"CREATE" "FUNCTION" ...
"UPLOAD" ...

at 
org.apache.phoenix.end2end.CreateSchemaIT.testCreateSchema(CreateSchemaIT.java:50)
Caused by: org.apache.phoenix.calcite.parser.ParseException: 
Encountered "CREATE SCHEMA" at line 1, column 1.
Was expecting one of:
"SET" ...
"RESET" ...
"ALTER" ...
"WITH" ...
"+" ...
"-" ...
"NOT" ...
"EXISTS" ...
 ...
 ...
 ...
 ...
 ...
 ...
 ...
"TRUE" ...
"FALSE" ...
"UNKNOWN" ...
"NULL" ...
 ...
 ...
 ...
"DATE" ...
"TIME" ...
"TIMESTAMP" ...
"INTERVAL" ...
"?" ...
"CAST" ...
"EXTRACT" ...
"POSITION" ...
"CONVERT" ...
"TRANSLATE" ...
"OVERLAY" ...
"FLOOR" ...
"CEIL" ...
"CEILING" ...
"SUBSTRING" ...
"TRIM" ...
 ...
"MULTISET" ...
"ARRAY" ...
"SPECIFIC" ...
 ...
 ...
 ...
 ...
 ...
"ABS" ...
"AVG" ...
"CARDINALITY" ...
"CHAR_LENGTH" ...
"CHARACTER_LENGTH" ...
"COALESCE" ...
"COLLECT" ...
"COVAR_POP" ...
"COVAR_SAMP" ...
"CUME_DIST" ...
"COUNT" ...
"CURRENT_DATE" ...
"CURRENT_TIME" ...
"CURRENT_TIMESTAMP" ...
"DENSE_RANK" ...
"ELEMENT" ...
"EXP" ...
"FIRST_VALUE" ...
"FUSION" ...
"GROUPING" ...
"LAST_VALUE" ...
"LN" ...
"LOCALTIME" ...
"LOCALTIMESTAMP" ...
"LOWER" ...
"MAX" ...
"MIN" ...
"MOD" ...
"NULLIF" ...
"OCTET_LENGTH" ...
"PERCENT_RANK" ...
"POWER" ...
"RANK" ...
"REGR_SXX" ...
"REGR_SYY" ...
"ROW_NUMBER" ...
"SQRT" ...
"STDDEV_POP" ...
"STDDEV_SAMP" ...
"SUM" ...
"UPPER" ...
"VAR_POP" ...
"VAR_SAMP" ...
"CURRENT_CATALOG" ...
"CURRENT_DEFAULT_TRANSFORM_GROUP" ...
"CURRENT_PATH" ...
"CURRENT_ROLE" ...
"CURRENT_SCHEMA" ...
"CURRENT_USER" ...
"SESSION_USER" ...
"SYSTEM_USER" ...
"USER" ...
"NEW" ...
"CASE" ...
"NEXT" ...
"CURRENT" ...
"CURSOR" ...
"ROW" ...
"(" ...
"SELECT" ...
"VALUES" ...
"TABLE" ...
"EXPLAIN" ...
"DESCRIBE" ...
"INSERT" ...
"UPSERT" ...
"DELETE" ...
"UPDATE" ...
"MERGE" ...
"CALL" ...
"COMMIT" ...
"CREATE" "VIEW" ...
"CREATE" "TABLE" ...
"CREATE" "LOCAL" ...
"CREATE" "INDEX" ...
"CREATE" "SEQUENCE" ...
"DROP" ...
"CREATE" "OR" ...
"CREATE" "TEMPORARY" ...
"CREATE" "FUNCTION" ...
"UPLOAD" ...

at 
org.apache.phoenix.end2end.CreateSchemaIT.testCreateSchema(CreateSchemaIT.java:50)


Results :

Tests in error: 
  ColumnProjectionOptimizationIT.testSelect:81 » SQL exception while executing 
q...
  ColumnProjectionOptimizationIT.testSelectFromViewOnExistingTable:274 » SQL 
Err...
  
ColumnProjectionOptimizationIT.testSelectWithConditionOnMultiCF:354->initMultiCFTable:326
 » SQL
  CreateSchemaIT.testCreateSchema:50 » SQL Error while executing SQL "CREATE 
SCH...

Tests run: 4, Failures: 0, Errors: 4, Skipped: 0

[INFO] 
[INFO] --- maven-failsafe-plugin:2.19.1:integration-test 
(HBaseManagedTimeTests) @ phoenix-core ---

---
 T E S T S
---
Running org.apache.phoenix.calcite.CalciteLocalIndexIT
Running org.apache.phoenix.calcite.CalciteGlobalIndexIT
Running org.apache.phoenix.calcite.CalciteIT
Running org.apache.phoenix.calcite.CalciteDMLIT
Running org.apache.phoenix.calcite.CalciteDDLIT
Tests run: 6, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: 26.65 sec - in 
org.apache.phoenix.calcite.CalciteDMLIT
Tests run: 11, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: 82.141 se

phoenix git commit: Fix extension of SqlParserTest

2016-11-17 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/calcite e003c579c -> 99b8a02b5


Fix extension of SqlParserTest

Override the appropriate method of SqlParserTest in
PhoenixSqlParserTest.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/99b8a02b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/99b8a02b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/99b8a02b

Branch: refs/heads/calcite
Commit: 99b8a02b5ea15f6c14ba5a0a72059a9554862003
Parents: e003c57
Author: Gabriel Reid 
Authored: Thu Nov 17 19:00:50 2016 +0100
Committer: Gabriel Reid 
Committed: Thu Nov 17 19:00:50 2016 +0100

--
 .../org/apache/phoenix/calcite/PhoenixSqlParserTest.java | 8 +++-
 1 file changed, 3 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/99b8a02b/phoenix-core/src/test/java/org/apache/phoenix/calcite/PhoenixSqlParserTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/calcite/PhoenixSqlParserTest.java
 
b/phoenix-core/src/test/java/org/apache/phoenix/calcite/PhoenixSqlParserTest.java
index 45ebeb9..846a8ab 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/calcite/PhoenixSqlParserTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/calcite/PhoenixSqlParserTest.java
@@ -19,7 +19,7 @@ package org.apache.phoenix.calcite;
 
 import java.io.IOException;
 
-import org.apache.calcite.sql.parser.SqlParser;
+import org.apache.calcite.sql.parser.SqlParserImplFactory;
 import org.apache.calcite.sql.parser.SqlParserTest;
 import org.apache.phoenix.calcite.parser.PhoenixParserImpl;
 import org.junit.Test;
@@ -45,10 +45,8 @@ public class PhoenixSqlParserTest extends SqlParserTest {
 }
 
 @Override
-protected SqlParser getSqlParser(String sql) {
-return SqlParser.create(sql,
-
SqlParser.configBuilder().setParserFactory(PhoenixParserImpl.FACTORY)
-.build());
+protected SqlParserImplFactory parserImplFactory() {
+return PhoenixParserImpl.FACTORY;
 }
 
 @Override



Jenkins build is back to normal : Phoenix-4.x-HBase-1.1 #274

2016-11-17 Thread Apache Jenkins Server
See 



Build failed in Jenkins: Phoenix | Master #1495

2016-11-17 Thread Apache Jenkins Server
See 

Changes:

[samarth] PHOENIX-3482 Addendum to fix the test failure

--
[...truncated 891 lines...]
Running org.apache.phoenix.end2end.CsvBulkLoadToolIT
Running org.apache.phoenix.end2end.CountDistinctCompressionIT
Running org.apache.phoenix.end2end.ContextClassloaderIT
Running org.apache.phoenix.end2end.FlappingLocalIndexIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 47.362 sec - in 
org.apache.phoenix.end2end.ConnectionUtilIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 15.779 sec - in 
org.apache.phoenix.end2end.CountDistinctCompressionIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 15.43 sec - in 
org.apache.phoenix.end2end.ContextClassloaderIT
Running org.apache.phoenix.end2end.IndexExtendedIT
Running org.apache.phoenix.end2end.QueryTimeoutIT
Running org.apache.phoenix.end2end.QueryWithLimitIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 16.574 sec - in 
org.apache.phoenix.end2end.QueryTimeoutIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 16.687 sec - in 
org.apache.phoenix.end2end.QueryWithLimitIT
Running org.apache.phoenix.end2end.RenewLeaseIT
Running org.apache.phoenix.end2end.SpillableGroupByIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 15.633 sec - in 
org.apache.phoenix.end2end.RenewLeaseIT
Running org.apache.phoenix.end2end.StatsCollectorIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 24.377 sec - in 
org.apache.phoenix.end2end.SpillableGroupByIT
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 107.311 sec - 
in org.apache.phoenix.end2end.CsvBulkLoadToolIT
Running org.apache.phoenix.end2end.SysTableNamespaceMappedStatsCollectorIT
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 105.363 sec - 
in org.apache.phoenix.end2end.FlappingLocalIndexIT
Running org.apache.phoenix.end2end.UserDefinedFunctionsIT
Running org.apache.phoenix.end2end.index.ImmutableIndexIT
Running org.apache.phoenix.end2end.index.MutableIndexFailureIT
Running org.apache.phoenix.end2end.index.LocalIndexIT
Running org.apache.phoenix.end2end.index.MutableIndexReplicationIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 51.012 sec - in 
org.apache.phoenix.end2end.index.MutableIndexReplicationIT
Tests run: 14, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 93.07 sec - in 
org.apache.phoenix.end2end.UserDefinedFunctionsIT
Running org.apache.phoenix.end2end.index.ReadOnlyIndexFailureIT
Running org.apache.phoenix.end2end.index.txn.TxWriteFailureIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 43.225 sec - in 
org.apache.phoenix.end2end.index.ReadOnlyIndexFailureIT
Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 195.324 sec - 
in org.apache.phoenix.end2end.index.ImmutableIndexIT
Running org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
Tests run: 40, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 276.694 sec - 
in org.apache.phoenix.end2end.StatsCollectorIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 36.805 sec - in 
org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
Tests run: 40, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 271.608 sec - 
in org.apache.phoenix.end2end.SysTableNamespaceMappedStatsCollectorIT
Running 
org.apache.phoenix.hbase.index.covered.EndToEndCoveredColumnsIndexBuilderIT
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 89.12 sec - in 
org.apache.phoenix.end2end.index.txn.TxWriteFailureIT
Tests run: 80, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 379.489 sec - 
in org.apache.phoenix.end2end.IndexExtendedIT
Running org.apache.phoenix.execute.PartialCommitIT
Running org.apache.phoenix.hbase.index.covered.example.FailWithoutRetriesIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 26.474 sec - in 
org.apache.phoenix.hbase.index.covered.EndToEndCoveredColumnsIndexBuilderIT
Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.837 sec - in 
org.apache.phoenix.execute.PartialCommitIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.386 sec - in 
org.apache.phoenix.hbase.index.covered.example.FailWithoutRetriesIT
Running org.apache.phoenix.hbase.index.covered.example.EndToEndCoveredIndexingIT
Running 
org.apache.phoenix.hbase.index.covered.example.EndtoEndIndexingWithCompressionIT
Running org.apache.phoenix.iterate.RoundRobinResultIteratorWithStatsIT
Running org.apache.phoenix.iterate.ScannerLeaseRenewalIT
Running org.apache.phoenix.monitoring.PhoenixMetricsIT
Running org.apache.phoenix.rpc.PhoenixClientRpcIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 16.176 sec - in 
org.apache.phoenix.iterate.RoundRobinResultIteratorWithStatsIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 16.874 sec - in 
org.apache.phoenix.rpc.PhoenixClientRpcIT
Tests run:

Jenkins build is back to normal : Phoenix | 4.x-HBase-0.98 #1387

2016-11-17 Thread Apache Jenkins Server
See 



Apache Phoenix - Timeout crawler - Build https://builds.apache.org/job/Phoenix-4.8-HBase-1.1/52/

2016-11-17 Thread Apache Jenkins Server
[...truncated 21 lines...]
Looking at the log, list of test(s) that timed-out:

Build:
https://builds.apache.org/job/Phoenix-4.8-HBase-1.1/52/


Affected test class(es):
Set(['org.apache.phoenix.end2end.IndexToolIT', 
'org.apache.phoenix.end2end.AlterTableIT', 
'org.apache.phoenix.end2end.CsvBulkLoadToolIT', 
'org.apache.phoenix.end2end.MultiCfQueryExecIT'])


Build step 'Execute shell' marked build as failure
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any


Apache Phoenix - Timeout crawler - Build https://builds.apache.org/job/Phoenix-4.8-HBase-1.2/56/

2016-11-17 Thread Apache Jenkins Server
[...truncated 21 lines...]
Looking at the log, list of test(s) that timed-out:

Build:
https://builds.apache.org/job/Phoenix-4.8-HBase-1.2/56/


Affected test class(es):
Set(['org.apache.phoenix.end2end.IndexToolIT', 
'org.apache.phoenix.end2end.AlterTableIT', 
'org.apache.phoenix.end2end.CsvBulkLoadToolIT', 
'org.apache.phoenix.end2end.ParallelIteratorsIT', 
'org.apache.phoenix.end2end.MutableIndexToolIT'])


Build step 'Execute shell' marked build as failure
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any


Apache Phoenix - Timeout crawler - Build https://builds.apache.org/job/Phoenix-4.8-HBase-1.0/49/

2016-11-17 Thread Apache Jenkins Server
[...truncated 21 lines...]
Looking at the log, list of test(s) that timed-out:

Build:
https://builds.apache.org/job/Phoenix-4.8-HBase-1.0/49/


Affected test class(es):
Set(['org.apache.phoenix.end2end.IndexToolIT', 
'org.apache.phoenix.end2end.AlterTableIT', 
'org.apache.phoenix.end2end.CsvBulkLoadToolIT', 
'org.apache.phoenix.end2end.ParallelIteratorsIT', 
'org.apache.phoenix.end2end.MutableIndexToolIT'])


Build step 'Execute shell' marked build as failure
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any


Build failed in Jenkins: Phoenix-4.8-HBase-1.1 #51

2016-11-17 Thread Apache Jenkins Server
See 

Changes:

[jamestaylor] PHOENIX-3452 NULLS FIRST/NULL LAST should not impact whether 
GROUP BY is

[jamestaylor] PHOENIX-3469 Incorrect sort order for DESC primary key for NULLS

[jamestaylor] PHOENX-3451 Incorrect determination of preservation of order for 
an

--
[...truncated 785 lines...]

Results :

Tests in error: 
  UpgradeIT.testAcquiringAndReleasingUpgradeMutex:578 » UpgradeInProgress 
Cluste...
  LocalIndexIT.testLocalIndexRoundTrip:155 » PhoenixIO 
org.apache.phoenix.except...

Tests run: 1241, Failures: 0, Errors: 2, Skipped: 5

[INFO] 
[INFO] --- maven-failsafe-plugin:2.19.1:integration-test 
(HBaseManagedTimeTableReuseTest) @ phoenix-core ---

---
 T E S T S
---
Running org.apache.phoenix.end2end.ArithmeticQueryIT
Running org.apache.phoenix.end2end.AlterSessionIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.163 sec - in 
org.apache.phoenix.end2end.AlterSessionIT
Running org.apache.phoenix.end2end.ArraysWithNullsIT
Running org.apache.phoenix.end2end.ArrayFillFunctionIT
Running org.apache.phoenix.end2end.AbsFunctionEnd2EndIT
Running org.apache.phoenix.end2end.ArrayToStringFunctionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.046 sec - in 
org.apache.phoenix.end2end.AbsFunctionEnd2EndIT
Running org.apache.phoenix.end2end.AutoCommitIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.579 sec - in 
org.apache.phoenix.end2end.AutoCommitIT
Running org.apache.phoenix.end2end.CbrtFunctionEnd2EndIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.472 sec - in 
org.apache.phoenix.end2end.CbrtFunctionEnd2EndIT
Running org.apache.phoenix.end2end.ConvertTimezoneFunctionIT
Tests run: 26, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 25.932 sec - 
in org.apache.phoenix.end2end.ArrayFillFunctionIT
Running org.apache.phoenix.end2end.DecodeFunctionIT
Tests run: 36, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 41.943 sec - 
in org.apache.phoenix.end2end.ArrayToStringFunctionIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 27.523 sec - in 
org.apache.phoenix.end2end.ConvertTimezoneFunctionIT
Running org.apache.phoenix.end2end.DynamicFamilyIT
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 24.819 sec - in 
org.apache.phoenix.end2end.DecodeFunctionIT
Running org.apache.phoenix.end2end.DynamicUpsertIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.158 sec - in 
org.apache.phoenix.end2end.DynamicUpsertIT
Running org.apache.phoenix.end2end.FirstValueFunctionIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.386 sec - in 
org.apache.phoenix.end2end.DynamicFamilyIT
Tests run: 16, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 64.029 sec - 
in org.apache.phoenix.end2end.ArraysWithNullsIT
Running org.apache.phoenix.end2end.GetSetByteBitFunctionEnd2EndIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.218 sec - in 
org.apache.phoenix.end2end.GetSetByteBitFunctionEnd2EndIT
Running org.apache.phoenix.end2end.MD5FunctionIT
Running org.apache.phoenix.end2end.LikeExpressionIT
Running org.apache.phoenix.end2end.DistinctPrefixFilterIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.626 sec - in 
org.apache.phoenix.end2end.MD5FunctionIT
Running org.apache.phoenix.end2end.MinMaxAggregateFunctionIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 25.362 sec - in 
org.apache.phoenix.end2end.FirstValueFunctionIT
Running org.apache.phoenix.end2end.NthValueFunctionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.964 sec - in 
org.apache.phoenix.end2end.MinMaxAggregateFunctionIT
Running org.apache.phoenix.end2end.OctetLengthFunctionEnd2EndIT
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.115 sec - in 
org.apache.phoenix.end2end.LikeExpressionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.505 sec - in 
org.apache.phoenix.end2end.OctetLengthFunctionEnd2EndIT
Running org.apache.phoenix.end2end.PrimitiveTypeIT
Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.294 sec - in 
org.apache.phoenix.end2end.PrimitiveTypeIT
Running org.apache.phoenix.end2end.PowerFunctionEnd2EndIT
Running org.apache.phoenix.end2end.QueryMoreIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.864 sec - in 
org.apache.phoenix.end2end.PowerFunctionEnd2EndIT
Running org.apache.phoenix.end2end.RTrimFunctionIT
Tests run: 26, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 99.684 sec - 
in org.apache.phoenix.end2end.ArithmeticQueryIT
Running org.apache.phoenix.end2end.ReadOnlyIT
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 27.116 sec - in 
org.apache.phoenix.end2end.DistinctPrefixFilterIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time

phoenix git commit: PHOENIX-3482 Addendum to fix the test failure

2016-11-17 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/4.8-HBase-1.2 95307cd9e -> 9521c6cf1


PHOENIX-3482 Addendum to fix the test failure


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9521c6cf
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9521c6cf
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9521c6cf

Branch: refs/heads/4.8-HBase-1.2
Commit: 9521c6cf1b32ef686cd702be82eed154accb4c51
Parents: 95307cd
Author: Samarth 
Authored: Thu Nov 17 00:28:14 2016 -0800
Committer: Samarth 
Committed: Thu Nov 17 00:28:14 2016 -0800

--
 .../org/apache/phoenix/end2end/UpgradeIT.java   | 31 ++--
 1 file changed, 22 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/9521c6cf/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
index 2059c78..5280f4e 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
@@ -573,10 +573,12 @@ public class UpgradeIT extends BaseHBaseManagedTimeIT {
 ConnectionQueryServices services = null;
 byte[] mutexRowKey = SchemaUtil.getTableKey(null, 
PhoenixDatabaseMetaData.SYSTEM_CATALOG_SCHEMA,
 generateRandomString());
+boolean dropSysMutexTable = false;
 try (Connection conn = getConnection(false, null)) {
 services = conn.unwrap(PhoenixConnection.class).getQueryServices();
 assertTrue(((ConnectionQueryServicesImpl)services)
 
.acquireUpgradeMutex(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_7_0, 
mutexRowKey));
+dropSysMutexTable = true;
 try {
 ((ConnectionQueryServicesImpl)services)
 
.acquireUpgradeMutex(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_7_0, 
mutexRowKey);
@@ -586,6 +588,16 @@ public class UpgradeIT extends BaseHBaseManagedTimeIT {
 }
 
assertTrue(((ConnectionQueryServicesImpl)services).releaseUpgradeMutex(mutexRowKey));
 
assertFalse(((ConnectionQueryServicesImpl)services).releaseUpgradeMutex(mutexRowKey));
+} finally {
+// We need to drop the SYSTEM.MUTEX table else other tests calling 
acquireUpgradeMutex will unexpectedly fail because they
+// won't see the UNLOCKED cell present for their key. This cell is 
inserted into the table the first time we create the 
+// SYSTEM.MUTEX table.
+if (services != null && dropSysMutexTable) {
+try (HBaseAdmin admin = services.getAdmin()) {
+
admin.disableTable(PhoenixDatabaseMetaData.SYSTEM_MUTEX_NAME_BYTES);
+
admin.deleteTable(PhoenixDatabaseMetaData.SYSTEM_MUTEX_NAME_BYTES);
+}
+}
 }
 }
 
@@ -596,6 +608,7 @@ public class UpgradeIT extends BaseHBaseManagedTimeIT {
 final CountDownLatch latch = new CountDownLatch(2);
 final AtomicInteger numExceptions = new AtomicInteger(0);
 ConnectionQueryServices services = null;
+boolean dropSysMutexTable = false;
 try (Connection conn = getConnection(false, null)) {
 services = conn.unwrap(PhoenixConnection.class).getQueryServices();
 final byte[] mutexKey = Bytes.toBytes(generateRandomString());
@@ -612,23 +625,23 @@ public class UpgradeIT extends BaseHBaseManagedTimeIT {
 task1.get();
 task2.get();
 assertTrue("One of the threads should have acquired the mutex", 
mutexStatus1.get() || mutexStatus2.get());
+dropSysMutexTable = true;
 assertNotEquals("One and only one thread should have acquired the 
mutex ", mutexStatus1.get(),
 mutexStatus2.get());
 assertEquals("One and only one thread should have caught 
UpgradeRequiredException ", 1, numExceptions.get());
 } finally {
-if (services != null) {
-releaseUpgradeMutex(services);
+// We need to drop the SYSTEM.MUTEX table else other tests calling 
acquireUpgradeMutex will unexpectedly fail because they
+// won't see the UNLOCKED cell present for their key. This cell is 
inserted into the table the first time we create the 
+// SYSTEM.MUTEX table.
+if (services != null && dropSysMutexTable) {
+try (HBaseAdmin admin = services.getAdmin()) {
+
admin.disableTable(PhoenixDatabaseMetaData.SYSTEM_MUTEX_NAME_BYTES);
+

phoenix git commit: PHOENIX-3482 Addendum to fix the test failure

2016-11-17 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/4.8-HBase-1.1 f84f91aac -> c15a18a82


PHOENIX-3482 Addendum to fix the test failure


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c15a18a8
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c15a18a8
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c15a18a8

Branch: refs/heads/4.8-HBase-1.1
Commit: c15a18a828f767368d9882491bf4b0d15bc642e8
Parents: f84f91a
Author: Samarth 
Authored: Thu Nov 17 00:27:11 2016 -0800
Committer: Samarth 
Committed: Thu Nov 17 00:27:11 2016 -0800

--
 .../org/apache/phoenix/end2end/UpgradeIT.java   | 31 ++--
 1 file changed, 22 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c15a18a8/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
index 2059c78..5280f4e 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
@@ -573,10 +573,12 @@ public class UpgradeIT extends BaseHBaseManagedTimeIT {
 ConnectionQueryServices services = null;
 byte[] mutexRowKey = SchemaUtil.getTableKey(null, 
PhoenixDatabaseMetaData.SYSTEM_CATALOG_SCHEMA,
 generateRandomString());
+boolean dropSysMutexTable = false;
 try (Connection conn = getConnection(false, null)) {
 services = conn.unwrap(PhoenixConnection.class).getQueryServices();
 assertTrue(((ConnectionQueryServicesImpl)services)
 
.acquireUpgradeMutex(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_7_0, 
mutexRowKey));
+dropSysMutexTable = true;
 try {
 ((ConnectionQueryServicesImpl)services)
 
.acquireUpgradeMutex(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_7_0, 
mutexRowKey);
@@ -586,6 +588,16 @@ public class UpgradeIT extends BaseHBaseManagedTimeIT {
 }
 
assertTrue(((ConnectionQueryServicesImpl)services).releaseUpgradeMutex(mutexRowKey));
 
assertFalse(((ConnectionQueryServicesImpl)services).releaseUpgradeMutex(mutexRowKey));
+} finally {
+// We need to drop the SYSTEM.MUTEX table else other tests calling 
acquireUpgradeMutex will unexpectedly fail because they
+// won't see the UNLOCKED cell present for their key. This cell is 
inserted into the table the first time we create the 
+// SYSTEM.MUTEX table.
+if (services != null && dropSysMutexTable) {
+try (HBaseAdmin admin = services.getAdmin()) {
+
admin.disableTable(PhoenixDatabaseMetaData.SYSTEM_MUTEX_NAME_BYTES);
+
admin.deleteTable(PhoenixDatabaseMetaData.SYSTEM_MUTEX_NAME_BYTES);
+}
+}
 }
 }
 
@@ -596,6 +608,7 @@ public class UpgradeIT extends BaseHBaseManagedTimeIT {
 final CountDownLatch latch = new CountDownLatch(2);
 final AtomicInteger numExceptions = new AtomicInteger(0);
 ConnectionQueryServices services = null;
+boolean dropSysMutexTable = false;
 try (Connection conn = getConnection(false, null)) {
 services = conn.unwrap(PhoenixConnection.class).getQueryServices();
 final byte[] mutexKey = Bytes.toBytes(generateRandomString());
@@ -612,23 +625,23 @@ public class UpgradeIT extends BaseHBaseManagedTimeIT {
 task1.get();
 task2.get();
 assertTrue("One of the threads should have acquired the mutex", 
mutexStatus1.get() || mutexStatus2.get());
+dropSysMutexTable = true;
 assertNotEquals("One and only one thread should have acquired the 
mutex ", mutexStatus1.get(),
 mutexStatus2.get());
 assertEquals("One and only one thread should have caught 
UpgradeRequiredException ", 1, numExceptions.get());
 } finally {
-if (services != null) {
-releaseUpgradeMutex(services);
+// We need to drop the SYSTEM.MUTEX table else other tests calling 
acquireUpgradeMutex will unexpectedly fail because they
+// won't see the UNLOCKED cell present for their key. This cell is 
inserted into the table the first time we create the 
+// SYSTEM.MUTEX table.
+if (services != null && dropSysMutexTable) {
+try (HBaseAdmin admin = services.getAdmin()) {
+
admin.disableTable(PhoenixDatabaseMetaData.SYSTEM_MUTEX_NAME_BYTES);
+

phoenix git commit: PHOENIX-3482 Addendum to fix the test failure

2016-11-17 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/4.8-HBase-1.0 776ca5a34 -> 0b69d24c2


PHOENIX-3482 Addendum to fix the test failure


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/0b69d24c
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/0b69d24c
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/0b69d24c

Branch: refs/heads/4.8-HBase-1.0
Commit: 0b69d24c2dc7c862d4a4fadbc7214d93808dbdf3
Parents: 776ca5a
Author: Samarth 
Authored: Thu Nov 17 00:24:55 2016 -0800
Committer: Samarth 
Committed: Thu Nov 17 00:24:55 2016 -0800

--
 .../org/apache/phoenix/end2end/UpgradeIT.java   | 31 ++--
 1 file changed, 22 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/0b69d24c/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
index 2059c78..5280f4e 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
@@ -573,10 +573,12 @@ public class UpgradeIT extends BaseHBaseManagedTimeIT {
 ConnectionQueryServices services = null;
 byte[] mutexRowKey = SchemaUtil.getTableKey(null, 
PhoenixDatabaseMetaData.SYSTEM_CATALOG_SCHEMA,
 generateRandomString());
+boolean dropSysMutexTable = false;
 try (Connection conn = getConnection(false, null)) {
 services = conn.unwrap(PhoenixConnection.class).getQueryServices();
 assertTrue(((ConnectionQueryServicesImpl)services)
 
.acquireUpgradeMutex(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_7_0, 
mutexRowKey));
+dropSysMutexTable = true;
 try {
 ((ConnectionQueryServicesImpl)services)
 
.acquireUpgradeMutex(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_7_0, 
mutexRowKey);
@@ -586,6 +588,16 @@ public class UpgradeIT extends BaseHBaseManagedTimeIT {
 }
 
assertTrue(((ConnectionQueryServicesImpl)services).releaseUpgradeMutex(mutexRowKey));
 
assertFalse(((ConnectionQueryServicesImpl)services).releaseUpgradeMutex(mutexRowKey));
+} finally {
+// We need to drop the SYSTEM.MUTEX table else other tests calling 
acquireUpgradeMutex will unexpectedly fail because they
+// won't see the UNLOCKED cell present for their key. This cell is 
inserted into the table the first time we create the 
+// SYSTEM.MUTEX table.
+if (services != null && dropSysMutexTable) {
+try (HBaseAdmin admin = services.getAdmin()) {
+
admin.disableTable(PhoenixDatabaseMetaData.SYSTEM_MUTEX_NAME_BYTES);
+
admin.deleteTable(PhoenixDatabaseMetaData.SYSTEM_MUTEX_NAME_BYTES);
+}
+}
 }
 }
 
@@ -596,6 +608,7 @@ public class UpgradeIT extends BaseHBaseManagedTimeIT {
 final CountDownLatch latch = new CountDownLatch(2);
 final AtomicInteger numExceptions = new AtomicInteger(0);
 ConnectionQueryServices services = null;
+boolean dropSysMutexTable = false;
 try (Connection conn = getConnection(false, null)) {
 services = conn.unwrap(PhoenixConnection.class).getQueryServices();
 final byte[] mutexKey = Bytes.toBytes(generateRandomString());
@@ -612,23 +625,23 @@ public class UpgradeIT extends BaseHBaseManagedTimeIT {
 task1.get();
 task2.get();
 assertTrue("One of the threads should have acquired the mutex", 
mutexStatus1.get() || mutexStatus2.get());
+dropSysMutexTable = true;
 assertNotEquals("One and only one thread should have acquired the 
mutex ", mutexStatus1.get(),
 mutexStatus2.get());
 assertEquals("One and only one thread should have caught 
UpgradeRequiredException ", 1, numExceptions.get());
 } finally {
-if (services != null) {
-releaseUpgradeMutex(services);
+// We need to drop the SYSTEM.MUTEX table else other tests calling 
acquireUpgradeMutex will unexpectedly fail because they
+// won't see the UNLOCKED cell present for their key. This cell is 
inserted into the table the first time we create the 
+// SYSTEM.MUTEX table.
+if (services != null && dropSysMutexTable) {
+try (HBaseAdmin admin = services.getAdmin()) {
+
admin.disableTable(PhoenixDatabaseMetaData.SYSTEM_MUTEX_NAME_BYTES);
+

phoenix git commit: PHOENIX-3482 Addendum to fix the test failure

2016-11-17 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/4.8-HBase-0.98 988f349b2 -> ebe8724ad


PHOENIX-3482 Addendum to fix the test failure


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ebe8724a
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ebe8724a
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ebe8724a

Branch: refs/heads/4.8-HBase-0.98
Commit: ebe8724ada4a9421d75556bc24e2d34da701364e
Parents: 988f349
Author: Samarth 
Authored: Thu Nov 17 00:17:44 2016 -0800
Committer: Samarth 
Committed: Thu Nov 17 00:17:44 2016 -0800

--
 .../org/apache/phoenix/end2end/UpgradeIT.java   | 31 ++--
 1 file changed, 22 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ebe8724a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
index 2059c78..5280f4e 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
@@ -573,10 +573,12 @@ public class UpgradeIT extends BaseHBaseManagedTimeIT {
 ConnectionQueryServices services = null;
 byte[] mutexRowKey = SchemaUtil.getTableKey(null, 
PhoenixDatabaseMetaData.SYSTEM_CATALOG_SCHEMA,
 generateRandomString());
+boolean dropSysMutexTable = false;
 try (Connection conn = getConnection(false, null)) {
 services = conn.unwrap(PhoenixConnection.class).getQueryServices();
 assertTrue(((ConnectionQueryServicesImpl)services)
 
.acquireUpgradeMutex(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_7_0, 
mutexRowKey));
+dropSysMutexTable = true;
 try {
 ((ConnectionQueryServicesImpl)services)
 
.acquireUpgradeMutex(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_7_0, 
mutexRowKey);
@@ -586,6 +588,16 @@ public class UpgradeIT extends BaseHBaseManagedTimeIT {
 }
 
assertTrue(((ConnectionQueryServicesImpl)services).releaseUpgradeMutex(mutexRowKey));
 
assertFalse(((ConnectionQueryServicesImpl)services).releaseUpgradeMutex(mutexRowKey));
+} finally {
+// We need to drop the SYSTEM.MUTEX table else other tests calling 
acquireUpgradeMutex will unexpectedly fail because they
+// won't see the UNLOCKED cell present for their key. This cell is 
inserted into the table the first time we create the 
+// SYSTEM.MUTEX table.
+if (services != null && dropSysMutexTable) {
+try (HBaseAdmin admin = services.getAdmin()) {
+
admin.disableTable(PhoenixDatabaseMetaData.SYSTEM_MUTEX_NAME_BYTES);
+
admin.deleteTable(PhoenixDatabaseMetaData.SYSTEM_MUTEX_NAME_BYTES);
+}
+}
 }
 }
 
@@ -596,6 +608,7 @@ public class UpgradeIT extends BaseHBaseManagedTimeIT {
 final CountDownLatch latch = new CountDownLatch(2);
 final AtomicInteger numExceptions = new AtomicInteger(0);
 ConnectionQueryServices services = null;
+boolean dropSysMutexTable = false;
 try (Connection conn = getConnection(false, null)) {
 services = conn.unwrap(PhoenixConnection.class).getQueryServices();
 final byte[] mutexKey = Bytes.toBytes(generateRandomString());
@@ -612,23 +625,23 @@ public class UpgradeIT extends BaseHBaseManagedTimeIT {
 task1.get();
 task2.get();
 assertTrue("One of the threads should have acquired the mutex", 
mutexStatus1.get() || mutexStatus2.get());
+dropSysMutexTable = true;
 assertNotEquals("One and only one thread should have acquired the 
mutex ", mutexStatus1.get(),
 mutexStatus2.get());
 assertEquals("One and only one thread should have caught 
UpgradeRequiredException ", 1, numExceptions.get());
 } finally {
-if (services != null) {
-releaseUpgradeMutex(services);
+// We need to drop the SYSTEM.MUTEX table else other tests calling 
acquireUpgradeMutex will unexpectedly fail because they
+// won't see the UNLOCKED cell present for their key. This cell is 
inserted into the table the first time we create the 
+// SYSTEM.MUTEX table.
+if (services != null && dropSysMutexTable) {
+try (HBaseAdmin admin = services.getAdmin()) {
+
admin.disableTable(PhoenixDatabaseMetaData.SYSTEM_MUTEX_NAME_BYTES);
+   

phoenix git commit: PHOENIX-3482 Addendum to fix the test failure

2016-11-17 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.1 48f211bbd -> 5048af43d


PHOENIX-3482 Addendum to fix the test failure


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/5048af43
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/5048af43
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/5048af43

Branch: refs/heads/4.x-HBase-1.1
Commit: 5048af43d1622afdf525e128c1a6e0e96fb2cdf7
Parents: 48f211b
Author: Samarth 
Authored: Thu Nov 17 00:11:51 2016 -0800
Committer: Samarth 
Committed: Thu Nov 17 00:11:51 2016 -0800

--
 .../org/apache/phoenix/end2end/UpgradeIT.java   | 33 ++--
 1 file changed, 23 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/5048af43/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
index 0e5f9f2..733dab0 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
@@ -699,10 +699,12 @@ public class UpgradeIT extends ParallelStatsDisabledIT {
 ConnectionQueryServices services = null;
 byte[] mutexRowKey = SchemaUtil.getTableKey(null, 
PhoenixDatabaseMetaData.SYSTEM_CATALOG_SCHEMA,
 generateUniqueName());
+boolean dropSysMutexTable = false;
 try (Connection conn = getConnection(false, null)) {
 services = conn.unwrap(PhoenixConnection.class).getQueryServices();
 assertTrue(((ConnectionQueryServicesImpl)services)
 
.acquireUpgradeMutex(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_7_0, 
mutexRowKey));
+dropSysMutexTable = true;
 try {
 ((ConnectionQueryServicesImpl)services)
 
.acquireUpgradeMutex(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_7_0, 
mutexRowKey);
@@ -712,6 +714,16 @@ public class UpgradeIT extends ParallelStatsDisabledIT {
 }
 
assertTrue(((ConnectionQueryServicesImpl)services).releaseUpgradeMutex(mutexRowKey));
 
assertFalse(((ConnectionQueryServicesImpl)services).releaseUpgradeMutex(mutexRowKey));
+} finally {
+// We need to drop the SYSTEM.MUTEX table else other tests calling 
acquireUpgradeMutex will unexpectedly fail because they
+// won't see the UNLOCKED cell present for their key. This cell is 
inserted into the table the first time we create the 
+// SYSTEM.MUTEX table.
+if (services != null && dropSysMutexTable) {
+try (HBaseAdmin admin = services.getAdmin()) {
+
admin.disableTable(PhoenixDatabaseMetaData.SYSTEM_MUTEX_NAME_BYTES);
+
admin.deleteTable(PhoenixDatabaseMetaData.SYSTEM_MUTEX_NAME_BYTES);
+}
+}
 }
 }
 
@@ -722,9 +734,10 @@ public class UpgradeIT extends ParallelStatsDisabledIT {
 final CountDownLatch latch = new CountDownLatch(2);
 final AtomicInteger numExceptions = new AtomicInteger(0);
 ConnectionQueryServices services = null;
+final byte[] mutexKey = Bytes.toBytes(generateUniqueName());
+boolean dropSysMutexTable = false;
 try (Connection conn = getConnection(false, null)) {
 services = conn.unwrap(PhoenixConnection.class).getQueryServices();
-final byte[] mutexKey = Bytes.toBytes(generateUniqueName());
 FutureTask task1 = new FutureTask<>(new 
AcquireMutexRunnable(mutexStatus1, services, latch, numExceptions, mutexKey));
 FutureTask task2 = new FutureTask<>(new 
AcquireMutexRunnable(mutexStatus2, services, latch, numExceptions, mutexKey));
 Thread t1 = new Thread(task1);
@@ -738,23 +751,23 @@ public class UpgradeIT extends ParallelStatsDisabledIT {
 task1.get();
 task2.get();
 assertTrue("One of the threads should have acquired the mutex", 
mutexStatus1.get() || mutexStatus2.get());
+dropSysMutexTable = true;
 assertNotEquals("One and only one thread should have acquired the 
mutex ", mutexStatus1.get(),
 mutexStatus2.get());
 assertEquals("One and only one thread should have caught 
UpgradeRequiredException ", 1, numExceptions.get());
 } finally {
-if (services != null) {
-releaseUpgradeMutex(services);
+// We need to drop the SYSTEM.MUTEX table else other tests calling 
acquireUpgradeMutex will unexpectedly fail because they
+// won't se

phoenix git commit: PHOENIX-3482 Addendum to fix the test failure

2016-11-17 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/master 3f76e1180 -> f1ecd4f7d


PHOENIX-3482 Addendum to fix the test failure


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f1ecd4f7
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f1ecd4f7
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f1ecd4f7

Branch: refs/heads/master
Commit: f1ecd4f7d809c037c8b716672faf5f3b6f3d452f
Parents: 3f76e11
Author: Samarth 
Authored: Thu Nov 17 00:11:27 2016 -0800
Committer: Samarth 
Committed: Thu Nov 17 00:11:27 2016 -0800

--
 .../org/apache/phoenix/end2end/UpgradeIT.java   | 33 ++--
 1 file changed, 23 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f1ecd4f7/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
index 0e5f9f2..733dab0 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
@@ -699,10 +699,12 @@ public class UpgradeIT extends ParallelStatsDisabledIT {
 ConnectionQueryServices services = null;
 byte[] mutexRowKey = SchemaUtil.getTableKey(null, 
PhoenixDatabaseMetaData.SYSTEM_CATALOG_SCHEMA,
 generateUniqueName());
+boolean dropSysMutexTable = false;
 try (Connection conn = getConnection(false, null)) {
 services = conn.unwrap(PhoenixConnection.class).getQueryServices();
 assertTrue(((ConnectionQueryServicesImpl)services)
 
.acquireUpgradeMutex(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_7_0, 
mutexRowKey));
+dropSysMutexTable = true;
 try {
 ((ConnectionQueryServicesImpl)services)
 
.acquireUpgradeMutex(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_7_0, 
mutexRowKey);
@@ -712,6 +714,16 @@ public class UpgradeIT extends ParallelStatsDisabledIT {
 }
 
assertTrue(((ConnectionQueryServicesImpl)services).releaseUpgradeMutex(mutexRowKey));
 
assertFalse(((ConnectionQueryServicesImpl)services).releaseUpgradeMutex(mutexRowKey));
+} finally {
+// We need to drop the SYSTEM.MUTEX table else other tests calling 
acquireUpgradeMutex will unexpectedly fail because they
+// won't see the UNLOCKED cell present for their key. This cell is 
inserted into the table the first time we create the 
+// SYSTEM.MUTEX table.
+if (services != null && dropSysMutexTable) {
+try (HBaseAdmin admin = services.getAdmin()) {
+
admin.disableTable(PhoenixDatabaseMetaData.SYSTEM_MUTEX_NAME_BYTES);
+
admin.deleteTable(PhoenixDatabaseMetaData.SYSTEM_MUTEX_NAME_BYTES);
+}
+}
 }
 }
 
@@ -722,9 +734,10 @@ public class UpgradeIT extends ParallelStatsDisabledIT {
 final CountDownLatch latch = new CountDownLatch(2);
 final AtomicInteger numExceptions = new AtomicInteger(0);
 ConnectionQueryServices services = null;
+final byte[] mutexKey = Bytes.toBytes(generateUniqueName());
+boolean dropSysMutexTable = false;
 try (Connection conn = getConnection(false, null)) {
 services = conn.unwrap(PhoenixConnection.class).getQueryServices();
-final byte[] mutexKey = Bytes.toBytes(generateUniqueName());
 FutureTask task1 = new FutureTask<>(new 
AcquireMutexRunnable(mutexStatus1, services, latch, numExceptions, mutexKey));
 FutureTask task2 = new FutureTask<>(new 
AcquireMutexRunnable(mutexStatus2, services, latch, numExceptions, mutexKey));
 Thread t1 = new Thread(task1);
@@ -738,23 +751,23 @@ public class UpgradeIT extends ParallelStatsDisabledIT {
 task1.get();
 task2.get();
 assertTrue("One of the threads should have acquired the mutex", 
mutexStatus1.get() || mutexStatus2.get());
+dropSysMutexTable = true;
 assertNotEquals("One and only one thread should have acquired the 
mutex ", mutexStatus1.get(),
 mutexStatus2.get());
 assertEquals("One and only one thread should have caught 
UpgradeRequiredException ", 1, numExceptions.get());
 } finally {
-if (services != null) {
-releaseUpgradeMutex(services);
+// We need to drop the SYSTEM.MUTEX table else other tests calling 
acquireUpgradeMutex will unexpectedly fail because they
+// won't see the UNLOCKED

phoenix git commit: PHOENIX-3482 Addendum to fix the test failure

2016-11-17 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 487287297 -> b62ebe0c1


PHOENIX-3482 Addendum to fix the test failure


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b62ebe0c
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b62ebe0c
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b62ebe0c

Branch: refs/heads/4.x-HBase-0.98
Commit: b62ebe0c1799735f298e6539732053aa53a9d2b3
Parents: 4872872
Author: Samarth 
Authored: Thu Nov 17 00:10:59 2016 -0800
Committer: Samarth 
Committed: Thu Nov 17 00:10:59 2016 -0800

--
 .../org/apache/phoenix/end2end/UpgradeIT.java   | 33 ++--
 1 file changed, 23 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b62ebe0c/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
index 0e5f9f2..733dab0 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
@@ -699,10 +699,12 @@ public class UpgradeIT extends ParallelStatsDisabledIT {
 ConnectionQueryServices services = null;
 byte[] mutexRowKey = SchemaUtil.getTableKey(null, 
PhoenixDatabaseMetaData.SYSTEM_CATALOG_SCHEMA,
 generateUniqueName());
+boolean dropSysMutexTable = false;
 try (Connection conn = getConnection(false, null)) {
 services = conn.unwrap(PhoenixConnection.class).getQueryServices();
 assertTrue(((ConnectionQueryServicesImpl)services)
 
.acquireUpgradeMutex(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_7_0, 
mutexRowKey));
+dropSysMutexTable = true;
 try {
 ((ConnectionQueryServicesImpl)services)
 
.acquireUpgradeMutex(MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_7_0, 
mutexRowKey);
@@ -712,6 +714,16 @@ public class UpgradeIT extends ParallelStatsDisabledIT {
 }
 
assertTrue(((ConnectionQueryServicesImpl)services).releaseUpgradeMutex(mutexRowKey));
 
assertFalse(((ConnectionQueryServicesImpl)services).releaseUpgradeMutex(mutexRowKey));
+} finally {
+// We need to drop the SYSTEM.MUTEX table else other tests calling 
acquireUpgradeMutex will unexpectedly fail because they
+// won't see the UNLOCKED cell present for their key. This cell is 
inserted into the table the first time we create the 
+// SYSTEM.MUTEX table.
+if (services != null && dropSysMutexTable) {
+try (HBaseAdmin admin = services.getAdmin()) {
+
admin.disableTable(PhoenixDatabaseMetaData.SYSTEM_MUTEX_NAME_BYTES);
+
admin.deleteTable(PhoenixDatabaseMetaData.SYSTEM_MUTEX_NAME_BYTES);
+}
+}
 }
 }
 
@@ -722,9 +734,10 @@ public class UpgradeIT extends ParallelStatsDisabledIT {
 final CountDownLatch latch = new CountDownLatch(2);
 final AtomicInteger numExceptions = new AtomicInteger(0);
 ConnectionQueryServices services = null;
+final byte[] mutexKey = Bytes.toBytes(generateUniqueName());
+boolean dropSysMutexTable = false;
 try (Connection conn = getConnection(false, null)) {
 services = conn.unwrap(PhoenixConnection.class).getQueryServices();
-final byte[] mutexKey = Bytes.toBytes(generateUniqueName());
 FutureTask task1 = new FutureTask<>(new 
AcquireMutexRunnable(mutexStatus1, services, latch, numExceptions, mutexKey));
 FutureTask task2 = new FutureTask<>(new 
AcquireMutexRunnable(mutexStatus2, services, latch, numExceptions, mutexKey));
 Thread t1 = new Thread(task1);
@@ -738,23 +751,23 @@ public class UpgradeIT extends ParallelStatsDisabledIT {
 task1.get();
 task2.get();
 assertTrue("One of the threads should have acquired the mutex", 
mutexStatus1.get() || mutexStatus2.get());
+dropSysMutexTable = true;
 assertNotEquals("One and only one thread should have acquired the 
mutex ", mutexStatus1.get(),
 mutexStatus2.get());
 assertEquals("One and only one thread should have caught 
UpgradeRequiredException ", 1, numExceptions.get());
 } finally {
-if (services != null) {
-releaseUpgradeMutex(services);
+// We need to drop the SYSTEM.MUTEX table else other tests calling 
acquireUpgradeMutex will unexpectedly fail because they
+// won't