[1/3] phoenix git commit: PHOENIX-2056 Ensure PK column from base table is added to any indexes on views

2015-06-24 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 4a7022066 -> 6e56eddc9


PHOENIX-2056 Ensure PK column from base table is added to any indexes on views


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b95907ef
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b95907ef
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b95907ef

Branch: refs/heads/4.x-HBase-0.98
Commit: b95907ef64c3d93c8c263d5a9f5b1533e4256ee6
Parents: 4a70220
Author: Samarth samarth.j...@salesforce.com
Authored: Tue Jun 23 11:07:36 2015 -0700
Committer: Samarth samarth.j...@salesforce.com
Committed: Tue Jun 23 11:07:36 2015 -0700

--
 .../apache/phoenix/end2end/AlterTableIT.java| 170 ++-
 .../coprocessor/MetaDataEndpointImpl.java   | 145 +++-
 .../java/org/apache/phoenix/util/ByteUtil.java  |  10 +-
 3 files changed, 305 insertions(+), 20 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b95907ef/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
index 61dd6a9..1302c60 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
@@ -2303,13 +2303,23 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 
 String alterBaseTable = ALTER TABLE  + baseTable +  ADD NEW_PK 
varchar primary key ;
 globalConn.createStatement().execute(alterBaseTable);
-
+
 // verify that the new column new_pk is now part of the primary 
key for the entire hierarchy
-
assertTrue(checkColumnPartOfPk(globalConn.unwrap(PhoenixConnection.class), 
PK1, baseTable));
-
assertTrue(checkColumnPartOfPk(tenant1Conn.unwrap(PhoenixConnection.class), 
PK1, view1));
-
assertTrue(checkColumnPartOfPk(tenant1Conn.unwrap(PhoenixConnection.class), 
PK1, view2));
-
assertTrue(checkColumnPartOfPk(tenant2Conn.unwrap(PhoenixConnection.class), 
PK1, view3));
-
assertTrue(checkColumnPartOfPk(globalConn.unwrap(PhoenixConnection.class), 
PK1, view4));
+
+globalConn.createStatement().execute(SELECT * FROM  + baseTable);
+
assertTrue(checkColumnPartOfPk(globalConn.unwrap(PhoenixConnection.class), 
NEW_PK, baseTable));
+
+tenant1Conn.createStatement().execute(SELECT * FROM  + view1);
+
assertTrue(checkColumnPartOfPk(tenant1Conn.unwrap(PhoenixConnection.class), 
NEW_PK, view1));
+
+tenant1Conn.createStatement().execute(SELECT * FROM  + view2);
+
assertTrue(checkColumnPartOfPk(tenant1Conn.unwrap(PhoenixConnection.class), 
NEW_PK, view2));
+
+tenant2Conn.createStatement().execute(SELECT * FROM  + view3);
+
assertTrue(checkColumnPartOfPk(tenant2Conn.unwrap(PhoenixConnection.class), 
NEW_PK, view3));
+
+globalConn.createStatement().execute(SELECT * FROM  + view4);
+
assertTrue(checkColumnPartOfPk(globalConn.unwrap(PhoenixConnection.class), 
NEW_PK, view4));
 
 } finally {
 if (tenant1Conn != null) {
@@ -2344,4 +2354,152 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 tenantProps.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tenantId);
 return DriverManager.getConnection(getUrl(), tenantProps);
 }
+
+@Test
+public void testAddPKColumnToBaseTableWhoseViewsHaveIndices() throws 
Exception {
+String baseTable = testAddPKColumnToBaseTableWhoseViewsHaveIndices;
+String view1 = view1;
+String view2 = view2;
+String view3 = view3;
+String tenant1 = tenant1;
+String tenant2 = tenant2;
+String view2Index = view2 + _idx;
+String view3Index = view3 + _idx;
+/*  baseTable(multi-tenant)
+ /   \
+ view1(tenant1)  view3(tenant2, index) 
+  /
+view2(tenant1, index)  
+ */
+try (Connection globalConn = DriverManager.getConnection(getUrl())) {
+// make sure that the tables are empty, but reachable
+globalConn
+.createStatement()
+.execute(
+CREATE TABLE 
++ baseTable
++  (TENANT_ID VARCHAR NOT NULL, K1 varchar not 
null, 

[2/3] phoenix git commit: PHOENIX-2055 Allow view with views to add column

2015-06-24 Thread samarth
PHOENIX-2055 Allow view with views to add column


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/97f0d626
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/97f0d626
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/97f0d626

Branch: refs/heads/4.x-HBase-0.98
Commit: 97f0d626079321fff78d1b117dbc3cfc16d7e2ee
Parents: b95907e
Author: Samarth samarth.j...@salesforce.com
Authored: Tue Jun 23 15:33:17 2015 -0700
Committer: Samarth samarth.j...@salesforce.com
Committed: Tue Jun 23 15:33:17 2015 -0700

--
 .../apache/phoenix/end2end/AlterTableIT.java| 16 +++---
 .../org/apache/phoenix/end2end/UpgradeIT.java   | 12 ++---
 .../coprocessor/MetaDataEndpointImpl.java   | 51 
 .../apache/phoenix/query/QueryConstants.java|  2 +-
 .../org/apache/phoenix/util/UpgradeUtil.java|  6 +--
 5 files changed, 50 insertions(+), 37 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/97f0d626/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
index 1302c60..3a9517d 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
@@ -2127,8 +2127,8 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 }
 
 @Test
-public void testAlteringViewThatHasChildViewsNotAllowed() throws Exception 
{
-String baseTable = testAlteringViewThatHasChildViewsNotAllowed;
+public void testAlteringViewThatHasChildViews() throws Exception {
+String baseTable = testAlteringViewThatHasChildViews;
 String childView = childView;
 String grandChildView = grandChildView;
 try (Connection conn = DriverManager.getConnection(getUrl())) {
@@ -2165,13 +2165,17 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 
assertEquals(SQLExceptionCode.CANNOT_MUTATE_TABLE.getErrorCode(), 
e.getErrorCode());
 }
 
-// Adding column to view that has child views should fail
+// Adding column to view that has child views is allowed
 String addColumnToChildView = ALTER VIEW  + childView +  ADD V5 
VARCHAR;
+conn.createStatement().execute(addColumnToChildView);
+// V5 column should be visible now for childView
+conn.createStatement().execute(SELECT V5 FROM  + childView);
+
+// However, column V5 shouldn't have propagated to grandChildView. 
Not till PHOENIX-2054 is fixed.
 try {
-conn.createStatement().execute(addColumnToChildView);
-fail(Adding columns to a view that has child views on it is 
not allowed);
+conn.createStatement().execute(SELECT V5 FROM  + 
grandChildView);
 } catch (SQLException e) {
-
assertEquals(SQLExceptionCode.CANNOT_MUTATE_TABLE.getErrorCode(), 
e.getErrorCode());
+assertEquals(SQLExceptionCode.COLUMN_NOT_FOUND.getErrorCode(), 
e.getErrorCode());
 }
 
 // dropping column from the grand child view, however, should work.

http://git-wip-us.apache.org/repos/asf/phoenix/blob/97f0d626/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
index 886e567..094816c 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UpgradeIT.java
@@ -20,7 +20,7 @@ package org.apache.phoenix.end2end;
 import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
 import static 
org.apache.phoenix.query.QueryConstants.BASE_TABLE_BASE_COLUMN_COUNT;
-import static 
org.apache.phoenix.query.QueryConstants.DIVORCED_VIEW_BASE_COLUMN_COUNT;
+import static 
org.apache.phoenix.query.QueryConstants.DIVERGED_VIEW_BASE_COLUMN_COUNT;
 import static 
org.apache.phoenix.util.UpgradeUtil.SELECT_BASE_COLUMN_COUNT_FROM_HEADER_ROW;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
@@ -187,14 +187,14 @@ public class UpgradeIT extends BaseHBaseManagedTimeIT {
 for (int i = 1; i <= 2 ; i++) {
 String tenantId = tenant + i;
 checkBaseColumnCount(tenantId, null, 

[3/3] phoenix git commit: PHOENIX-2056 Additional test to verify index usage after adding pk columns

2015-06-24 Thread samarth
PHOENIX-2056 Additional test to verify index usage after adding pk columns


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6e56eddc
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6e56eddc
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6e56eddc

Branch: refs/heads/4.x-HBase-0.98
Commit: 6e56eddc984fc1abe9dadba91954cc148e7455bf
Parents: 97f0d62
Author: Samarth samarth.j...@salesforce.com
Authored: Wed Jun 24 00:07:32 2015 -0700
Committer: Samarth samarth.j...@salesforce.com
Committed: Wed Jun 24 00:07:32 2015 -0700

--
 .../apache/phoenix/end2end/AlterTableIT.java| 28 +++-
 1 file changed, 21 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6e56eddc/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
index 3a9517d..946aaab 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
@@ -46,10 +46,12 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeepDeletedCells;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.compile.QueryPlan;
 import org.apache.phoenix.coprocessor.MetaDataProtocol;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
+import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.schema.PColumn;
 import org.apache.phoenix.schema.PTable;
@@ -2402,7 +2404,6 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 tenantConn.createStatement().execute(CREATE INDEX  + view2Index 
+  ON  + view2 +  (v1) include (v2));
 assertEquals(0, getTableSequenceNumber(phxConn, view2Index));
 assertEquals(4, getMaxKeySequenceNumber(phxConn, view2Index));
-
 }
 try (Connection tenantConn = getTenantConnection(tenant2)) {
 // create tenant specific view for tenant2 - view3
@@ -2410,13 +2411,13 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 PhoenixConnection phxConn = 
tenantConn.unwrap(PhoenixConnection.class);
 assertEquals(0, getTableSequenceNumber(phxConn, view3));
 assertEquals(2, getMaxKeySequenceNumber(phxConn, view3));
-
+
 
 // create an index on view3
 tenantConn.createStatement().execute(CREATE INDEX  + view3Index 
+  ON  + view3 +  (v1) include (v2));
 assertEquals(0, getTableSequenceNumber(phxConn, view3Index));
 assertEquals(4, getMaxKeySequenceNumber(phxConn, view3Index));
-
+
 
 }
 
@@ -2424,7 +2425,7 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 try (Connection globalConn = DriverManager.getConnection(getUrl())) {
 globalConn.createStatement().execute(ALTER TABLE  + baseTable + 
 ADD v3 VARCHAR, k2 VARCHAR PRIMARY KEY, k3 VARCHAR PRIMARY KEY);
 assertEquals(4, 
getMaxKeySequenceNumber(globalConn.unwrap(PhoenixConnection.class), baseTable));
-
+
 // Upsert records in the base table
 String upsert = UPSERT INTO  + baseTable +  (TENANT_ID, K1, K2, 
K3, V1, V2, V3) VALUES (?, ?, ?, ?, ?, ?, ?);
 PreparedStatement stmt = globalConn.prepareStatement(upsert);
@@ -2458,7 +2459,7 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 assertEquals(1, getTableSequenceNumber(phxConn, view1));
 assertEquals(4, getMaxKeySequenceNumber(phxConn, view1));
 verifyNewColumns(rs, K2, K3, V3);
-
+
 
 rs = tenantConn.createStatement().executeQuery(SELECT K2, K3, V3 
FROM  + view2);
 assertTrue(checkColumnPartOfPk(phxConn, k2, view2));
@@ -2466,7 +2467,7 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 assertEquals(1, getTableSequenceNumber(phxConn, view2));
 assertEquals(4, getMaxKeySequenceNumber(phxConn, view2));
 verifyNewColumns(rs, K2, K3, V3);
-
+
 assertTrue(checkColumnPartOfPk(phxConn, 
IndexUtil.getIndexColumnName(null, k2), view2Index));
 assertTrue(checkColumnPartOfPk(phxConn, 
IndexUtil.getIndexColumnName(null, k3), view2Index));
 assertEquals(1, getTableSequenceNumber(phxConn, 

[03/16] phoenix git commit: PHOENIX-2014 WHERE search condition ignored when also using row value constructor in view

2015-06-24 Thread tdsilva
PHOENIX-2014 WHERE search condition ignored when also using row value 
constructor in view


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/14d11b13
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/14d11b13
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/14d11b13

Branch: refs/heads/json
Commit: 14d11b130ca0b3726e7724a1f4a9770bc1cb2453
Parents: 8060048
Author: James Taylor jamestay...@apache.org
Authored: Wed Jun 17 16:58:51 2015 -0700
Committer: James Taylor jamestay...@apache.org
Committed: Wed Jun 17 16:58:51 2015 -0700

--
 .../phoenix/end2end/RowValueConstructorIT.java  | 28 
 .../apache/phoenix/compile/WhereOptimizer.java  | 25 ++---
 .../phoenix/compile/WhereOptimizerTest.java | 20 ++
 3 files changed, 64 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/14d11b13/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
index 3859785..e227eb0 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
@@ -1395,4 +1395,32 @@ public class RowValueConstructorIT extends 
BaseClientManagedTimeIT {
 assertEquals(1, numRecords);
 }
 
+@Test
+public void testRVCInView() throws Exception {
+Connection conn = nextConnection(getUrl());
+conn.createStatement().execute(CREATE TABLE TEST_TABLE.TEST1 (\n + 
+PK1 CHAR(3) NOT NULL, \n + 
+PK2 CHAR(3) NOT NULL,\n + 
+DATA1 CHAR(10)\n + 
+CONSTRAINT PK PRIMARY KEY (PK1, PK2)));
+conn.close();
+conn = nextConnection(getUrl());
+conn.createStatement().execute(CREATE VIEW TEST_TABLE.FOO AS SELECT * 
FROM TEST_TABLE.TEST1 WHERE PK1 = 'FOO');
+conn.close();
+conn = nextConnection(getUrl());
+conn.createStatement().execute(UPSERT INTO TEST_TABLE.TEST1 
VALUES('FOO','001','SOMEDATA'));
+conn.createStatement().execute(UPSERT INTO TEST_TABLE.TEST1 
VALUES('FOO','002','SOMEDATA'));
+conn.createStatement().execute(UPSERT INTO TEST_TABLE.TEST1 
VALUES('FOO','003','SOMEDATA'));
+conn.createStatement().execute(UPSERT INTO TEST_TABLE.TEST1 
VALUES('FOO','004','SOMEDATA'));
+conn.createStatement().execute(UPSERT INTO TEST_TABLE.TEST1 
VALUES('FOO','005','SOMEDATA'));
+conn.commit();
+conn.close();
+
+conn = nextConnection(getUrl());
+ResultSet rs = conn.createStatement().executeQuery(SELECT * FROM 
TEST_TABLE.FOO WHERE PK2 < '004' AND (PK1,PK2) > ('FOO','002') LIMIT 2);
+assertTrue(rs.next());
+assertEquals(003, rs.getString(PK2));
+assertFalse(rs.next());
+conn.close();
+}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/14d11b13/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java
index a5aef02..b7f04e0 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java
@@ -647,23 +647,30 @@ public class WhereOptimizer {
 if (childSlot == EMPTY_KEY_SLOTS) {
 return EMPTY_KEY_SLOTS;
 }
-// FIXME: get rid of this min/max range BS now that a key 
range can span multiple columns
+// FIXME: get rid of this special-cased min/max range now that 
a key range can span multiple columns
 if (childSlot.getMinMaxRange() != null) { // Only set if in 
initial pk position
-// TODO: potentially use KeySlot.intersect here. However, 
we can't intersect the key ranges in the slot
-// with our minMaxRange, since it spans columns and this 
would mess up our skip scan.
+// TODO: fix intersectSlots so that it works with RVCs. 
We'd just need to fill in the leading parts
+// of the key with the minMaxRange and then intersect the 
key parts that overlap.
 minMaxRange = 
minMaxRange.intersect(childSlot.getMinMaxRange());
 for (KeySlot slot : childSlot) {
 if (slot 

[07/16] phoenix git commit: PHOENIX-1504 Support adding column to a table that has views (Samarth Jain/Dave Hacker)

2015-06-24 Thread tdsilva
PHOENIX-1504 Support adding column to a table that has views (Samarth Jain/Dave 
Hacker)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/e78eb6fa
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/e78eb6fa
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/e78eb6fa

Branch: refs/heads/json
Commit: e78eb6faceec40d8b09fbc7dde778b87fe54feef
Parents: 2d70eff
Author: Samarth samarth.j...@salesforce.com
Authored: Thu Jun 18 15:37:37 2015 -0700
Committer: Samarth samarth.j...@salesforce.com
Committed: Thu Jun 18 15:37:37 2015 -0700

--
 .../apache/phoenix/end2end/AlterTableIT.java| 356 +
 .../end2end/TenantSpecificTablesDDLIT.java  |  20 +-
 .../org/apache/phoenix/end2end/UpgradeIT.java   | 332 
 .../coprocessor/MetaDataEndpointImpl.java   | 262 +---
 .../phoenix/coprocessor/MetaDataProtocol.java   |   4 +-
 .../coprocessor/generated/PTableProtos.java | 103 -
 .../phoenix/jdbc/PhoenixDatabaseMetaData.java   |   3 +-
 .../query/ConnectionQueryServicesImpl.java  |  51 ++-
 .../apache/phoenix/query/QueryConstants.java|  30 +-
 .../apache/phoenix/schema/DelegateTable.java|   5 +
 .../apache/phoenix/schema/MetaDataClient.java   |  37 +-
 .../java/org/apache/phoenix/schema/PTable.java  |   1 +
 .../org/apache/phoenix/schema/PTableImpl.java   |  40 +-
 .../java/org/apache/phoenix/util/ByteUtil.java  |  10 +-
 .../org/apache/phoenix/util/UpgradeUtil.java| 395 ++-
 15 files changed, 1495 insertions(+), 154 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/e78eb6fa/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
index 59698d6..61dd6a9 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
@@ -18,6 +18,7 @@
 package org.apache.phoenix.end2end;
 
 import static 
org.apache.hadoop.hbase.HColumnDescriptor.DEFAULT_REPLICATION_SCOPE;
+import static 
org.apache.phoenix.exception.SQLExceptionCode.CANNOT_MUTATE_TABLE;
 import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
 import static org.apache.phoenix.util.TestUtil.closeConnection;
 import static org.apache.phoenix.util.TestUtil.closeStatement;
@@ -32,9 +33,11 @@ import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.Collections;
+import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 
@@ -48,8 +51,10 @@ import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
 import org.apache.phoenix.query.QueryConstants;
+import org.apache.phoenix.schema.PColumn;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTableKey;
+import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.TableNotFoundException;
 import org.apache.phoenix.util.IndexUtil;
 import org.apache.phoenix.util.PhoenixRuntime;
@@ -59,6 +64,8 @@ import org.apache.phoenix.util.SchemaUtil;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+import com.google.common.base.Objects;
+
 /**
  *
  * A lot of tests in this class test HBase level properties. As a result,
@@ -1988,4 +1995,353 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 conn.close();
 }
 }
+
+@Test
+public void testAddColumnToTableWithViews() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+try {   
+conn.createStatement().execute(CREATE TABLE IF NOT EXISTS 
TABLEWITHVIEW (
++  ID char(1) NOT NULL,
++  COL1 integer NOT NULL,
++  COL2 bigint NOT NULL,
++  CONSTRAINT NAME_PK PRIMARY KEY (ID, COL1, COL2)
++  ));
+assertTableDefinition(conn, TABLEWITHVIEW, PTableType.TABLE, 
null, 0, 3, -1, ID, COL1, COL2);
+
+conn.createStatement().execute(CREATE VIEW VIEWOFTABLE ( 
VIEW_COL1 SMALLINT ) AS SELECT * FROM TABLEWITHVIEW);
+assertTableDefinition(conn, VIEWOFTABLE, PTableType.VIEW, 
TABLEWITHVIEW, 0, 4, 3, ID, COL1, COL2, VIEW_COL1);
+
+conn.createStatement().execute(ALTER TABLE TABLEWITHVIEW ADD COL3 
char(10));
+

[10/16] phoenix git commit: PHOENIX-1920 - Pherf - Add support for mixed r/w workloads

2015-06-24 Thread tdsilva
http://git-wip-us.apache.org/repos/asf/phoenix/blob/7175dcbc/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultManager.java
--
diff --git 
a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultManager.java
 
b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultManager.java
index 523feb4..39d6a9c 100644
--- 
a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultManager.java
+++ 
b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultManager.java
@@ -33,17 +33,13 @@ public class ResultManager {
 private final ResultUtil util;
 private final PherfConstants.RunMode runMode;
 
-
 public ResultManager(String fileNameSeed, PherfConstants.RunMode runMode) {
-this(runMode, Arrays.asList(
-new XMLResultHandler(fileNameSeed, ResultFileDetails.XML),
+this(runMode, Arrays.asList(new XMLResultHandler(fileNameSeed, 
ResultFileDetails.XML),
 new ImageResultHandler(fileNameSeed, ResultFileDetails.IMAGE),
-   new CSVResultHandler(
-   fileNameSeed,
-   runMode == 
RunMode.PERFORMANCE ? ResultFileDetails.CSV_DETAILED_PERFORMANCE
-   
: ResultFileDetails.CSV_DETAILED_FUNCTIONAL),
-new CSVResultHandler(fileNameSeed, 
ResultFileDetails.CSV_AGGREGATE_PERFORMANCE)
-));
+new CSVResultHandler(fileNameSeed, runMode == 
RunMode.PERFORMANCE ?
+ResultFileDetails.CSV_DETAILED_PERFORMANCE :
+ResultFileDetails.CSV_DETAILED_FUNCTIONAL),
+new CSVResultHandler(fileNameSeed, 
ResultFileDetails.CSV_AGGREGATE_PERFORMANCE)));
 }
 
 public ResultManager(PherfConstants.RunMode runMode, ListResultHandler 
resultHandlers) {
@@ -81,6 +77,7 @@ public class ResultManager {
 
 /**
  * Write a combined set of results for each result in the list.
+ *
  * @param dataModelResults List{@link DataModelResult  /}
  * @throws Exception
  */
@@ -89,7 +86,9 @@ public class ResultManager {
 
 CSVResultHandler detailsCSVWriter = null;
 try {
-detailsCSVWriter = new 
CSVResultHandler(PherfConstants.COMBINED_FILE_NAME, 
ResultFileDetails.CSV_DETAILED_PERFORMANCE);
+detailsCSVWriter =
+new CSVResultHandler(PherfConstants.COMBINED_FILE_NAME,
+ResultFileDetails.CSV_DETAILED_PERFORMANCE);
 for (DataModelResult dataModelResult : dataModelResults) {
 util.write(detailsCSVWriter, dataModelResult, runMode);
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/7175dcbc/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultUtil.java
--
diff --git 
a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultUtil.java 
b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultUtil.java
index fd960d1..07dfa86 100644
--- 
a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultUtil.java
+++ 
b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/result/ResultUtil.java
@@ -22,15 +22,16 @@ import org.apache.phoenix.pherf.PherfConstants;
 import org.apache.phoenix.pherf.PherfConstants.RunMode;
 import org.apache.phoenix.pherf.result.file.ResultFileDetails;
 import org.apache.phoenix.pherf.result.impl.CSVResultHandler;
-import org.apache.phoenix.pherf.result.impl.ImageResultHandler;
-import org.apache.phoenix.pherf.result.impl.XMLResultHandler;
 import org.apache.phoenix.pherf.util.PhoenixUtil;
 
-import java.io.*;
+import java.io.File;
+import java.io.IOException;
 import java.text.Format;
 import java.text.SimpleDateFormat;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Date;
 import java.util.List;
+import java.util.Map;
 
 public class ResultUtil {
 
@@ -54,7 +55,10 @@ public class ResultUtil {
 ListResultValue rowValues = new ArrayList();
 rowValues.add(new ResultValue(PhoenixUtil.getZookeeper()));
 
rowValues.addAll(writeThreadTime.getCsvRepresentation(this));
-Result result = new 
Result(ResultFileDetails.CSV_DETAILED_PERFORMANCE, ZK, + 
dataLoadThreadTime.getCsvTitle(), rowValues);
+Result
+result =
+new 
Result(ResultFileDetails.CSV_DETAILED_PERFORMANCE,
+ZK, + dataLoadThreadTime.getCsvTitle(), 
rowValues);
 writer.write(result);
 }
 }
@@ -83,7 +87,10 @@ public class ResultUtil {
 ListResultValue 

[44/49] phoenix git commit: PHOENIX-2040 Mark spark/scala dependencies as 'provided' (Josh Mahonin)

2015-06-24 Thread tdsilva
PHOENIX-2040 Mark spark/scala dependencies as 'provided' (Josh Mahonin)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b61ef77e
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b61ef77e
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b61ef77e

Branch: refs/heads/json
Commit: b61ef77e509a024ccaf6c3ce74c385c31c5f534a
Parents: c2927dd
Author: Nick Dimiduk ndimi...@apache.org
Authored: Mon Jun 15 16:16:03 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Mon Jun 15 16:16:03 2015 -0700

--
 phoenix-assembly/pom.xml |  4 
 phoenix-spark/pom.xml| 51 ---
 2 files changed, 32 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b61ef77e/phoenix-assembly/pom.xml
--
diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml
index baf6738..51ff74d 100644
--- a/phoenix-assembly/pom.xml
+++ b/phoenix-assembly/pom.xml
@@ -152,6 +152,10 @@
 /dependency
 dependency
   groupIdorg.apache.phoenix/groupId
+  artifactIdphoenix-spark/artifactId
+/dependency
+dependency
+  groupIdorg.apache.phoenix/groupId
   artifactIdphoenix-server/artifactId
 /dependency
 dependency

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b61ef77e/phoenix-spark/pom.xml
--
diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml
index 7086bb6..289801a 100644
--- a/phoenix-spark/pom.xml
+++ b/phoenix-spark/pom.xml
@@ -45,12 +45,7 @@
   groupIdorg.apache.phoenix/groupId
   artifactIdphoenix-core/artifactId
 /dependency
-dependency
-  groupIdorg.apache.phoenix/groupId
-  artifactIdphoenix-core/artifactId
-  classifiertests/classifier
-  scopetest/scope
-/dependency
+
 !-- Force import of Spark's servlet API for unit tests --
 dependency
   groupIdjavax.servlet/groupId
@@ -59,16 +54,38 @@
   scopetest/scope
 /dependency
 
+!-- Mark Spark / Scala as provided --
 dependency
-  groupIdjunit/groupId
-  artifactIdjunit/artifactId
+  groupIdorg.scala-lang/groupId
+  artifactIdscala-library/artifactId
+  version${scala.version}/version
+  scopeprovided/scope
+/dependency
+dependency
+  groupIdorg.apache.spark/groupId
+  artifactIdspark-core_${scala.binary.version}/artifactId
+  version${spark.version}/version
+  scopeprovided/scope
+/dependency
+dependency
+  groupIdorg.apache.spark/groupId
+  artifactIdspark-sql_${scala.binary.version}/artifactId
+  version${spark.version}/version
+  scopeprovided/scope
+/dependency
+
+!-- Test dependencies --
+dependency
+  groupIdorg.apache.phoenix/groupId
+  artifactIdphoenix-core/artifactId
+  classifiertests/classifier
   scopetest/scope
 /dependency
 
 dependency
-  groupIdorg.scala-lang/groupId
-  artifactIdscala-library/artifactId
-  version${scala.version}/version
+  groupIdjunit/groupId
+  artifactIdjunit/artifactId
+  scopetest/scope
 /dependency
 
 dependency
@@ -86,18 +103,6 @@
 /dependency
 
 dependency
-  groupIdorg.apache.spark/groupId
-  artifactIdspark-core_${scala.binary.version}/artifactId
-  version${spark.version}/version
-/dependency
-
-dependency
-  groupIdorg.apache.spark/groupId
-  artifactIdspark-sql_${scala.binary.version}/artifactId
-  version${spark.version}/version
-/dependency
-
-dependency
   groupIdorg.apache.hadoop/groupId
   artifactIdhadoop-client/artifactId
   version${hadoop-two.version}/version



[46/49] phoenix git commit: PHOENIX-1981 : PhoenixHBase Load and Store Funcs should handle all Pig data types

2015-06-24 Thread tdsilva
PHOENIX-1981 : PhoenixHBase Load and Store Funcs should handle all Pig data 
types


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a4aa780c
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a4aa780c
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a4aa780c

Branch: refs/heads/json
Commit: a4aa780c78f73cf0ee5f7d5e7afefd7ab581097a
Parents: 8a0dee7
Author: Prashant Kommireddi 
pkommire...@pkommireddi-ltm.internal.salesforce.com
Authored: Mon May 18 19:48:30 2015 -0700
Committer: Eli Levine elilev...@apache.org
Committed: Mon Jun 15 18:17:45 2015 -0700

--
 .../src/main/java/org/apache/phoenix/pig/util/TypeUtil.java  | 8 +++-
 .../test/java/org/apache/phoenix/pig/util/TypeUtilTest.java  | 8 +++-
 2 files changed, 6 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a4aa780c/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java
--
diff --git 
a/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java 
b/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java
index c8bc9d8..6e32fb5 100644
--- a/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java
+++ b/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java
@@ -1,17 +1,15 @@
 /*
- * Copyright 2010 The Apache Software Foundation
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
- *distributed with this work for additional information
+ * distributed with this work for additional information
  * regarding copyright ownership.  The ASF licenses this file
  * to you under the Apache License, Version 2.0 (the
- * License); you maynot use this file except in compliance
+ * License); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
  * http://www.apache.org/licenses/LICENSE-2.0
  *
- * Unless required by applicablelaw or agreed to in writing, software
+ * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an AS IS BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a4aa780c/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/TypeUtilTest.java
--
diff --git 
a/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/TypeUtilTest.java 
b/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/TypeUtilTest.java
index 56167f6..0b44d2b 100644
--- a/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/TypeUtilTest.java
+++ b/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/TypeUtilTest.java
@@ -1,17 +1,15 @@
 /*
- * Copyright 2010 The Apache Software Foundation
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
- *distributed with this work for additional information
+ * distributed with this work for additional information
  * regarding copyright ownership.  The ASF licenses this file
  * to you under the Apache License, Version 2.0 (the
- * License); you maynot use this file except in compliance
+ * License); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
  * http://www.apache.org/licenses/LICENSE-2.0
  *
- * Unless required by applicablelaw or agreed to in writing, software
+ * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an AS IS BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and



[22/49] phoenix git commit: PHOENIX-2005 Connection utilities omit zk client port, parent znode

2015-06-24 Thread tdsilva
PHOENIX-2005 Connection utilities omit zk client port, parent znode


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/afb0120e
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/afb0120e
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/afb0120e

Branch: refs/heads/json
Commit: afb0120e079502d926c5f37de4e28d3865e29089
Parents: a28c1d3
Author: Nick Dimiduk ndimi...@apache.org
Authored: Tue May 26 11:11:48 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Tue May 26 11:12:28 2015 -0700

--
 .../phoenix/jdbc/PhoenixEmbeddedDriver.java | 28 --
 .../phoenix/mapreduce/CsvBulkLoadTool.java  | 93 ++--
 .../phoenix/mapreduce/CsvToKeyValueMapper.java  | 26 +-
 .../query/ConnectionQueryServicesImpl.java  |  4 +-
 .../java/org/apache/phoenix/util/QueryUtil.java | 45 --
 .../phoenix/jdbc/PhoenixEmbeddedDriverTest.java | 14 ++-
 .../phoenix/mapreduce/CsvBulkLoadToolTest.java  | 11 ---
 .../mapreduce/CsvToKeyValueMapperTest.java  | 15 
 .../org/apache/phoenix/util/QueryUtilTest.java  | 33 ---
 9 files changed, 139 insertions(+), 130 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/afb0120e/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
index 9e95667..2451603 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
@@ -31,6 +31,7 @@ import java.util.logging.Logger;
 
 import javax.annotation.concurrent.Immutable;
 
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.phoenix.coprocessor.MetaDataProtocol;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.exception.SQLExceptionInfo;
@@ -174,10 +175,10 @@ public abstract class PhoenixEmbeddedDriver implements 
Driver, org.apache.phoeni
 }
 
 /**
- * 
+ *
  * Class to encapsulate connection info for HBase
  *
- * 
+ *
  * @since 0.1.1
  */
 public static class ConnectionInfo {
@@ -204,12 +205,18 @@ public abstract class PhoenixEmbeddedDriver implements 
Driver, org.apache.phoeni
 return false;
 }
 
-protected static ConnectionInfo create(String url) throws SQLException 
{
-StringTokenizer tokenizer = new StringTokenizer(url == null ?  : 
url.substring(PhoenixRuntime.JDBC_PROTOCOL.length()),DELIMITERS, true);
+public static ConnectionInfo create(String url) throws SQLException {
+url = url == null ?  : url;
+url = url.startsWith(PhoenixRuntime.JDBC_PROTOCOL)
+? url.substring(PhoenixRuntime.JDBC_PROTOCOL.length())
+: url;
+StringTokenizer tokenizer = new StringTokenizer(url, DELIMITERS, 
true);
 int nTokens = 0;
 String[] tokens = new String[5];
 String token = null;
-while (tokenizer.hasMoreTokens()  
!(token=tokenizer.nextToken()).equals(TERMINATOR)  tokenizer.hasMoreTokens() 
 nTokens  tokens.length) {
+while (tokenizer.hasMoreTokens() 
+!(token=tokenizer.nextToken()).equals(TERMINATOR) 
+tokenizer.hasMoreTokens()  nTokens  tokens.length) {
 token = tokenizer.nextToken();
 // This would mean we have an empty string for a token which 
is illegal
 if (DELIMITERS.contains(token)) {
@@ -316,8 +323,7 @@ public abstract class PhoenixEmbeddedDriver implements 
Driver, org.apache.phoeni
 private final String principal;
 private final String keytab;
 
-// used for testing
-ConnectionInfo(String zookeeperQuorum, Integer port, String rootNode, 
String principal, String keytab) {
+public ConnectionInfo(String zookeeperQuorum, Integer port, String 
rootNode, String principal, String keytab) {
 this.zookeeperQuorum = zookeeperQuorum;
 this.port = port;
 this.rootNode = rootNode;
@@ -326,8 +332,7 @@ public abstract class PhoenixEmbeddedDriver implements 
Driver, org.apache.phoeni
 this.keytab = keytab;
 }
 
-// used for testing
-ConnectionInfo(String zookeeperQuorum, Integer port, String rootNode) {
+public ConnectionInfo(String zookeeperQuorum, Integer port, String 
rootNode) {
this(zookeeperQuorum, port, rootNode, null, null);
 }
 
@@ -417,6 +422,11 @@ public 

[08/49] phoenix git commit: PHOENIX-1965 Upgrade Pig to version 0.13 (Prashant Kommireddi)

2015-06-24 Thread tdsilva
PHOENIX-1965 Upgrade Pig to version 0.13 (Prashant Kommireddi)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a1032fba
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a1032fba
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a1032fba

Branch: refs/heads/json
Commit: a1032fba34164b9ac9c62d2187302cdc0e8b2846
Parents: c1e5c71
Author: Jesse Yates jya...@apache.org
Authored: Wed May 13 10:00:52 2015 -0700
Committer: Jesse Yates jya...@apache.org
Committed: Thu May 14 12:52:57 2015 -0700

--
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a1032fba/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 23ac578..eec1f2a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -85,7 +85,7 @@
 !-- Dependency versions --
 commons-cli.version1.2/commons-cli.version
 hadoop.version2.5.1/hadoop.version
-pig.version0.12.0/pig.version
+pig.version0.13.0/pig.version
 jackson.version1.8.8/jackson.version
 antlr.version3.5/antlr.version
 log4j.version1.2.17/log4j.version



[10/49] phoenix git commit: PHOENIX-1976 Exit gracefully if addShutdownHook fails.

2015-06-24 Thread tdsilva
PHOENIX-1976 Exit gracefully if addShutdownHook fails.

If the JVM is already in the process of shutting down,
we don't need to add the shutdown hook for the PhoenixDriver
instance. Additionally, we shouldn't advertise this instance
either since we're going down.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/23f5acf8
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/23f5acf8
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/23f5acf8

Branch: refs/heads/json
Commit: 23f5acf86e1065f6bc8c342df4ba29f18aafea8a
Parents: 289a875
Author: Josh Elser josh.el...@gmail.com
Authored: Thu May 14 17:40:46 2015 -0400
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Fri May 15 11:05:05 2015 -0700

--
 .../org/apache/phoenix/jdbc/PhoenixDriver.java  | 46 ++--
 1 file changed, 32 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/23f5acf8/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java
index 6360d06..cfabe82 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java
@@ -60,25 +60,43 @@ public final class PhoenixDriver extends 
PhoenixEmbeddedDriver {
 private static volatile String driverShutdownMsg;
 static {
 try {
-DriverManager.registerDriver( INSTANCE = new PhoenixDriver() );
-// Add shutdown hook to release any resources that were never 
closed
-// In theory not necessary, but it won't hurt anything
-Runtime.getRuntime().addShutdownHook(new Thread() {
-@Override
-public void run() {
-try {
-INSTANCE.close();
-} catch (SQLException e) {
-logger.warn(Unable to close PhoenixDriver on 
shutdown, e);
-} finally {
-driverShutdownMsg = Phoenix driver closed because 
server is shutting down;
+INSTANCE = new PhoenixDriver();
+try {
+// Add shutdown hook to release any resources that were never 
closed
+// In theory not necessary, but it won't hurt anything
+Runtime.getRuntime().addShutdownHook(new Thread() {
+@Override
+public void run() {
+closeInstance(INSTANCE);
 }
-}
-});
+});
+
+// Only register the driver when we successfully register the 
shutdown hook
+// Don't want to register it if we're already in the process 
of going down.
+DriverManager.registerDriver( INSTANCE );
+} catch (IllegalStateException e) {
+logger.warn(Failed to register PhoenixDriver shutdown hook as 
the JVM is already shutting down);
+
+// Close the instance now because we don't have the shutdown 
hook
+closeInstance(INSTANCE);
+
+throw e;
+}
 } catch (SQLException e) {
 throw new IllegalStateException(Unable to register  + 
PhoenixDriver.class.getName() + : + e.getMessage());
 }
 }
+
+private static void closeInstance(PhoenixDriver instance) {
+try {
+instance.close();
+} catch (SQLException e) {
+logger.warn(Unable to close PhoenixDriver on shutdown, e);
+} finally {
+driverShutdownMsg = Phoenix driver closed because server is 
shutting down;
+}
+}
+
 // One entry per cluster here
 private final ConcurrentMapConnectionInfo,ConnectionQueryServices 
connectionQueryServicesMap = new 
ConcurrentHashMapConnectionInfo,ConnectionQueryServices(3);
 



[27/49] phoenix git commit: PHOENIX-2010 Properly validate number of arguments passed to the functions in FunctionParseNode#validate(Rajeshbabu)

2015-06-24 Thread tdsilva
PHOENIX-2010 Properly validate number of arguments passed to the functions in 
FunctionParseNode#validate(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b7f13824
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b7f13824
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b7f13824

Branch: refs/heads/json
Commit: b7f138246328ea80ce53fb73539a1e48413a32d2
Parents: 08fc27d
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Sun May 31 07:40:16 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Sun May 31 07:40:16 2015 +0530

--
 .../phoenix/end2end/UserDefinedFunctionsIT.java   | 14 ++
 .../org/apache/phoenix/parse/FunctionParseNode.java   |  4 
 .../main/java/org/apache/phoenix/parse/PFunction.java |  4 +---
 3 files changed, 19 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b7f13824/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
index 7dbde3c..868e19d 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
@@ -442,6 +442,20 @@ public class UserDefinedFunctionsIT extends 
BaseOwnClusterIT{
 rs = stmt.executeQuery(select k from t9 where mysum9(k)=11);
 assertTrue(rs.next());
 assertEquals(1, rs.getInt(1));
+try {
+rs = stmt.executeQuery(select k from t9 where 
mysum9(k,10,'x')=11);
+fail(FunctionNotFoundException should be thrown);
+} catch(FunctionNotFoundException e) {
+} catch(Exception e) {
+fail(FunctionNotFoundException should be thrown);
+}
+try {
+rs = stmt.executeQuery(select mysum9() from t9);
+fail(FunctionNotFoundException should be thrown);
+} catch(FunctionNotFoundException e) {
+} catch(Exception e) {
+fail(FunctionNotFoundException should be thrown);
+}
 stmt.execute(drop function mysum9);
 try {
 rs = stmt.executeQuery(select k from t9 where mysum9(k)=11);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b7f13824/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java 
b/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java
index d1001ee..be52d89 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java
@@ -41,6 +41,7 @@ import 
org.apache.phoenix.expression.function.FunctionExpression;
 import org.apache.phoenix.expression.function.UDFExpression;
 import org.apache.phoenix.parse.PFunction.FunctionArgument;
 import org.apache.phoenix.schema.ArgumentTypeMismatchException;
+import org.apache.phoenix.schema.FunctionNotFoundException;
 import org.apache.phoenix.schema.ValueRangeExcpetion;
 import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.schema.types.PDataTypeFactory;
@@ -133,6 +134,9 @@ public class FunctionParseNode extends CompoundParseNode {
 public ListExpression validate(ListExpression children, 
StatementContext context) throws SQLException {
 BuiltInFunctionInfo info = this.getInfo();
 BuiltInFunctionArgInfo[] args = info.getArgs();
+if (args.length  children.size() || info.getRequiredArgCount()  
children.size()) {
+throw new FunctionNotFoundException(this.name);
+}
 if (args.length  children.size()) {
 ListExpression moreChildren = new 
ArrayListExpression(children);
 for (int i = children.size(); i  info.getArgs().length; i++) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b7f13824/phoenix-core/src/main/java/org/apache/phoenix/parse/PFunction.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/PFunction.java 
b/phoenix-core/src/main/java/org/apache/phoenix/parse/PFunction.java
index f4bac35..8a95ae7 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/PFunction.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/PFunction.java
@@ -95,9 +95,7 @@ public class PFunction implements PMetaDataEntity {
 }
 
 public PFunction(PFunction 

[28/49] phoenix git commit: PHOENIX-2022 Make BaseRegionScanner.next abstract

2015-06-24 Thread tdsilva
PHOENIX-2022 Make BaseRegionScanner.next abstract

Avoid infinite recursion by removing a recursive call within
BaseRegionScanner.next, which was already being used as an
abstract method.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/583b5b1e
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/583b5b1e
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/583b5b1e

Branch: refs/heads/json
Commit: 583b5b1e115a81799cc3e6d0a20a0fe665f666e3
Parents: b7f1382
Author: Gabriel Reid gabri...@ngdata.com
Authored: Mon Jun 1 08:57:22 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jun 1 08:57:22 2015 +0200

--
 .../java/org/apache/phoenix/coprocessor/BaseRegionScanner.java   | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/583b5b1e/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
index 828f776..3f73048 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
@@ -33,9 +33,7 @@ public abstract class BaseRegionScanner implements 
RegionScanner {
 }
 
 @Override
-public boolean next(ListCell results) throws IOException {
-return next(results);
-}
+public abstract boolean next(ListCell results) throws IOException;
 
 @Override
 public boolean next(ListCell result, ScannerContext scannerContext) 
throws IOException {



[34/49] phoenix git commit: PHOENIX-1987 SIGN built-in function should be order preserving (Shuxiong Ye)

2015-06-24 Thread tdsilva
PHOENIX-1987 SIGN built-in function should be order preserving (Shuxiong Ye)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/47466e31
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/47466e31
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/47466e31

Branch: refs/heads/json
Commit: 47466e317db72d651c120b1c04bf687abfe10e34
Parents: 6c3d50a
Author: James Taylor jamestay...@apache.org
Authored: Thu Jun 4 14:24:06 2015 -0700
Committer: James Taylor jamestay...@apache.org
Committed: Thu Jun 4 14:24:06 2015 -0700

--
 .../org/apache/phoenix/expression/function/SignFunction.java| 5 +
 .../java/org/apache/phoenix/expression/SignFunctionTest.java| 3 ++-
 2 files changed, 7 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/47466e31/phoenix-core/src/main/java/org/apache/phoenix/expression/function/SignFunction.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/SignFunction.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/SignFunction.java
index 0b470f8..a11eaff 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/SignFunction.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/SignFunction.java
@@ -71,4 +71,9 @@ public class SignFunction extends ScalarFunction {
 public String getName() {
 return NAME;
 }
+
+@Override
+public OrderPreserving preservesOrder() {
+return OrderPreserving.YES;
+}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/47466e31/phoenix-core/src/test/java/org/apache/phoenix/expression/SignFunctionTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/expression/SignFunctionTest.java
 
b/phoenix-core/src/test/java/org/apache/phoenix/expression/SignFunctionTest.java
index 37d6e1d..e4a5f80 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/expression/SignFunctionTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/expression/SignFunctionTest.java
@@ -54,7 +54,8 @@ public class SignFunctionTest {
 Expression signFunction = new SignFunction(expressions);
 ImmutableBytesWritable ptr = new ImmutableBytesWritable();
 signFunction.evaluate(null, ptr);
-Integer result = (Integer) signFunction.getDataType().toObject(ptr);
+Integer result =
+(Integer) signFunction.getDataType().toObject(ptr, 
signFunction.getSortOrder());
 assertTrue(result.compareTo(expected) == 0);
 }
 



[23/49] phoenix git commit: PHOENIX-2005 Connection utilities omit zk client port, parent znode (addendum)

2015-06-24 Thread tdsilva
PHOENIX-2005 Connection utilities omit zk client port, parent znode (addendum)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/e493215b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/e493215b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/e493215b

Branch: refs/heads/json
Commit: e493215bff7057bad1a52efecca90384a1dd9412
Parents: afb0120
Author: Nick Dimiduk ndimi...@apache.org
Authored: Tue May 26 17:41:04 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Tue May 26 17:41:04 2015 -0700

--
 .../phoenix/jdbc/PhoenixEmbeddedDriver.java |  2 +-
 .../java/org/apache/phoenix/util/QueryUtil.java |  2 +-
 .../phoenix/jdbc/PhoenixEmbeddedDriverTest.java | 20 
 3 files changed, 22 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/e493215b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
index 2451603..3cfaacc 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
@@ -209,7 +209,7 @@ public abstract class PhoenixEmbeddedDriver implements 
Driver, org.apache.phoeni
 url = url == null ?  : url;
 url = url.startsWith(PhoenixRuntime.JDBC_PROTOCOL)
 ? url.substring(PhoenixRuntime.JDBC_PROTOCOL.length())
-: url;
+: PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR + url;
 StringTokenizer tokenizer = new StringTokenizer(url, DELIMITERS, 
true);
 int nTokens = 0;
 String[] tokens = new String[5];

http://git-wip-us.apache.org/repos/asf/phoenix/blob/e493215b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
index bd38983..a2d4a91 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
@@ -290,7 +290,7 @@ public final class QueryUtil {
 throws ClassNotFoundException,
 SQLException {
 String url = getConnectionUrl(props, conf);
-LOG.info(Creating connection with the jdbc url: + url);
+LOG.info(Creating connection with the jdbc url:  + url);
 PropertiesUtil.extractProperties(props, conf);
 return DriverManager.getConnection(url, props);
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/e493215b/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriverTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriverTest.java
 
b/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriverTest.java
index 083b205..4eda825 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriverTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriverTest.java
@@ -34,23 +34,33 @@ public class PhoenixEmbeddedDriverTest {
 @Test
 public void testGetConnectionInfo() throws SQLException {
 String[] urls = new String[] {
+null,
+,
 jdbc:phoenix,
 jdbc:phoenix;test=true,
 jdbc:phoenix:localhost,
+localhost,
+localhost;,
 jdbc:phoenix:localhost:123,
 jdbc:phoenix:localhost:123;foo=bar,
+localhost:123,
 jdbc:phoenix:localhost:123:/hbase,
 jdbc:phoenix:localhost:123:/foo-bar,
 jdbc:phoenix:localhost:123:/foo-bar;foo=bas,
+localhost:123:/foo-bar,
 jdbc:phoenix:localhost:/hbase,
 jdbc:phoenix:localhost:/foo-bar,
 jdbc:phoenix:localhost:/foo-bar;test=true,
+localhost:/foo-bar,
 jdbc:phoenix:v1,v2,v3,
 jdbc:phoenix:v1,v2,v3;,
 jdbc:phoenix:v1,v2,v3;test=true,
+v1,v2,v3,
 jdbc:phoenix:v1,v2,v3:/hbase,
 jdbc:phoenix:v1,v2,v3:/hbase;test=true,
+v1,v2,v3:/foo-bar,
 jdbc:phoenix:v1,v2,v3:123:/hbase,
+v1,v2,v3:123:/hbase,
 jdbc:phoenix:v1,v2,v3:123:/hbase;test=false,
 

[26/49] phoenix git commit: PHOENIX-1939 Tests are failing with DoNotRetryIOException: ATABLE: null (Alicia 

2015-06-24 Thread tdsilva
PHOENIX-1939 Tests are failing with DoNotRetryIOException: ATABLE: null (Alicia 
Ying Shu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/08fc27d4
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/08fc27d4
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/08fc27d4

Branch: refs/heads/json
Commit: 08fc27d4c352f41f4999c8aa8bce953b3f4092cb
Parents: 160e949
Author: Nick Dimiduk ndimi...@apache.org
Authored: Fri May 29 17:12:25 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Fri May 29 17:12:25 2015 -0700

--
 .../src/main/java/org/apache/phoenix/schema/PTableImpl.java  | 4 ++--
 .../src/test/java/org/apache/phoenix/query/BaseTest.java | 3 ++-
 2 files changed, 4 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/08fc27d4/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
index 9a2ae7f..b62dbf5 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
@@ -88,8 +88,8 @@ public class PTableImpl implements PTable {
 
 private PTableKey key;
 private PName name;
-private PName schemaName;
-private PName tableName;
+private PName schemaName = PName.EMPTY_NAME;
+private PName tableName = PName.EMPTY_NAME;
 private PName tenantId;
 private PTableType type;
 private PIndexState state;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/08fc27d4/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
--
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
index 54ae670..b0574c3 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
@@ -121,7 +121,6 @@ import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.ipc.PhoenixRpcSchedulerFactory;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
 import org.apache.hadoop.hbase.ipc.controller.ServerRpcControllerFactory;
-import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.LocalIndexMerger;
 import org.apache.hadoop.hbase.regionserver.RSRpcServices;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -829,6 +828,7 @@ public abstract class BaseTest {
 logger.info(Table  + fullTableName +  is already 
deleted.);
 }
 }
+rs.close();
 if (lastTenantId != null) {
 conn.close();
 }
@@ -860,6 +860,7 @@ public abstract class BaseTest {
 logger.info(DROP SEQUENCE STATEMENT: DROP SEQUENCE  + 
SchemaUtil.getEscapedTableName(rs.getString(2), rs.getString(3)));
 conn.createStatement().execute(DROP SEQUENCE  + 
SchemaUtil.getEscapedTableName(rs.getString(2), rs.getString(3)));
 }
+rs.close();
 }
 
 protected static void initSumDoubleValues(byte[][] splits, String url) 
throws Exception {



[11/49] phoenix git commit: PHOENIX-1980 CsvBulkLoad cannot load hbase-site.xml from classpath

2015-06-24 Thread tdsilva
PHOENIX-1980 CsvBulkLoad cannot load hbase-site.xml from classpath


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6fc53b57
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6fc53b57
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6fc53b57

Branch: refs/heads/json
Commit: 6fc53b5792ea7bdd1b486860606966e76f2e5e3f
Parents: 23f5acf
Author: Nick Dimiduk ndimi...@apache.org
Authored: Mon May 18 10:33:42 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Mon May 18 10:33:42 2015 -0700

--
 .../main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java| 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6fc53b57/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
index 31f8b42..a5a8aa1 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
@@ -176,7 +176,7 @@ public class CsvBulkLoadTool extends Configured implements 
Tool {
 @Override
 public int run(String[] args) throws Exception {
 
-Configuration conf = HBaseConfiguration.addHbaseResources(getConf());
+Configuration conf = HBaseConfiguration.create(getConf());
 
 CommandLine cmdLine = null;
 try {



[36/49] phoenix git commit: PHOENIX-1978 UDF ArgumentTypeMismatchException(Rajeshbabu)

2015-06-24 Thread tdsilva
PHOENIX-1978 UDF ArgumentTypeMismatchException(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b3ed60bb
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b3ed60bb
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b3ed60bb

Branch: refs/heads/json
Commit: b3ed60bb935a09c3ed07a6d77502136c9b8a6eef
Parents: e54c99d
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Fri Jun 5 09:02:31 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Fri Jun 5 09:02:31 2015 +0530

--
 .../phoenix/end2end/UserDefinedFunctionsIT.java | 58 ++--
 phoenix-core/src/main/antlr3/PhoenixSQL.g   | 17 +++---
 2 files changed, 61 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b3ed60bb/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
index 868e19d..c6bd62f 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
@@ -58,6 +58,8 @@ import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.schema.FunctionAlreadyExistsException;
 import org.apache.phoenix.schema.FunctionNotFoundException;
 import org.apache.phoenix.schema.ValueRangeExcpetion;
+import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.schema.types.PArrayDataType;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.QueryUtil;
 import org.apache.phoenix.util.ReadOnlyProps;
@@ -121,11 +123,31 @@ public class UserDefinedFunctionsIT extends 
BaseOwnClusterIT{
 .append(
ptr.set(PInteger.INSTANCE.toBytes((Integer)sum));\n)
 .append(return true;\n)
 .append(}\n).toString();
-
+private static String ARRAY_INDEX_EVALUATE_METHOD =
+new StringBuffer()
+.append(public boolean evaluate(Tuple tuple, 
ImmutableBytesWritable ptr) {\n)
+.append(Expression indexExpr = 
children.get(1);\n)
+.append(if (!indexExpr.evaluate(tuple, ptr)) {\n)
+.append(   return false;\n)
+.append(} else if (ptr.getLength() == 0) {\n)
+.append(   return true;\n)
+.append(}\n)
+.append(// Use Codec to prevent Integer object 
allocation\n)
+.append(int index = 
PInteger.INSTANCE.getCodec().decodeInt(ptr, indexExpr.getSortOrder());\n)
+.append(if(index  0) {\n)
+.append(   throw new ParseException(\Index 
cannot be negative :\ + index);\n)
+.append(}\n)
+.append(Expression arrayExpr = 
children.get(0);\n)
+.append(return 
PArrayDataType.positionAtArrayElement(tuple, ptr, index, arrayExpr, 
getDataType(),getMaxLength());\n)
+.append(}\n).toString();
+
+
 private static String MY_REVERSE_CLASS_NAME = MyReverse;
 private static String MY_SUM_CLASS_NAME = MySum;
-private static String MY_REVERSE_PROGRAM = 
getProgram(MY_REVERSE_CLASS_NAME, STRING_REVERSE_EVALUATE_METHOD, PVarchar);
-private static String MY_SUM_PROGRAM = getProgram(MY_SUM_CLASS_NAME, 
SUM_COLUMN_VALUES_EVALUATE_METHOD, PInteger);
+private static String MY_ARRAY_INDEX_CLASS_NAME = MyArrayIndex;
+private static String MY_REVERSE_PROGRAM = 
getProgram(MY_REVERSE_CLASS_NAME, STRING_REVERSE_EVALUATE_METHOD, return 
PVarchar.INSTANCE;);
+private static String MY_SUM_PROGRAM = getProgram(MY_SUM_CLASS_NAME, 
SUM_COLUMN_VALUES_EVALUATE_METHOD, return PInteger.INSTANCE;);
+private static String MY_ARRAY_INDEX_PROGRAM = 
getProgram(MY_ARRAY_INDEX_CLASS_NAME, ARRAY_INDEX_EVALUATE_METHOD, return 
PDataType.fromTypeId(children.get(0).getDataType().getSqlType()- 
PDataType.ARRAY_TYPE_BASE););
 private static Properties EMPTY_PROPS = new Properties();
 
 
@@ -144,6 +166,8 @@ public class UserDefinedFunctionsIT extends 
BaseOwnClusterIT{
 .append(import org.apache.phoenix.schema.types.PInteger;\n)
 .append(import org.apache.phoenix.schema.types.PVarchar;\n)
 .append(import org.apache.phoenix.util.StringUtil;\n)
+.append(import 

[31/49] phoenix git commit: PHOENIX-2012 RowKeyComparisonFilter logs unencoded data at DEBUG level

2015-06-24 Thread tdsilva
PHOENIX-2012 RowKeyComparisonFilter logs unencoded data at DEBUG level


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9e686b75
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9e686b75
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9e686b75

Branch: refs/heads/json
Commit: 9e686b758ff735fd9129430cd31fe36993b9711b
Parents: dc3083f
Author: Nick Dimiduk ndimi...@apache.org
Authored: Wed May 27 15:58:32 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Mon Jun 1 15:54:37 2015 -0700

--
 .../java/org/apache/phoenix/filter/RowKeyComparisonFilter.java  | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/9e686b75/phoenix-core/src/main/java/org/apache/phoenix/filter/RowKeyComparisonFilter.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/filter/RowKeyComparisonFilter.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/filter/RowKeyComparisonFilter.java
index 2e2037b..b7de7ac 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/filter/RowKeyComparisonFilter.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/filter/RowKeyComparisonFilter.java
@@ -73,8 +73,9 @@ public class RowKeyComparisonFilter extends 
BooleanExpressionFilter {
 if (evaluate) {
 inputTuple.setKey(v.getRowArray(), v.getRowOffset(), 
v.getRowLength());
 this.keepRow = Boolean.TRUE.equals(evaluate(inputTuple));
-if (logger.isDebugEnabled()) {
-logger.debug(RowKeyComparisonFilter:  + (this.keepRow ? 
KEEP : FILTER)  +  row  + inputTuple);
+if (logger.isTraceEnabled()) {
+logger.trace(RowKeyComparisonFilter:  + (this.keepRow ? 
KEEP : FILTER)
++  row  + inputTuple);
 }
 evaluate = false;
 }



[37/49] phoenix git commit: PHOENIX-2027 Subqueries with no data are raising IllegalStateException (Alicia Ying Shu)

2015-06-24 Thread tdsilva
PHOENIX-2027 Subqueries with no data are raising IllegalStateException (Alicia 
Ying Shu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/db90196d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/db90196d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/db90196d

Branch: refs/heads/json
Commit: db90196dc2561a220fc376ce01a8ad1ba185bea8
Parents: b3ed60b
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Wed Jun 10 01:00:50 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Wed Jun 10 01:00:50 2015 +0530

--
 .../apache/phoenix/end2end/SortMergeJoinIT.java | 54 
 .../phoenix/execute/SortMergeJoinPlan.java  |  4 +-
 2 files changed, 56 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/db90196d/phoenix-core/src/it/java/org/apache/phoenix/end2end/SortMergeJoinIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SortMergeJoinIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SortMergeJoinIT.java
index 6f14a45..8b65ab3 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SortMergeJoinIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SortMergeJoinIT.java
@@ -2658,5 +2658,59 @@ public class SortMergeJoinIT extends 
BaseHBaseManagedTimeIT {
 }
 }
 
+@Test
+public void testSubqueryWithoutData() throws Exception {
+Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+Connection conn = DriverManager.getConnection(getUrl(), props);
+conn.setAutoCommit(false);
+
+try {
+String GRAMMAR_TABLE = CREATE TABLE IF NOT EXISTS GRAMMAR_TABLE 
(ID INTEGER PRIMARY KEY,  +
+unsig_id UNSIGNED_INT, big_id BIGINT, unsig_long_id 
UNSIGNED_LONG, tiny_id TINYINT, +
+unsig_tiny_id UNSIGNED_TINYINT, small_id SMALLINT, 
unsig_small_id UNSIGNED_SMALLINT, + 
+float_id FLOAT, unsig_float_id UNSIGNED_FLOAT, double_id 
DOUBLE, unsig_double_id UNSIGNED_DOUBLE, + 
+decimal_id DECIMAL, boolean_id BOOLEAN, time_id TIME, 
date_id DATE, timestamp_id TIMESTAMP, + 
+unsig_time_id TIME, unsig_date_id DATE, 
unsig_timestamp_id TIMESTAMP, varchar_id VARCHAR (30), + 
+char_id CHAR (30), binary_id BINARY (100), varbinary_id 
VARBINARY (100));
+
+String LARGE_TABLE = CREATE TABLE IF NOT EXISTS LARGE_TABLE (ID 
INTEGER PRIMARY KEY,  +
+unsig_id UNSIGNED_INT, big_id BIGINT, unsig_long_id 
UNSIGNED_LONG, tiny_id TINYINT, +
+unsig_tiny_id UNSIGNED_TINYINT, small_id SMALLINT, 
unsig_small_id UNSIGNED_SMALLINT, + 
+float_id FLOAT, unsig_float_id UNSIGNED_FLOAT, double_id 
DOUBLE, unsig_double_id UNSIGNED_DOUBLE, + 
+decimal_id DECIMAL, boolean_id BOOLEAN, time_id TIME, 
date_id DATE, timestamp_id TIMESTAMP, + 
+unsig_time_id TIME, unsig_date_id DATE, 
unsig_timestamp_id TIMESTAMP, varchar_id VARCHAR (30), + 
+char_id CHAR (30), binary_id BINARY (100), varbinary_id 
VARBINARY (100));
+
+String SECONDARY_LARGE_TABLE = CREATE TABLE IF NOT EXISTS 
SECONDARY_LARGE_TABLE (SEC_ID INTEGER PRIMARY KEY, +
+sec_unsig_id UNSIGNED_INT, sec_big_id BIGINT, 
sec_usnig_long_id UNSIGNED_LONG, sec_tiny_id TINYINT, + 
+sec_unsig_tiny_id UNSIGNED_TINYINT, sec_small_id 
SMALLINT, sec_unsig_small_id UNSIGNED_SMALLINT, + 
+sec_float_id FLOAT, sec_unsig_float_id UNSIGNED_FLOAT, 
sec_double_id DOUBLE, sec_unsig_double_id UNSIGNED_DOUBLE, +
+sec_decimal_id DECIMAL, sec_boolean_id BOOLEAN, 
sec_time_id TIME, sec_date_id DATE, +
+sec_timestamp_id TIMESTAMP, sec_unsig_time_id TIME, 
sec_unsig_date_id DATE, sec_unsig_timestamp_id TIMESTAMP, +
+sec_varchar_id VARCHAR (30), sec_char_id CHAR (30), 
sec_binary_id BINARY (100), sec_varbinary_id VARBINARY (100));
+createTestTable(getUrl(), GRAMMAR_TABLE);
+createTestTable(getUrl(), LARGE_TABLE);
+createTestTable(getUrl(), SECONDARY_LARGE_TABLE);
+
+String ddl = SELECT /*+USE_SORT_MERGE_JOIN*/ * FROM (SELECT ID, 
BIG_ID, DATE_ID FROM LARGE_TABLE AS A WHERE (A.ID % 5) = 0) AS A  +
+INNER JOIN (SELECT SEC_ID, SEC_TINY_ID, 
SEC_UNSIG_FLOAT_ID FROM SECONDARY_LARGE_TABLE AS B WHERE (B.SEC_ID % 5) = 0) AS 
B  + 
+ON A.ID=B.SEC_ID WHERE A.DATE_ID  ALL (SELECT 
SEC_DATE_ID FROM SECONDARY_LARGE_TABLE LIMIT 100)  +  
+AND B.SEC_UNSIG_FLOAT_ID = 

[48/49] phoenix git commit: minor changes based on Jesse's feedback

2015-06-24 Thread tdsilva
minor changes based on Jesse's feedback


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d1f7dede
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d1f7dede
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d1f7dede

Branch: refs/heads/json
Commit: d1f7dedeccbb0befce071cb87efd38290271039a
Parents: a4aa780
Author: Prashant Kommireddi 
pkommire...@pkommireddi-ltm.internal.salesforce.com
Authored: Mon Jun 15 16:18:47 2015 -0700
Committer: Eli Levine elilev...@apache.org
Committed: Mon Jun 15 18:17:45 2015 -0700

--
 .../src/main/java/org/apache/phoenix/pig/util/TypeUtil.java   | 7 +++
 1 file changed, 3 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/d1f7dede/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java
--
diff --git 
a/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java 
b/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java
index 6e32fb5..5820ec6 100644
--- a/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java
+++ b/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java
@@ -76,9 +76,7 @@ public final class TypeUtil {
 private TypeUtil() {}
 
 /**
- * A map of Phoenix to Pig data types.
- * 
- * @return
+ * @return map of Phoenix to Pig data types.
  */
 private static ImmutableMapPDataType, Byte init() {
 final ImmutableMap.BuilderPDataType, Byte builder = new 
BuilderPDataType, Byte();
@@ -160,7 +158,8 @@ public final class TypeUtil {
 
 /**
  * This method encodes a value with Phoenix data type. It begins with 
checking whether an object is BINARY and makes
- * a call to {@link #castBytes(Object, PDataType)} to convery bytes to 
targetPhoenixType
+ * a call to {@link #castBytes(Object, PDataType)} to convert bytes to 
targetPhoenixType. It returns a {@link RuntimeException}
+ * when object can not be coerced.
  * 
  * @param o
  * @param targetPhoenixType



[04/16] phoenix git commit: PHOENIX-2025 Phoenix-core's hbase-default.xml prevents HBaseTestingUtility from starting up in client apps (Geoffrey Jacoby)

2015-06-24 Thread tdsilva
PHOENIX-2025 Phoenix-core's hbase-default.xml prevents HBaseTestingUtility from 
starting up in client apps (Geoffrey Jacoby)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/fb44f353
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/fb44f353
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/fb44f353

Branch: refs/heads/json
Commit: fb44f35300510670b037f597ee66f709cb4d8dbb
Parents: 14d11b1
Author: James Taylor jamestay...@apache.org
Authored: Wed Jun 17 17:01:34 2015 -0700
Committer: James Taylor jamestay...@apache.org
Committed: Wed Jun 17 17:13:25 2015 -0700

--
 phoenix-core/src/it/resources/hbase-default.xml | 36 
 phoenix-core/src/it/resources/hbase-site.xml| 36 
 .../phoenix/compile/WhereCompilerTest.java  | 10 --
 .../phoenix/query/ConnectionlessTest.java   | 14 +---
 .../src/test/resources/hbase-default.xml| 36 
 5 files changed, 45 insertions(+), 87 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/fb44f353/phoenix-core/src/it/resources/hbase-default.xml
--
diff --git a/phoenix-core/src/it/resources/hbase-default.xml 
b/phoenix-core/src/it/resources/hbase-default.xml
deleted file mode 100644
index 691b702..000
--- a/phoenix-core/src/it/resources/hbase-default.xml
+++ /dev/null
@@ -1,36 +0,0 @@
-?xml version=1.0?
-?xml-stylesheet type=text/xsl href=configuration.xsl?
-!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * License); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an AS IS BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
---
-configuration
-  property
-namehbase.defaults.for.version.skip/name
-valuetrue/value
-description
-Set to true to skip the 'hbase.defaults.for.version' check.
-Setting this to true can be useful in contexts other than
-the other side of a maven generation; i.e. running in an
-ide.  You'll want to set this boolean to true to avoid
-seeing the RuntimException complaint: hbase-default.xml file
-seems to be for and old version of HBase (@@@VERSION@@@), this
-version is X.X.X-SNAPSHOT
-/description
-  /property
-/configuration

http://git-wip-us.apache.org/repos/asf/phoenix/blob/fb44f353/phoenix-core/src/it/resources/hbase-site.xml
--
diff --git a/phoenix-core/src/it/resources/hbase-site.xml 
b/phoenix-core/src/it/resources/hbase-site.xml
new file mode 100644
index 000..691b702
--- /dev/null
+++ b/phoenix-core/src/it/resources/hbase-site.xml
@@ -0,0 +1,36 @@
+?xml version=1.0?
+?xml-stylesheet type=text/xsl href=configuration.xsl?
+!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+--
+configuration
+  property
+namehbase.defaults.for.version.skip/name
+valuetrue/value
+description
+Set to true to skip the 'hbase.defaults.for.version' check.
+Setting this to true can be useful in contexts other than
+the other side of a maven generation; i.e. running in an
+ide.  You'll want to set this boolean to true to avoid
+seeing the RuntimException complaint: hbase-default.xml file
+seems to be for and old version of HBase (@@@VERSION@@@), this
+version is X.X.X-SNAPSHOT
+/description
+  /property
+/configuration


[12/16] phoenix git commit: PHOENIX-2021 - Implement ARRAY_CAT built in function (Dumindu Buddhika)

2015-06-24 Thread tdsilva
http://git-wip-us.apache.org/repos/asf/phoenix/blob/7385899d/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayConcatFunctionTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayConcatFunctionTest.java
 
b/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayConcatFunctionTest.java
new file mode 100644
index 000..75d0827
--- /dev/null
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayConcatFunctionTest.java
@@ -0,0 +1,584 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.phoenix.expression;
+
+import static org.junit.Assert.assertEquals;
+
+import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.SQLException;
+import java.util.List;
+
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.phoenix.exception.DataExceedsCapacityException;
+import org.apache.phoenix.expression.function.ArrayConcatFunction;
+import org.apache.phoenix.schema.SortOrder;
+import org.apache.phoenix.schema.TypeMismatchException;
+import org.apache.phoenix.schema.types.*;
+import org.junit.Test;
+
+import com.google.common.collect.Lists;
+
+public class ArrayConcatFunctionTest {
+
+private static void testExpression(LiteralExpression array1, 
LiteralExpression array2, PhoenixArray expected)
+throws SQLException {
+ListExpression expressions = Lists.newArrayList((Expression) array1);
+expressions.add(array2);
+
+Expression arrayConcatFunction = new ArrayConcatFunction(expressions);
+ImmutableBytesWritable ptr = new ImmutableBytesWritable();
+arrayConcatFunction.evaluate(null, ptr);
+PhoenixArray result = (PhoenixArray) 
arrayConcatFunction.getDataType().toObject(ptr, 
expressions.get(0).getSortOrder(), array1.getMaxLength(), array1.getScale());
+assertEquals(expected, result);
+}
+
+private static void test(PhoenixArray array1, PhoenixArray array2, 
PDataType array1DataType, Integer arr1MaxLen, Integer arr1Scale, PDataType 
array2DataType, Integer arr2MaxLen, Integer arr2Scale, PhoenixArray expected, 
SortOrder array1SortOrder, SortOrder array2SortOrder) throws SQLException {
+LiteralExpression array1Literal, array2Literal;
+array1Literal = LiteralExpression.newConstant(array1, array1DataType, 
arr1MaxLen, arr1Scale, array1SortOrder, Determinism.ALWAYS);
+array2Literal = LiteralExpression.newConstant(array2, array2DataType, 
arr2MaxLen, arr2Scale, array2SortOrder, Determinism.ALWAYS);
+testExpression(array1Literal, array2Literal, expected);
+}
+
+@Test
+public void testChar1() throws SQLException {
+Object[] o1 = new Object[]{aa, bb};
+Object[] o2 = new Object[]{c, d};
+Object[] e = new Object[]{aa, bb, c, d};
+PDataType type = PCharArray.INSTANCE;
+PDataType base = PChar.INSTANCE;
+
+PhoenixArray arr1 = new PhoenixArray(base, o1);
+PhoenixArray arr2 = new PhoenixArray(base, o2);
+PhoenixArray expected = new PhoenixArray(base, e);
+test(arr1, arr2, type, 2, null, type, 1, null, expected, 
SortOrder.ASC, SortOrder.ASC);
+test(arr1, arr2, type, 2, null, type, 1, null, expected, 
SortOrder.DESC, SortOrder.DESC);
+test(arr1, arr2, type, 2, null, type, 1, null, expected, 
SortOrder.ASC, SortOrder.DESC);
+test(arr1, arr2, type, 2, null, type, 1, null, expected, 
SortOrder.DESC, SortOrder.ASC);
+
+}
+
+@Test
+public void testChar2() throws SQLException {
+Object[] o1 = new Object[]{aa, bb};
+Object[] o2 = new Object[]{cc, dc, ee};
+Object[] e = new Object[]{aa, bb, cc, dc, ee};
+PDataType type = PCharArray.INSTANCE;
+PDataType base = PChar.INSTANCE;
+
+PhoenixArray arr1 = new PhoenixArray(base, o1);
+PhoenixArray arr2 = new PhoenixArray(base, o2);
+PhoenixArray expected = new PhoenixArray(base, e);
+test(arr1, arr2, type, 2, null, type, 2, null, expected, 
SortOrder.ASC, SortOrder.ASC);
+test(arr1, arr2, type, 2, null, type, 2, null, expected, 
SortOrder.ASC, SortOrder.DESC);
+   

[14/16] phoenix git commit: PHOENIX-2066 Existing client fails initialization due to upgrade attempting to create column with no name (Lukas Lalinsky)

2015-06-24 Thread tdsilva
PHOENIX-2066 Existing client fails initialization due to upgrade attempting to 
create column with no name (Lukas Lalinsky)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b58a62a5
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b58a62a5
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b58a62a5

Branch: refs/heads/json
Commit: b58a62a5e43dcbb37695a0ebf7a20ced13e99503
Parents: 7385899
Author: James Taylor jtay...@salesforce.com
Authored: Wed Jun 24 08:11:12 2015 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Wed Jun 24 08:11:12 2015 -0700

--
 .../phoenix/query/ConnectionQueryServicesImpl.java| 14 +-
 1 file changed, 9 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b58a62a5/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index c5dde10..ddebf9f 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@ -1941,11 +1941,15 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 columnsToAdd += ,  + 
PhoenixDatabaseMetaData.INDEX_TYPE +   + 
PUnsignedTinyint.INSTANCE.getSqlTypeName()
 + ,  + 
PhoenixDatabaseMetaData.INDEX_DISABLE_TIMESTAMP +   + 
PLong.INSTANCE.getSqlTypeName();
 }
-// Ugh..need to assign to another local 
variable to keep eclipse happy.
-PhoenixConnection newMetaConnection = 
addColumnsIfNotExists(metaConnection,
-PhoenixDatabaseMetaData.SYSTEM_CATALOG,
-
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP, columnsToAdd);
-metaConnection = newMetaConnection;
+
+// If we have some new columns from 4.1-4.3 to 
add, add them now.
+if (!columnsToAdd.isEmpty()) {
+// Ugh..need to assign to another local 
variable to keep eclipse happy.
+PhoenixConnection newMetaConnection = 
addColumnsIfNotExists(metaConnection,
+
PhoenixDatabaseMetaData.SYSTEM_CATALOG,
+
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP, columnsToAdd);
+metaConnection = newMetaConnection;
+}
 
 if (currentServerSideTableTimeStamp  
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_5_0) {
 columnsToAdd = 
PhoenixDatabaseMetaData.BASE_COLUMN_COUNT +  



[09/16] phoenix git commit: PHOENIX-1920 - Pherf - Add support for mixed r/w workloads

2015-06-24 Thread tdsilva
http://git-wip-us.apache.org/repos/asf/phoenix/blob/7175dcbc/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryVerifier.java
--
diff --git 
a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryVerifier.java
 
b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryVerifier.java
index 78f18ca..c9333a0 100644
--- 
a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryVerifier.java
+++ 
b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryVerifier.java
@@ -43,153 +43,160 @@ import difflib.DiffUtils;
 import difflib.Patch;
 
 public class QueryVerifier {
-   private PhoenixUtil pUtil = new PhoenixUtil();
-   private static final Logger logger = LoggerFactory
-   .getLogger(QueryVerifier.class);
-   private boolean useTemporaryOutput;
-   private String directoryLocation;
-
-   public QueryVerifier(boolean useTemporaryOutput) {
-   this.useTemporaryOutput = useTemporaryOutput;
-   this.directoryLocation = this.useTemporaryOutput ? 
-   PherfConstants.EXPORT_TMP : 
PherfConstants.EXPORT_DIR;
-   
-   ensureBaseDirExists();
-   }
-   
-   /***
-* Export query resultSet to CSV file
-* @param query
-* @throws Exception
-*/
-   public String exportCSV(Query query) throws Exception {
-   Connection conn = null;
-   PreparedStatement statement = null;
-   ResultSet rs = null;
-   String fileName = getFileName(query);
-   FileOutputStream fos = new FileOutputStream(fileName);
-   try {
-   conn = pUtil.getConnection(query.getTenantId());
-   statement = conn.prepareStatement(query.getStatement());
-   boolean isQuery = statement.execute();
-   if (isQuery) {
-   rs = statement.executeQuery();
-   int columnCount = 
rs.getMetaData().getColumnCount();
-   while (rs.next()) {
-   for (int columnNum = 1; columnNum = 
columnCount; columnNum++) {
-   
fos.write((rs.getString(columnNum) + 
PherfConstants.RESULT_FILE_DELIMETER).getBytes());
-   }
-   
fos.write(PherfConstants.NEW_LINE.getBytes());
-   }
-   } else {
-   conn.commit();
-   }
-   } catch (Exception e) {
-   e.printStackTrace();
-   } finally {
-   if (rs != null) rs.close();
-   if (statement != null) statement.close();
-   if (conn != null) conn.close();
-   fos.flush();
-   fos.close();
-   }
-   return fileName;
-   }
-   
-   /***
-* Do a diff between exported query results and temporary CSV file
-* @param query
-* @param newCSV
-* @return
-*/
-   public boolean doDiff(Query query, String newCSV) {
+private PhoenixUtil pUtil = PhoenixUtil.create();
+private static final Logger logger = 
LoggerFactory.getLogger(QueryVerifier.class);
+private boolean useTemporaryOutput;
+private String directoryLocation;
+
+public QueryVerifier(boolean useTemporaryOutput) {
+this.useTemporaryOutput = useTemporaryOutput;
+this.directoryLocation =
+this.useTemporaryOutput ? PherfConstants.EXPORT_TMP : 
PherfConstants.EXPORT_DIR;
+
+ensureBaseDirExists();
+}
+
+/**
+ * Export query resultSet to CSV file
+ *
+ * @param query
+ * @throws Exception
+ */
+public String exportCSV(Query query) throws Exception {
+Connection conn = null;
+PreparedStatement statement = null;
+ResultSet rs = null;
+String fileName = getFileName(query);
+FileOutputStream fos = new FileOutputStream(fileName);
+try {
+conn = pUtil.getConnection(query.getTenantId());
+statement = conn.prepareStatement(query.getStatement());
+boolean isQuery = statement.execute();
+if (isQuery) {
+rs = statement.executeQuery();
+int columnCount = rs.getMetaData().getColumnCount();
+while (rs.next()) {
+for (int columnNum = 1; columnNum = columnCount; 
columnNum++) {
+fos.write((rs.getString(columnNum) + 
PherfConstants.RESULT_FILE_DELIMETER)
+.getBytes());
+}
+

[16/16] phoenix git commit: PHOENIX-2030 CsvBulkLoadTool should use logical name of the table for output directory suffix (Rajeshbabu)

2015-06-24 Thread tdsilva
PHOENIX-2030 CsvBulkLoadTool should use logical name of the table for output 
directory suffix (Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/3cf22a7d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/3cf22a7d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/3cf22a7d

Branch: refs/heads/json
Commit: 3cf22a7de4eaec6978763b6961d73aa9eaa07015
Parents: 50f3a04
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Thu Jun 25 01:16:51 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Thu Jun 25 01:16:51 2015 +0530

--
 .../phoenix/mapreduce/CsvBulkLoadToolIT.java | 19 +++
 .../phoenix/mapreduce/CsvBulkLoadTool.java   |  2 +-
 2 files changed, 20 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cf22a7d/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
index 392395d..6bcc221 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
@@ -39,6 +39,7 @@ import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
 import org.apache.phoenix.jdbc.PhoenixDriver;
 import org.apache.phoenix.util.DateUtil;
 import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.QueryUtil;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -206,6 +207,8 @@ public class CsvBulkLoadToolIT {
 String ddl = CREATE LOCAL INDEX TABLE6_IDX ON TABLE6 
 +  (FIRST_NAME ASC);
 stmt.execute(ddl);
+ddl = CREATE LOCAL INDEX TABLE6_IDX2 ON TABLE6  +  (LAST_NAME ASC);
+stmt.execute(ddl);
 
 FileSystem fs = FileSystem.get(hbaseTestUtil.getConfiguration());
 FSDataOutputStream outputStream = fs.create(new 
Path(/tmp/input3.csv));
@@ -228,6 +231,22 @@ public class CsvBulkLoadToolIT {
 assertEquals(FirstName 2, rs.getString(2));
 
 rs.close();
+rs =
+stmt.executeQuery(EXPLAIN SELECT id, FIRST_NAME FROM TABLE6 
where first_name='FirstName 2');
+assertEquals(
+CLIENT 1-CHUNK PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_TABLE6 
[-32768,'FirstName 2']\n
++ SERVER FILTER BY FIRST KEY ONLY, 
QueryUtil.getExplainPlan(rs));
+rs.close();
+rs = stmt.executeQuery(SELECT id, LAST_NAME FROM TABLE6 where 
last_name='LastName 2');
+assertTrue(rs.next());
+assertEquals(2, rs.getInt(1));
+assertEquals(LastName 2, rs.getString(2));
+rs.close();
+rs =
+stmt.executeQuery(EXPLAIN SELECT id, LAST_NAME FROM TABLE6 
where last_name='LastName 2');
+assertEquals(
+CLIENT 1-CHUNK PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_TABLE6 
[-32767,'LastName 2']\n
++ SERVER FILTER BY FIRST KEY ONLY, 
QueryUtil.getExplainPlan(rs));
 stmt.close();
 }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cf22a7d/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
index 9e27bac..5270277 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
@@ -265,7 +265,7 @@ public class CsvBulkLoadTool extends Configured implements 
Tool {
 JobManager.createThreadPoolExec(Integer.MAX_VALUE, 5, 20, 
useInstrumentedPool);
 try{
for (TargetTableRef table : tablesToBeLoaded) {
-   Path tablePath = new Path(outputPath, 
table.getPhysicalName());
+   Path tablePath = new Path(outputPath, 
table.getLogicalName());
Configuration jobConf = new Configuration(conf);
jobConf.set(CsvToKeyValueMapper.TABLE_NAME_CONFKEY, 
qualifiedTableName);
if 
(qualifiedTableName.compareToIgnoreCase(table.getLogicalName()) != 0) {



[15/16] phoenix git commit: PHOENIX-2068 UserDefinedFunctionsIT is failing in windows with InvocationTargetException (Rajeshbabu)

2015-06-24 Thread tdsilva
PHOENIX-2068 UserDefinedFunctionsIT is failing in windows with 
InvocationTargetException (Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/50f3a041
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/50f3a041
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/50f3a041

Branch: refs/heads/json
Commit: 50f3a04126c4fea59dc9eb978cef1399892d9a4a
Parents: b58a62a
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Thu Jun 25 00:44:25 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Thu Jun 25 00:44:25 2015 +0530

--
 .../phoenix/end2end/UserDefinedFunctionsIT.java | 22 
 1 file changed, 18 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/50f3a041/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
index c6bd62f..cee1c85 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
@@ -36,8 +36,10 @@ import java.io.OutputStream;
 import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.Statement;
+import java.util.HashSet;
 import java.util.Map;
 import java.util.Properties;
+import java.util.Set;
 import java.util.jar.Attributes;
 import java.util.jar.JarEntry;
 import java.util.jar.JarOutputStream;
@@ -614,10 +616,22 @@ public class UserDefinedFunctionsIT extends 
BaseOwnClusterIT{
 manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, 
1.0);
 FileOutputStream jarFos = new FileOutputStream(jarPath);
 JarOutputStream jarOutputStream = new JarOutputStream(jarFos, 
manifest);
-String pathToAdd =packageName.replace('.', File.separatorChar)
-+ File.separator;
-jarOutputStream.putNextEntry(new JarEntry(pathToAdd));
-jarOutputStream.closeEntry();
+String pathToAdd = packageName.replace('.', '/') + '/';
+String jarPathStr = new String(pathToAdd);
+SetString pathsInJar = new HashSetString();
+
+while (pathsInJar.add(jarPathStr)) {
+int ix = jarPathStr.lastIndexOf('/', jarPathStr.length() - 2);
+if (ix  0) {
+break;
+}
+jarPathStr = jarPathStr.substring(0, ix);
+}
+for (String pathInJar : pathsInJar) {
+jarOutputStream.putNextEntry(new JarEntry(pathInJar));
+jarOutputStream.closeEntry();
+}
+
 jarOutputStream.putNextEntry(new JarEntry(pathToAdd + 
classFile.getName()));
 byte[] allBytes = new byte[(int) classFile.length()];
 FileInputStream fis = new FileInputStream(classFile);



[02/16] phoenix git commit: PHOENIX-1935 org.apache.phoenix.end2end.ArithmeticQueryIT tests are failing (Alicia Ying Shu)

2015-06-24 Thread tdsilva
PHOENIX-1935 org.apache.phoenix.end2end.ArithmeticQueryIT tests are failing 
(Alicia Ying Shu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/80600488
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/80600488
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/80600488

Branch: refs/heads/json
Commit: 80600488f50fd000d74155ee17abfaa19ec39c69
Parents: db7b575
Author: Nick Dimiduk ndimi...@apache.org
Authored: Wed Jun 17 12:28:35 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Wed Jun 17 12:28:35 2015 -0700

--
 .../src/it/java/org/apache/phoenix/end2end/BaseViewIT.java  | 2 ++
 phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java | 3 +++
 .../src/test/java/org/apache/phoenix/query/BaseTest.java| 5 -
 3 files changed, 9 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/80600488/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
index b9d7180..3140077 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
@@ -98,6 +98,7 @@ public abstract class BaseViewIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 assertEquals(1, rs.getInt(1));
 assertEquals(121, rs.getInt(2));
 assertFalse(rs.next());
+conn.close();
 }
 
 protected void testUpdatableViewIndex(Integer saltBuckets) throws 
Exception {
@@ -179,6 +180,7 @@ public abstract class BaseViewIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 + CLIENT MERGE SORT,
 QueryUtil.getExplainPlan(rs));
 }
+conn.close();
 }
 
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/80600488/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
index 266438d..fb58a8f 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
@@ -92,8 +92,11 @@ public class ViewIT extends BaseViewIT {
 fail();
 } catch (ReadOnlyTableException e) {
 
+} finally {
+conn.close();
 }
 
+conn = DriverManager.getConnection(getUrl());
 int count = 0;
 ResultSet rs = conn.createStatement().executeQuery(SELECT k FROM v2);
 while (rs.next()) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/80600488/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
--
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
index fa78656..3f09518 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
@@ -115,6 +115,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.IntegrationTestingUtility;
+import org.apache.hadoop.hbase.TableNotEnabledException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
@@ -1634,7 +1635,9 @@ public abstract class BaseTest {
 for (HTableDescriptor table : tables) {
 String schemaName = 
SchemaUtil.getSchemaNameFromFullName(table.getName());
 if (!QueryConstants.SYSTEM_SCHEMA_NAME.equals(schemaName)) {
-admin.disableTable(table.getName());
+try{
+admin.disableTable(table.getName());
+} catch (TableNotEnabledException ignored){}
 admin.deleteTable(table.getName());
 }
 }



[11/16] phoenix git commit: PHOENIX-1920 - Pherf - Add support for mixed r/w workloads

2015-06-24 Thread tdsilva
PHOENIX-1920 - Pherf - Add support for mixed r/w workloads


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/7175dcbc
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/7175dcbc
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/7175dcbc

Branch: refs/heads/json
Commit: 7175dcbc011dff48f6d041697ec84da98f80f729
Parents: 466eeb3
Author: cmarcel cmar...@salesforce.com
Authored: Fri Jun 19 16:34:41 2015 -0700
Committer: cmarcel cmar...@salesforce.com
Committed: Fri Jun 19 16:34:41 2015 -0700

--
 .gitignore  |   2 +
 phoenix-pherf/pom.xml   |  10 +-
 .../org/apache/phoenix/pherf/DataIngestIT.java  | 134 --
 .../org/apache/phoenix/pherf/PherfMainIT.java   |  36 ++
 .../apache/phoenix/pherf/ResultBaseTestIT.java  |  31 +-
 .../apache/phoenix/pherf/SchemaReaderIT.java|  17 +-
 .../java/org/apache/phoenix/pherf/Pherf.java| 179 +---
 .../apache/phoenix/pherf/PherfConstants.java|   8 +-
 .../phoenix/pherf/configuration/DataModel.java  |  10 -
 .../phoenix/pherf/configuration/Scenario.java   |  12 +-
 .../pherf/configuration/WriteParams.java|  72 +++
 .../pherf/configuration/XMLConfigParser.java|  25 +-
 .../phoenix/pherf/jmx/MonitorManager.java   | 153 ---
 .../phoenix/pherf/loaddata/DataLoader.java  | 332 --
 .../pherf/result/DataLoadThreadTime.java|  87 ++--
 .../pherf/result/DataLoadTimeSummary.java   |  54 +--
 .../phoenix/pherf/result/DataModelResult.java   |  68 ++-
 .../phoenix/pherf/result/QueryResult.java   |  17 +-
 .../phoenix/pherf/result/QuerySetResult.java|  40 +-
 .../org/apache/phoenix/pherf/result/Result.java |  11 +-
 .../phoenix/pherf/result/ResultHandler.java |   5 +
 .../phoenix/pherf/result/ResultManager.java |  19 +-
 .../apache/phoenix/pherf/result/ResultUtil.java | 119 +++--
 .../phoenix/pherf/result/ResultValue.java   |   4 +-
 .../apache/phoenix/pherf/result/RunTime.java| 179 
 .../phoenix/pherf/result/ScenarioResult.java|  44 +-
 .../apache/phoenix/pherf/result/ThreadTime.java |  34 +-
 .../phoenix/pherf/result/file/Extension.java|   3 +-
 .../phoenix/pherf/result/file/Header.java   |  11 +-
 .../pherf/result/impl/CSVResultHandler.java |  47 +-
 .../pherf/result/impl/ImageResultHandler.java   |  58 +--
 .../pherf/result/impl/XMLResultHandler.java |  36 +-
 .../phoenix/pherf/schema/SchemaReader.java  |   2 +-
 .../apache/phoenix/pherf/util/PhoenixUtil.java  |  64 ++-
 .../pherf/workload/MultiThreadedRunner.java | 153 +++
 .../pherf/workload/MultithreadedDiffer.java | 131 +++---
 .../pherf/workload/MultithreadedRunner.java | 170 ---
 .../phoenix/pherf/workload/QueryExecutor.java   | 459 ++-
 .../phoenix/pherf/workload/QueryVerifier.java   | 265 +--
 .../apache/phoenix/pherf/workload/Workload.java |  10 +
 .../pherf/workload/WorkloadExecutor.java| 109 ++---
 .../phoenix/pherf/workload/WriteWorkload.java   | 403 
 .../scenario/prod_test_unsalted_scenario.xml|  35 ++
 .../phoenix/pherf/ConfigurationParserTest.java  | 102 +++--
 .../org/apache/phoenix/pherf/ResultTest.java|   5 +-
 .../apache/phoenix/pherf/RuleGeneratorTest.java |  15 +-
 .../test/resources/scenario/test_scenario.xml   |  58 ++-
 47 files changed, 2171 insertions(+), 1667 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/7175dcbc/.gitignore
--
diff --git a/.gitignore b/.gitignore
index fc0e4af..b918d76 100644
--- a/.gitignore
+++ b/.gitignore
@@ -22,3 +22,5 @@
 target/
 release/
 RESULTS/
+CSV_EXPORT/
+

http://git-wip-us.apache.org/repos/asf/phoenix/blob/7175dcbc/phoenix-pherf/pom.xml
--
diff --git a/phoenix-pherf/pom.xml b/phoenix-pherf/pom.xml
index 1667c66..0facbde 100644
--- a/phoenix-pherf/pom.xml
+++ b/phoenix-pherf/pom.xml
@@ -16,7 +16,8 @@
   ~   limitations under the License.
   --
 
-project xmlns=http://maven.apache.org/POM/4.0.0; 
xmlns:xsi=http://www.w3.org/2001/XMLSchema-instance;
+project xmlns=http://maven.apache.org/POM/4.0.0;
+ xmlns:xsi=http://www.w3.org/2001/XMLSchema-instance;
  xsi:schemaLocation=http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/maven-v4_0_0.xsd;
 modelVersion4.0.0/modelVersion
 parent
@@ -30,7 +31,7 @@
 namePhoenix - Pherf/name
 
 properties
-  top.dir${project.basedir}/../top.dir
+top.dir${project.basedir}/../top.dir
 /properties
 
 profiles
@@ -233,6 +234,11 @@
 
 !-- Test Dependencies --
 dependency
+groupIdcom.jcabi/groupId
+

[08/16] phoenix git commit: PHOENIX-2057 Acquire lock in MetaDataEndPointImpl.addRowsToChildViews() before calling doGetTable()

2015-06-24 Thread tdsilva
PHOENIX-2057 Acquire lock in MetaDataEndPointImpl.addRowsToChildViews() before 
calling doGetTable()


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/466eeb35
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/466eeb35
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/466eeb35

Branch: refs/heads/json
Commit: 466eeb35f904c1c768dd3da7b396001826a1b40c
Parents: e78eb6f
Author: Samarth samarth.j...@salesforce.com
Authored: Thu Jun 18 20:14:53 2015 -0700
Committer: Samarth samarth.j...@salesforce.com
Committed: Thu Jun 18 20:14:53 2015 -0700

--
 .../phoenix/coprocessor/MetaDataEndpointImpl.java   | 12 ++--
 1 file changed, 6 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/466eeb35/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index 077e325..b848565 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -1160,13 +1160,15 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 }
 
 
-private static void acquireLock(Region region, byte[] key, List<RowLock> 
locks)
+
+private static RowLock acquireLock(Region region, byte[] key, 
List<RowLock> locks)
 throws IOException {
 RowLock rowLock = region.getRowLock(key, true);
 if (rowLock == null) {
 throw new IOException("Failed to acquire lock on " + 
Bytes.toStringBinary(key));
 }
 locks.add(rowLock);
+return rowLock;
 }
 
 private static final byte[] PHYSICAL_TABLE_BYTES = new byte[] 
{PTable.LinkType.PHYSICAL_TABLE.getSerializedValue()};
@@ -1579,18 +1581,16 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 byte[] viewSchemaName = 
rowViewKeyMetaData[PhoenixDatabaseMetaData.SCHEMA_NAME_INDEX];
 byte[] viewName = 
rowViewKeyMetaData[PhoenixDatabaseMetaData.TABLE_NAME_INDEX];
 byte[] viewKey = SchemaUtil.getTableKey(viewTenantId, 
viewSchemaName, viewName);
-PTable view = doGetTable(viewKey, clientTimeStamp);
+// lock the rows corresponding to views so that no other thread 
can modify the view meta-data
+RowLock viewRowLock = acquireLock(region, viewKey, locks);
+PTable view = doGetTable(viewKey, clientTimeStamp, viewRowLock);
 
 if (view.getBaseColumnCount() == 
QueryConstants.DIVORCED_VIEW_BASE_COLUMN_COUNT) {
 // if a view has divorced itself from the base table, we don't 
allow schema changes
 // to be propagated to it.
 return;
 }
-// lock the rows corresponding to views so that no other thread 
can modify the view meta-data
-acquireLock(region, viewKey, locks);
-
 int deltaNumberOfColumns = 0;
-
 for (Mutation m : tableMetadata) {
 byte[][] rkmd = new byte[5][];
 int pkCount = getVarChars(m.getRow(), rkmd);



[05/16] phoenix git commit: PHOENIX-2049 Change ArraysWithNullsIT to be derived from BaseHBaseManagedTimeIT

2015-06-24 Thread tdsilva
PHOENIX-2049 Change ArraysWithNullsIT to be derived from BaseHBaseManagedTimeIT


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/2d70eff6
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/2d70eff6
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/2d70eff6

Branch: refs/heads/json
Commit: 2d70eff6594d0f46b10f2d9c4c8fa5d43d6ba5ab
Parents: fb44f35
Author: James Taylor jamestay...@apache.org
Authored: Wed Jun 17 17:09:33 2015 -0700
Committer: James Taylor jamestay...@apache.org
Committed: Wed Jun 17 17:13:31 2015 -0700

--
 .../src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/2d70eff6/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java
index b034193..e95a386 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java
@@ -26,7 +26,7 @@ import org.apache.phoenix.schema.types.PTimestamp;
 import org.apache.phoenix.schema.types.PhoenixArray;
 import org.junit.Test;
 
-public class ArraysWithNullsIT extends BaseClientManagedTimeIT {
+public class ArraysWithNullsIT extends BaseHBaseManagedTimeIT {
 
 @Test
 public void testArrayUpsertIntWithNulls() throws Exception {



[11/31] phoenix git commit: PHOENIX-1964 - porting from master

2015-06-24 Thread ndimiduk
PHOENIX-1964 - porting from master


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c95e28df
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c95e28df
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c95e28df

Branch: refs/heads/4.x-HBase-1.1
Commit: c95e28df94241f47d5cfe9a1515b21960c93adf2
Parents: 0e0b4dd
Author: cmarcel cmar...@salesforce.com
Authored: Wed May 27 13:58:45 2015 -0700
Committer: cmarcel cmar...@salesforce.com
Committed: Wed May 27 13:58:45 2015 -0700

--
 phoenix-pherf/config/pherf.properties   |  3 ++
 .../org/apache/phoenix/pherf/DataIngestIT.java  |  3 +-
 .../apache/phoenix/pherf/ResultBaseTestIT.java  | 45 ++
 .../java/org/apache/phoenix/pherf/Pherf.java|  7 +--
 .../apache/phoenix/pherf/PherfConstants.java| 50 +++-
 .../phoenix/pherf/loaddata/DataLoader.java  |  2 +-
 .../apache/phoenix/pherf/result/ResultUtil.java |  4 +-
 .../pherf/result/impl/CSVResultHandler.java |  5 +-
 .../pherf/result/impl/ImageResultHandler.java   |  5 +-
 .../pherf/result/impl/XMLResultHandler.java |  6 ++-
 .../apache/phoenix/pherf/util/ResourceList.java | 26 --
 .../pherf/workload/WorkloadExecutor.java|  2 +-
 .../phoenix/pherf/ConfigurationParserTest.java  |  2 +-
 .../org/apache/phoenix/pherf/ResourceTest.java  |  8 ++--
 .../apache/phoenix/pherf/ResultBaseTest.java| 44 +
 .../org/apache/phoenix/pherf/ResultTest.java|  5 +-
 16 files changed, 168 insertions(+), 49 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c95e28df/phoenix-pherf/config/pherf.properties
--
diff --git a/phoenix-pherf/config/pherf.properties 
b/phoenix-pherf/config/pherf.properties
index 354707a..1142f9b5 100644
--- a/phoenix-pherf/config/pherf.properties
+++ b/phoenix-pherf/config/pherf.properties
@@ -29,3 +29,6 @@ pherf.default.dataloader.threadpool=0
 # When upserting, this is the max # of rows that will be inserted in a single 
commit
 pherf.default.dataloader.batchsize=1000
 
+# Directory where results from a scenario run will be written
+pherf.default.results.dir=RESULTS
+

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c95e28df/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
--
diff --git 
a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java 
b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
index b29656d..2b56f43 100644
--- a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
+++ b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
@@ -18,7 +18,6 @@
 
 package org.apache.phoenix.pherf;
 
-import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT;
 import org.apache.phoenix.pherf.configuration.Column;
 import org.apache.phoenix.pherf.configuration.DataTypeMapping;
 import org.apache.phoenix.pherf.configuration.Scenario;
@@ -39,7 +38,7 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-public class DataIngestIT extends BaseHBaseManagedTimeIT {
+public class DataIngestIT extends ResultBaseTestIT {
 protected static PhoenixUtil util = new PhoenixUtil(true);
 static final String matcherScenario = .*scenario/.*test.*xml;
 static final String matcherSchema = .*datamodel/.*test.*sql;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c95e28df/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java
--
diff --git 
a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java 
b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java
new file mode 100644
index 000..6e103b8
--- /dev/null
+++ b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *   or more contributor license agreements.  See the NOTICE file
+ *   distributed with this work for additional information
+ *   regarding copyright ownership.  The ASF licenses this file
+ *   to you under the Apache License, Version 2.0 (the
+ *   License); you may not use this file except in compliance
+ *   with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   Unless required by applicable law or agreed to in writing, software
+ *   distributed under the License is distributed on an AS IS BASIS,
+ *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *   See the License for the specific language governing 

[26/31] phoenix git commit: PHOENIX-2032 psql.py is broken after PHOENIX-2013

2015-06-24 Thread ndimiduk
PHOENIX-2032 psql.py is broken after PHOENIX-2013


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d0bcb7b2
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d0bcb7b2
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d0bcb7b2

Branch: refs/heads/4.x-HBase-1.1
Commit: d0bcb7b2304133031b945d50e01f0f1d5fd023d4
Parents: e64f61b
Author: Nick Dimiduk ndimi...@apache.org
Authored: Fri Jun 12 10:23:05 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Fri Jun 12 17:01:27 2015 -0700

--
 phoenix-assembly/pom.xml  |  4 
 phoenix-assembly/src/build/client.xml | 27 +++
 2 files changed, 23 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/d0bcb7b2/phoenix-assembly/pom.xml
--
diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml
index d275d03..ebc5d71 100644
--- a/phoenix-assembly/pom.xml
+++ b/phoenix-assembly/pom.xml
@@ -152,10 +152,6 @@
 /dependency
 dependency
   groupIdorg.apache.phoenix/groupId
-  artifactIdphoenix-spark/artifactId
-/dependency
-dependency
-  groupIdorg.apache.phoenix/groupId
   artifactIdphoenix-server/artifactId
 /dependency
 dependency

http://git-wip-us.apache.org/repos/asf/phoenix/blob/d0bcb7b2/phoenix-assembly/src/build/client.xml
--
diff --git a/phoenix-assembly/src/build/client.xml 
b/phoenix-assembly/src/build/client.xml
index 101ccd6..e99bb19 100644
--- a/phoenix-assembly/src/build/client.xml
+++ b/phoenix-assembly/src/build/client.xml
@@ -53,13 +53,32 @@
 /dependencySet
 
 !-- Make sure we get all the components, not just the minimal client ones 
(e.g.
-  phoenix-flume, phoenix-pig, etc) --
+  phoenix-flume, phoenix-pig, etc). We should exclude phoenix-server and
+  phoenix-server-client in the future, see PHOENIX-2032, PHOENIX-2038 --
 dependencySet
   outputDirectory//outputDirectory
   unpacktrue/unpack
-  <includes>
-<include>org.apache.phoenix:phoenix-*</include>
-  </includes>
+  !-- multiple deps provide some variant of LICENSE files/directories. 
These
+   overwrite each other at best, at worst conflict on case-insensitive
+   filesystems like HDFS+ and FAT32. Just exclude them --
+  unpackOptions
+excludes
+  exclude*license*/exclude
+  exclude*LICENSE*/exclude
+  exclude**/license/**/exclude
+  exclude**/LICENSE/**/exclude
+/excludes
+  /unpackOptions
+  !-- this is default, but make intentions clear --
+  useTransitiveDependenciestrue/useTransitiveDependencies
+  !-- When include subelements are present, they define a set of
+   artifact coordinates to include. If none is present, then includes
+   represents all valid values
+   
https://maven.apache.org/plugins/maven-assembly-plugin/assembly.html#class_dependencySet
+   This means bring in all dependencies transitively of the
+   phoenix-assembly module.
+  --
+  includes /
 /dependencySet
   /dependencySets
 /assembly



[12/31] phoenix git commit: PHOENIX-1939 Test are failing with DoNotRetryIOException: ATABLE: null (Alicia Ying Shu)

2015-06-24 Thread ndimiduk
PHOENIX-1939 Test are failing with DoNotRetryIOException: ATABLE: null (Alicia 
Ying Shu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a600cc4d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a600cc4d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a600cc4d

Branch: refs/heads/4.x-HBase-1.1
Commit: a600cc4d7acc2c828ae7782e59d094f99e5631f0
Parents: c95e28d
Author: Nick Dimiduk ndimi...@apache.org
Authored: Fri May 29 17:12:25 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Fri May 29 17:13:08 2015 -0700

--
 .../src/main/java/org/apache/phoenix/schema/PTableImpl.java  | 4 ++--
 .../src/test/java/org/apache/phoenix/query/BaseTest.java | 3 ++-
 2 files changed, 4 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a600cc4d/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
index bf4420c..bdc95b8 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
@@ -88,8 +88,8 @@ public class PTableImpl implements PTable {
 
 private PTableKey key;
 private PName name;
-private PName schemaName;
-private PName tableName;
+private PName schemaName = PName.EMPTY_NAME;
+private PName tableName = PName.EMPTY_NAME;
 private PName tenantId;
 private PTableType type;
 private PIndexState state;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a600cc4d/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
--
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
index 54ae670..b0574c3 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
@@ -121,7 +121,6 @@ import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.ipc.PhoenixRpcSchedulerFactory;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
 import org.apache.hadoop.hbase.ipc.controller.ServerRpcControllerFactory;
-import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.LocalIndexMerger;
 import org.apache.hadoop.hbase.regionserver.RSRpcServices;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -829,6 +828,7 @@ public abstract class BaseTest {
 logger.info(Table  + fullTableName +  is already 
deleted.);
 }
 }
+rs.close();
 if (lastTenantId != null) {
 conn.close();
 }
@@ -860,6 +860,7 @@ public abstract class BaseTest {
 logger.info(DROP SEQUENCE STATEMENT: DROP SEQUENCE  + 
SchemaUtil.getEscapedTableName(rs.getString(2), rs.getString(3)));
 conn.createStatement().execute(DROP SEQUENCE  + 
SchemaUtil.getEscapedTableName(rs.getString(2), rs.getString(3)));
 }
+rs.close();
 }
 
 protected static void initSumDoubleValues(byte[][] splits, String url) 
throws Exception {



[18/31] phoenix git commit: PHOENIX-1962 Apply check style to the build

2015-06-24 Thread ndimiduk
PHOENIX-1962 Apply check style to the build


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/29ea5035
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/29ea5035
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/29ea5035

Branch: refs/heads/4.x-HBase-1.1
Commit: 29ea503546265a619ce501c477a109b69f940a00
Parents: f2be913
Author: Nick Dimiduk ndimi...@apache.org
Authored: Sat May 9 11:10:54 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Mon Jun 1 12:21:48 2015 -0700

--
 phoenix-assembly/pom.xml|   4 +
 phoenix-core/pom.xml|   4 +
 phoenix-flume/pom.xml   |   4 +
 phoenix-pherf/pom.xml   |   1 +
 phoenix-pig/pom.xml |   4 +
 phoenix-server-client/pom.xml   |   4 +
 phoenix-server/pom.xml  |   4 +
 phoenix-spark/pom.xml   |   1 +
 pom.xml |  23 ++
 src/main/config/checkstyle/checker.xml  | 281 +++
 src/main/config/checkstyle/header.txt   |  16 ++
 src/main/config/checkstyle/suppressions.xml |  46 
 12 files changed, 392 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/29ea5035/phoenix-assembly/pom.xml
--
diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml
index 04d9335..d275d03 100644
--- a/phoenix-assembly/pom.xml
+++ b/phoenix-assembly/pom.xml
@@ -33,6 +33,10 @@
   descriptionAssemble Phoenix artifacts/description
   packagingpom/packaging
 
+  properties
+top.dir${project.basedir}/../top.dir
+  /properties
+
   build
 plugins
   plugin

http://git-wip-us.apache.org/repos/asf/phoenix/blob/29ea5035/phoenix-core/pom.xml
--
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
index 951e969..6302441 100644
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@ -24,6 +24,10 @@
   urlhttp://www.apache.org/url
   /organization
 
+  properties
+top.dir${project.basedir}/../top.dir
+  /properties
+
   build
 resources
   resource

http://git-wip-us.apache.org/repos/asf/phoenix/blob/29ea5035/phoenix-flume/pom.xml
--
diff --git a/phoenix-flume/pom.xml b/phoenix-flume/pom.xml
index ea87ab0..c7f0650 100644
--- a/phoenix-flume/pom.xml
+++ b/phoenix-flume/pom.xml
@@ -31,6 +31,10 @@
   artifactIdphoenix-flume/artifactId
   namePhoenix - Flume/name
 
+  properties
+top.dir${project.basedir}/../top.dir
+  /properties
+
   dependencies
dependency
   groupIdorg.apache.phoenix/groupId

http://git-wip-us.apache.org/repos/asf/phoenix/blob/29ea5035/phoenix-pherf/pom.xml
--
diff --git a/phoenix-pherf/pom.xml b/phoenix-pherf/pom.xml
index e751d73..dd45075 100644
--- a/phoenix-pherf/pom.xml
+++ b/phoenix-pherf/pom.xml
@@ -30,6 +30,7 @@
 namePhoenix - Pherf/name
 
 properties
+  top.dir${project.basedir}/../top.dir
 /properties
 
 profiles

http://git-wip-us.apache.org/repos/asf/phoenix/blob/29ea5035/phoenix-pig/pom.xml
--
diff --git a/phoenix-pig/pom.xml b/phoenix-pig/pom.xml
index 957c06f..55b34d3 100644
--- a/phoenix-pig/pom.xml
+++ b/phoenix-pig/pom.xml
@@ -31,6 +31,10 @@
   artifactIdphoenix-pig/artifactId
   namePhoenix - Pig/name
 
+  properties
+top.dir${project.basedir}/../top.dir
+  /properties
+
   dependencies
 dependency
   groupIdorg.apache.phoenix/groupId

http://git-wip-us.apache.org/repos/asf/phoenix/blob/29ea5035/phoenix-server-client/pom.xml
--
diff --git a/phoenix-server-client/pom.xml b/phoenix-server-client/pom.xml
index 748e57c..3e54a07 100644
--- a/phoenix-server-client/pom.xml
+++ b/phoenix-server-client/pom.xml
@@ -24,6 +24,10 @@
 urlhttp://www.apache.org/url
   /organization
 
+  properties
+top.dir${project.basedir}/../top.dir
+  /properties
+
   build
 plugins
   plugin

http://git-wip-us.apache.org/repos/asf/phoenix/blob/29ea5035/phoenix-server/pom.xml
--
diff --git a/phoenix-server/pom.xml b/phoenix-server/pom.xml
index ab9a472..86b2525 100644
--- a/phoenix-server/pom.xml
+++ b/phoenix-server/pom.xml
@@ -24,6 +24,10 @@
 urlhttp://www.apache.org/url
   /organization
 
+  properties
+top.dir${project.basedir}/../top.dir
+  /properties
+
   build
 plugins
   plugin


[06/31] phoenix git commit: Changing version to 4.5.0-HBase-1.1-SNAPSHOT

2015-06-24 Thread ndimiduk
Changing version to 4.5.0-HBase-1.1-SNAPSHOT


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/3cdc3230
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/3cdc3230
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/3cdc3230

Branch: refs/heads/4.x-HBase-1.1
Commit: 3cdc3230c570ee8c22bb6c1bab975699fd02e94c
Parents: 56e1c0a
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Mon May 25 17:46:18 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Mon May 25 17:46:18 2015 +0530

--
 phoenix-assembly/pom.xml  | 2 +-
 phoenix-core/pom.xml  | 2 +-
 phoenix-flume/pom.xml | 2 +-
 phoenix-pherf/pom.xml | 2 +-
 phoenix-pig/pom.xml   | 2 +-
 phoenix-server-client/pom.xml | 2 +-
 phoenix-server/pom.xml| 2 +-
 phoenix-spark/pom.xml | 2 +-
 pom.xml   | 2 +-
 9 files changed, 9 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cdc3230/phoenix-assembly/pom.xml
--
diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml
index 8d9a965..04d9335 100644
--- a/phoenix-assembly/pom.xml
+++ b/phoenix-assembly/pom.xml
@@ -26,7 +26,7 @@
  <parent>
 <groupId>org.apache.phoenix</groupId>
 <artifactId>phoenix</artifactId>
-<version>4.4.0-SNAPSHOT</version>
+<version>4.5.0-HBase-1.1-SNAPSHOT</version>
   </parent>
   artifactIdphoenix-assembly/artifactId
   namePhoenix Assembly/name

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cdc3230/phoenix-core/pom.xml
--
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
index 22e6b60..951e969 100644
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@ -4,7 +4,7 @@
   parent
 groupIdorg.apache.phoenix/groupId
 artifactIdphoenix/artifactId
-version4.4.0-SNAPSHOT/version
+version4.5.0-HBase-1.1-SNAPSHOT/version
   /parent
   artifactIdphoenix-core/artifactId
   namePhoenix Core/name

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cdc3230/phoenix-flume/pom.xml
--
diff --git a/phoenix-flume/pom.xml b/phoenix-flume/pom.xml
index b2b9a47..ea87ab0 100644
--- a/phoenix-flume/pom.xml
+++ b/phoenix-flume/pom.xml
@@ -26,7 +26,7 @@
   parent
 groupIdorg.apache.phoenix/groupId
 artifactIdphoenix/artifactId
-version4.4.0-SNAPSHOT/version
+version4.5.0-HBase-1.1-SNAPSHOT/version
   /parent
   artifactIdphoenix-flume/artifactId
   namePhoenix - Flume/name

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cdc3230/phoenix-pherf/pom.xml
--
diff --git a/phoenix-pherf/pom.xml b/phoenix-pherf/pom.xml
index 0901f71..e751d73 100644
--- a/phoenix-pherf/pom.xml
+++ b/phoenix-pherf/pom.xml
@@ -22,7 +22,7 @@
 parent
 groupIdorg.apache.phoenix/groupId
 artifactIdphoenix/artifactId
-version4.4.0-SNAPSHOT/version
+version4.5.0-HBase-1.1-SNAPSHOT/version
 /parent
 
 artifactIdphoenix-pherf/artifactId

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cdc3230/phoenix-pig/pom.xml
--
diff --git a/phoenix-pig/pom.xml b/phoenix-pig/pom.xml
index 015a660..957c06f 100644
--- a/phoenix-pig/pom.xml
+++ b/phoenix-pig/pom.xml
@@ -26,7 +26,7 @@
   parent
 groupIdorg.apache.phoenix/groupId
 artifactIdphoenix/artifactId
-version4.4.0-SNAPSHOT/version
+version4.5.0-HBase-1.1-SNAPSHOT/version
   /parent
   artifactIdphoenix-pig/artifactId
   namePhoenix - Pig/name

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cdc3230/phoenix-server-client/pom.xml
--
diff --git a/phoenix-server-client/pom.xml b/phoenix-server-client/pom.xml
index 4d6fd45..748e57c 100644
--- a/phoenix-server-client/pom.xml
+++ b/phoenix-server-client/pom.xml
@@ -4,7 +4,7 @@
   parent
 groupIdorg.apache.phoenix/groupId
 artifactIdphoenix/artifactId
-version4.4.0-SNAPSHOT/version
+version4.5.0-HBase-1.1-SNAPSHOT/version
   /parent
   artifactIdphoenix-server-client/artifactId
   namePhoenix Query Server Client/name

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cdc3230/phoenix-server/pom.xml
--
diff --git a/phoenix-server/pom.xml b/phoenix-server/pom.xml
index 9f6289f..ab9a472 100644
--- a/phoenix-server/pom.xml
+++ b/phoenix-server/pom.xml
@@ -4,7 +4,7 @@
   parent
 groupIdorg.apache.phoenix/groupId
 artifactIdphoenix/artifactId
-version4.4.0-SNAPSHOT/version
+version4.5.0-HBase-1.1-SNAPSHOT/version

[02/31] phoenix git commit: PHOENIX-1681 Use the new Region Interface (Andrew Purtell)

2015-06-24 Thread ndimiduk
PHOENIX-1681 Use the new Region Interface (Andrew Purtell)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ea622d5f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ea622d5f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ea622d5f

Branch: refs/heads/4.x-HBase-1.1
Commit: ea622d5f7ab5c37d2ecf8be6054e5ed42f36a035
Parents: 98271b8
Author: Enis Soztutar e...@apache.org
Authored: Thu May 21 23:22:54 2015 -0700
Committer: Enis Soztutar e...@apache.org
Committed: Fri May 22 00:30:56 2015 -0700

--
 ...ReplayWithIndexWritesAndCompressedWALIT.java |  4 +-
 .../EndToEndCoveredColumnsIndexBuilderIT.java   |  4 +-
 .../IndexHalfStoreFileReaderGenerator.java  |  9 +-
 .../regionserver/IndexSplitTransaction.java | 65 +-
 .../hbase/regionserver/LocalIndexMerger.java| 16 ++--
 .../hbase/regionserver/LocalIndexSplitter.java  | 11 +--
 .../coprocessor/BaseScannerRegionObserver.java  | 26 +++---
 .../GroupedAggregateRegionObserver.java | 13 +--
 .../coprocessor/MetaDataEndpointImpl.java   | 94 ++--
 .../phoenix/coprocessor/ScanRegionObserver.java | 17 ++--
 .../coprocessor/SequenceRegionObserver.java | 16 ++--
 .../UngroupedAggregateRegionObserver.java   | 29 +++---
 .../hbase/index/covered/data/LocalTable.java|  5 +-
 .../write/ParallelWriterIndexCommitter.java |  8 +-
 .../recovery/PerRegionIndexWriteCache.java  | 10 +--
 .../recovery/StoreFailuresInCachePolicy.java|  4 +-
 .../TrackingParallelWriterIndexCommitter.java   |  8 +-
 .../phoenix/index/PhoenixIndexBuilder.java  |  4 +-
 .../apache/phoenix/index/PhoenixIndexCodec.java | 14 ++-
 .../schema/stats/StatisticsCollector.java   | 14 +--
 .../phoenix/schema/stats/StatisticsScanner.java | 16 ++--
 .../phoenix/schema/stats/StatisticsWriter.java  | 16 ++--
 .../java/org/apache/phoenix/util/IndexUtil.java | 38 
 .../index/covered/TestLocalTableState.java  |  8 +-
 .../index/write/TestWALRecoveryCaching.java | 17 ++--
 .../recovery/TestPerRegionIndexWriteCache.java  |  6 +-
 26 files changed, 230 insertions(+), 242 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ea622d5f/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
 
b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
index 3b8ff29..611ba68 100644
--- 
a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
@@ -159,7 +159,7 @@ public class WALReplayWithIndexWritesAndCompressedWALIT {
   }
 
   /**
-   * Test writing edits into an HRegion, closing it, splitting logs, opening 
Region again. Verify
+   * Test writing edits into an region, closing it, splitting logs, opening 
Region again. Verify
* seqids.
* @throws Exception on failure
*/
@@ -183,7 +183,7 @@ public class WALReplayWithIndexWritesAndCompressedWALIT {
 builder.build(htd);
 
 // create the region + its WAL
-HRegion region0 = HRegion.createHRegion(hri, hbaseRootDir, this.conf, htd);
+HRegion region0 = HRegion.createHRegion(hri, hbaseRootDir, this.conf, 
htd); // FIXME: Uses private type
 region0.close();
 region0.getWAL().close();
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ea622d5f/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
index d90733f..6b2309e 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.util.EnvironmentEdge;
 import 

[04/31] phoenix git commit: PHOENIX-1763 Support building with HBase-1.1.0

2015-06-24 Thread ndimiduk
PHOENIX-1763 Support building with HBase-1.1.0


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/98271b88
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/98271b88
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/98271b88

Branch: refs/heads/4.x-HBase-1.1
Commit: 98271b888c113f10e174205434e05d3b36b7eb67
Parents: bf01eb2
Author: Enis Soztutar e...@apache.org
Authored: Thu May 21 23:08:26 2015 -0700
Committer: Enis Soztutar e...@apache.org
Committed: Fri May 22 00:30:56 2015 -0700

--
 phoenix-core/pom.xml| 17 +++--
 .../regionserver/IndexHalfStoreFileReader.java  | 31 ++--
 .../regionserver/IndexSplitTransaction.java | 39 --
 .../hbase/regionserver/LocalIndexMerger.java|  3 +-
 .../cache/aggcache/SpillableGroupByCache.java   | 13 +++-
 .../phoenix/coprocessor/BaseRegionScanner.java  | 12 +--
 .../coprocessor/BaseScannerRegionObserver.java  | 77 +++-
 .../coprocessor/DelegateRegionScanner.java  | 23 --
 .../GroupedAggregateRegionObserver.java | 53 --
 .../coprocessor/HashJoinRegionScanner.java  | 60 ---
 .../coprocessor/MetaDataRegionObserver.java | 23 +++---
 .../phoenix/coprocessor/ScanRegionObserver.java | 11 ++-
 .../UngroupedAggregateRegionObserver.java   | 55 +++---
 .../hbase/index/covered/data/LocalTable.java|  2 +-
 .../index/covered/filter/FamilyOnlyFilter.java  |  6 +-
 .../index/scanner/FilteredKeyValueScanner.java  |  2 +-
 .../phoenix/index/PhoenixIndexBuilder.java  |  6 +-
 .../iterate/RegionScannerResultIterator.java|  9 ++-
 .../phoenix/schema/stats/StatisticsScanner.java | 10 ++-
 .../hbase/ipc/PhoenixIndexRpcSchedulerTest.java |  6 +-
 .../index/covered/TestLocalTableState.java  |  1 -
 .../covered/filter/TestFamilyOnlyFilter.java| 12 +--
 .../index/write/TestWALRecoveryCaching.java |  4 +-
 phoenix-flume/pom.xml   |  9 ---
 phoenix-pig/pom.xml | 31 +---
 phoenix-spark/pom.xml   |  7 ++
 pom.xml | 41 ++-
 27 files changed, 361 insertions(+), 202 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/98271b88/phoenix-core/pom.xml
--
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
index 45b8d73..22e6b60 100644
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@ -350,16 +350,25 @@
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-it/artifactId
-  version${hbase.version}/version
   typetest-jar/type
   scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
+  artifactIdhbase-annotations/artifactId
+/dependency
+dependency
+  groupIdorg.apache.hbase/groupId
   artifactIdhbase-common/artifactId
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
+  artifactIdhbase-common/artifactId
+  scopetest/scope
+  typetest-jar/type
+/dependency
+dependency
+  groupIdorg.apache.hbase/groupId
   artifactIdhbase-protocol/artifactId
 /dependency
 dependency
@@ -369,18 +378,16 @@
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-server/artifactId
-  version${hbase.version}/version
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-server/artifactId
-  version${hbase.version}/version
   typetest-jar/type
+  scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-hadoop-compat/artifactId
-  scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
@@ -391,13 +398,11 @@
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-hadoop2-compat/artifactId
-  version${hbase.version}/version
   scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-hadoop2-compat/artifactId
-  version${hbase.version}/version
   typetest-jar/type
   scopetest/scope
 /dependency

http://git-wip-us.apache.org/repos/asf/phoenix/blob/98271b88/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexHalfStoreFileReader.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexHalfStoreFileReader.java
 
b/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexHalfStoreFileReader.java
index 49e2022..9befc8c 100644
--- 

phoenix git commit: PHOENIX-1975 Detect and use HBASE_HOME when set

2015-06-24 Thread ndimiduk
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 8e309c2b4 - 65361f357


PHOENIX-1975 Detect and use HBASE_HOME when set


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/65361f35
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/65361f35
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/65361f35

Branch: refs/heads/4.x-HBase-0.98
Commit: 65361f357ff46bcad581b981b012053f7e39a5cd
Parents: 8e309c2
Author: Nick Dimiduk ndimi...@apache.org
Authored: Wed Jun 24 13:59:00 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Wed Jun 24 13:59:47 2015 -0700

--
 bin/phoenix_utils.py | 10 +-
 1 file changed, 9 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/65361f35/bin/phoenix_utils.py
--
diff --git a/bin/phoenix_utils.py b/bin/phoenix_utils.py
index 383e0e1..bfb4737 100755
--- a/bin/phoenix_utils.py
+++ b/bin/phoenix_utils.py
@@ -65,7 +65,15 @@ def setPath():
 phoenix_class_path = os.getenv('PHOENIX_CLASS_PATH','')
 
 global hbase_conf_dir
-hbase_conf_dir = os.getenv('HBASE_CONF_DIR', os.getenv('HBASE_CONF_PATH', 
'.'))
+# if HBASE_CONF_DIR set explicitly, use that
+hbase_conf_dir = os.getenv('HBASE_CONF_DIR', os.getenv('HBASE_CONF_PATH'))
+if not hbase_conf_dir:
+# else fall back to HBASE_HOME
+if os.getenv('HBASE_HOME'):
+hbase_conf_dir = os.path.join(os.getenv('HBASE_HOME'), 'conf')
+else:
+# default to pwd
+hbase_conf_dir = '.'
 global hbase_conf_path # keep conf_path around for backward compatibility
 hbase_conf_path = hbase_conf_dir
 



[13/31] phoenix git commit: PHOENIX-2010 Properly validate number of arguments passed to the functions in FunctionParseNode#validate(Rajeshbabu)

2015-06-24 Thread ndimiduk
PHOENIX-2010 Properly validate number of arguments passed to the functions in 
FunctionParseNode#validate(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b2c0cb90
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b2c0cb90
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b2c0cb90

Branch: refs/heads/4.x-HBase-1.1
Commit: b2c0cb9002ee881f21d968817c386a98d39074ca
Parents: a600cc4
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Sun May 31 07:40:39 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Sun May 31 07:40:39 2015 +0530

--
 .../phoenix/end2end/UserDefinedFunctionsIT.java   | 14 ++
 .../org/apache/phoenix/parse/FunctionParseNode.java   |  4 
 .../main/java/org/apache/phoenix/parse/PFunction.java |  4 +---
 3 files changed, 19 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b2c0cb90/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
index 7dbde3c..868e19d 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
@@ -442,6 +442,20 @@ public class UserDefinedFunctionsIT extends 
BaseOwnClusterIT{
 rs = stmt.executeQuery(select k from t9 where mysum9(k)=11);
 assertTrue(rs.next());
 assertEquals(1, rs.getInt(1));
+try {
+rs = stmt.executeQuery(select k from t9 where 
mysum9(k,10,'x')=11);
+fail(FunctionNotFoundException should be thrown);
+} catch(FunctionNotFoundException e) {
+} catch(Exception e) {
+fail(FunctionNotFoundException should be thrown);
+}
+try {
+rs = stmt.executeQuery(select mysum9() from t9);
+fail(FunctionNotFoundException should be thrown);
+} catch(FunctionNotFoundException e) {
+} catch(Exception e) {
+fail(FunctionNotFoundException should be thrown);
+}
 stmt.execute(drop function mysum9);
 try {
 rs = stmt.executeQuery(select k from t9 where mysum9(k)=11);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b2c0cb90/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java 
b/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java
index d1001ee..be52d89 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java
@@ -41,6 +41,7 @@ import 
org.apache.phoenix.expression.function.FunctionExpression;
 import org.apache.phoenix.expression.function.UDFExpression;
 import org.apache.phoenix.parse.PFunction.FunctionArgument;
 import org.apache.phoenix.schema.ArgumentTypeMismatchException;
+import org.apache.phoenix.schema.FunctionNotFoundException;
 import org.apache.phoenix.schema.ValueRangeExcpetion;
 import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.schema.types.PDataTypeFactory;
@@ -133,6 +134,9 @@ public class FunctionParseNode extends CompoundParseNode {
 public ListExpression validate(ListExpression children, 
StatementContext context) throws SQLException {
 BuiltInFunctionInfo info = this.getInfo();
 BuiltInFunctionArgInfo[] args = info.getArgs();
+if (args.length  children.size() || info.getRequiredArgCount()  
children.size()) {
+throw new FunctionNotFoundException(this.name);
+}
 if (args.length  children.size()) {
 ListExpression moreChildren = new 
ArrayListExpression(children);
 for (int i = children.size(); i  info.getArgs().length; i++) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b2c0cb90/phoenix-core/src/main/java/org/apache/phoenix/parse/PFunction.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/PFunction.java 
b/phoenix-core/src/main/java/org/apache/phoenix/parse/PFunction.java
index 351bec7..aeed3ac 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/PFunction.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/PFunction.java
@@ -96,9 +96,7 @@ public class PFunction implements PMetaDataEntity {
 }
 
 public 

[27/31] phoenix git commit: PHOENIX-2040 Mark spark/scala dependencies as 'provided' (Josh Mahonin)

2015-06-24 Thread ndimiduk
PHOENIX-2040 Mark spark/scala dependencies as 'provided' (Josh Mahonin)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/43c722ca
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/43c722ca
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/43c722ca

Branch: refs/heads/4.x-HBase-1.1
Commit: 43c722ca6d2d55347d1f2caf7641ce03339e1e1e
Parents: d0bcb7b
Author: Nick Dimiduk ndimi...@apache.org
Authored: Mon Jun 15 16:16:03 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Mon Jun 15 16:16:30 2015 -0700

--
 phoenix-assembly/pom.xml |  4 
 phoenix-spark/pom.xml| 51 ---
 2 files changed, 32 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/43c722ca/phoenix-assembly/pom.xml
--
diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml
index ebc5d71..d275d03 100644
--- a/phoenix-assembly/pom.xml
+++ b/phoenix-assembly/pom.xml
@@ -152,6 +152,10 @@
 /dependency
 dependency
   groupIdorg.apache.phoenix/groupId
+  artifactIdphoenix-spark/artifactId
+/dependency
+dependency
+  groupIdorg.apache.phoenix/groupId
   artifactIdphoenix-server/artifactId
 /dependency
 dependency

http://git-wip-us.apache.org/repos/asf/phoenix/blob/43c722ca/phoenix-spark/pom.xml
--
diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml
index 1747573..aea5c7e 100644
--- a/phoenix-spark/pom.xml
+++ b/phoenix-spark/pom.xml
@@ -45,12 +45,7 @@
   groupIdorg.apache.phoenix/groupId
   artifactIdphoenix-core/artifactId
 /dependency
-dependency
-  groupIdorg.apache.phoenix/groupId
-  artifactIdphoenix-core/artifactId
-  classifiertests/classifier
-  scopetest/scope
-/dependency
+
 !-- Force import of Spark's servlet API for unit tests --
 dependency
   groupIdjavax.servlet/groupId
@@ -59,16 +54,38 @@
   scopetest/scope
 /dependency
 
+!-- Mark Spark / Scala as provided --
 dependency
-  groupIdjunit/groupId
-  artifactIdjunit/artifactId
+  groupIdorg.scala-lang/groupId
+  artifactIdscala-library/artifactId
+  version${scala.version}/version
+  scopeprovided/scope
+/dependency
+dependency
+  groupIdorg.apache.spark/groupId
+  artifactIdspark-core_${scala.binary.version}/artifactId
+  version${spark.version}/version
+  scopeprovided/scope
+/dependency
+dependency
+  groupIdorg.apache.spark/groupId
+  artifactIdspark-sql_${scala.binary.version}/artifactId
+  version${spark.version}/version
+  scopeprovided/scope
+/dependency
+
+!-- Test dependencies --
+dependency
+  groupIdorg.apache.phoenix/groupId
+  artifactIdphoenix-core/artifactId
+  classifiertests/classifier
   scopetest/scope
 /dependency
 
 dependency
-  groupIdorg.scala-lang/groupId
-  artifactIdscala-library/artifactId
-  version${scala.version}/version
+  groupIdjunit/groupId
+  artifactIdjunit/artifactId
+  scopetest/scope
 /dependency
 
 dependency
@@ -86,18 +103,6 @@
 /dependency
 
 dependency
-  groupIdorg.apache.spark/groupId
-  artifactIdspark-core_${scala.binary.version}/artifactId
-  version${spark.version}/version
-/dependency
-
-dependency
-  groupIdorg.apache.spark/groupId
-  artifactIdspark-sql_${scala.binary.version}/artifactId
-  version${spark.version}/version
-/dependency
-
-dependency
   groupIdorg.apache.hadoop/groupId
   artifactIdhadoop-client/artifactId
   version${hadoop-two.version}/version



[31/31] phoenix git commit: PHOENIX-1975 Detect and use HBASE_HOME when set

2015-06-24 Thread ndimiduk
PHOENIX-1975 Detect and use HBASE_HOME when set


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/980d29c5
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/980d29c5
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/980d29c5

Branch: refs/heads/4.x-HBase-1.1
Commit: 980d29c5acf785dc90ece1a7f047711e8d522a2e
Parents: 05b1b8b
Author: Nick Dimiduk ndimi...@apache.org
Authored: Wed Jun 24 13:59:00 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Wed Jun 24 13:59:28 2015 -0700

--
 bin/phoenix_utils.py | 10 +-
 1 file changed, 9 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/980d29c5/bin/phoenix_utils.py
--
diff --git a/bin/phoenix_utils.py b/bin/phoenix_utils.py
index 383e0e1..bfb4737 100755
--- a/bin/phoenix_utils.py
+++ b/bin/phoenix_utils.py
@@ -65,7 +65,15 @@ def setPath():
 phoenix_class_path = os.getenv('PHOENIX_CLASS_PATH','')
 
 global hbase_conf_dir
-hbase_conf_dir = os.getenv('HBASE_CONF_DIR', os.getenv('HBASE_CONF_PATH', 
'.'))
+# if HBASE_CONF_DIR set explicitly, use that
+hbase_conf_dir = os.getenv('HBASE_CONF_DIR', os.getenv('HBASE_CONF_PATH'))
+if not hbase_conf_dir:
+# else fall back to HBASE_HOME
+if os.getenv('HBASE_HOME'):
+hbase_conf_dir = os.path.join(os.getenv('HBASE_HOME'), 'conf')
+else:
+# default to pwd
+hbase_conf_dir = '.'
 global hbase_conf_path # keep conf_path around for backward compatibility
 hbase_conf_path = hbase_conf_dir
 



[29/31] phoenix git commit: PHOENIX-1941 Phoenix tests are failing in linux env with missing class: StaticMapping (Alicia Ying Shu)

2015-06-24 Thread ndimiduk
PHOENIX-1941 Phoenix tests are failing in linux env with missing class: 
StaticMapping (Alicia Ying Shu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/329d7494
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/329d7494
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/329d7494

Branch: refs/heads/4.x-HBase-1.1
Commit: 329d74948521ed974593e455369a27d9cd705249
Parents: 52f5b04
Author: Nick Dimiduk ndimi...@apache.org
Authored: Wed Jun 17 12:17:33 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Wed Jun 17 12:23:47 2015 -0700

--
 .../phoenix/end2end/End2EndTestDriver.java   | 19 +++
 1 file changed, 15 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/329d7494/phoenix-core/src/it/java/org/apache/phoenix/end2end/End2EndTestDriver.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/End2EndTestDriver.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/End2EndTestDriver.java
index 26d18cf..743f729 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/End2EndTestDriver.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/End2EndTestDriver.java
@@ -21,6 +21,7 @@ package org.apache.phoenix.end2end;
 
 import java.io.IOException;
 import java.io.PrintStream;
+import java.lang.annotation.Annotation;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
@@ -79,10 +80,20 @@ public class End2EndTestDriver extends AbstractHBaseTool {
 
   @Override
   public boolean isCandidateClass(Class? c) {
-return testFilterRe.matcher(c.getName()).find() 
-  // Our pattern will match the below NON-IntegrationTest. Rather than
-  // do exotic regex, just filter it out here
-  super.isCandidateClass(c);
+  Annotation[] annotations = c.getAnnotations();
+  for (Annotation curAnnotation : annotations) {
+  if 
(curAnnotation.toString().contains(NeedsOwnMiniClusterTest)) {
+  /* Skip tests that aren't designed to run against a live 
cluster.
+   * For a live cluster, we cannot bring it up and down as 
required
+   * for these tests to run.
+   */
+  return false;
+  }
+  }
+  return testFilterRe.matcher(c.getName()).find() 
+  // Our pattern will match the below NON-IntegrationTest. 
Rather than
+  // do exotic regex, just filter it out here
+  super.isCandidateClass(c);
   }
 }
 



[07/31] phoenix git commit: PHOENIX-2005 Connection utilities omit zk client port, parent znode

2015-06-24 Thread ndimiduk
PHOENIX-2005 Connection utilities omit zk client port, parent znode


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c6b37b97
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c6b37b97
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c6b37b97

Branch: refs/heads/4.x-HBase-1.1
Commit: c6b37b979da1b514bcb9257c7e095e39b0c2c215
Parents: 3cdc323
Author: Nick Dimiduk ndimi...@apache.org
Authored: Tue May 26 11:11:48 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Tue May 26 13:27:03 2015 -0700

--
 .../phoenix/jdbc/PhoenixEmbeddedDriver.java | 28 --
 .../phoenix/mapreduce/CsvBulkLoadTool.java  | 93 ++--
 .../phoenix/mapreduce/CsvToKeyValueMapper.java  | 26 +-
 .../query/ConnectionQueryServicesImpl.java  |  4 +-
 .../java/org/apache/phoenix/util/QueryUtil.java | 45 --
 .../phoenix/jdbc/PhoenixEmbeddedDriverTest.java | 14 ++-
 .../phoenix/mapreduce/CsvBulkLoadToolTest.java  | 11 ---
 .../mapreduce/CsvToKeyValueMapperTest.java  | 15 
 .../org/apache/phoenix/util/QueryUtilTest.java  | 33 ---
 9 files changed, 139 insertions(+), 130 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c6b37b97/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
index 9e95667..2451603 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
@@ -31,6 +31,7 @@ import java.util.logging.Logger;
 
 import javax.annotation.concurrent.Immutable;
 
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.phoenix.coprocessor.MetaDataProtocol;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.exception.SQLExceptionInfo;
@@ -174,10 +175,10 @@ public abstract class PhoenixEmbeddedDriver implements 
Driver, org.apache.phoeni
 }
 
 /**
- * 
+ *
  * Class to encapsulate connection info for HBase
  *
- * 
+ *
  * @since 0.1.1
  */
 public static class ConnectionInfo {
@@ -204,12 +205,18 @@ public abstract class PhoenixEmbeddedDriver implements 
Driver, org.apache.phoeni
 return false;
 }
 
-protected static ConnectionInfo create(String url) throws SQLException 
{
-StringTokenizer tokenizer = new StringTokenizer(url == null ?  : 
url.substring(PhoenixRuntime.JDBC_PROTOCOL.length()),DELIMITERS, true);
+public static ConnectionInfo create(String url) throws SQLException {
+url = url == null ?  : url;
+url = url.startsWith(PhoenixRuntime.JDBC_PROTOCOL)
+? url.substring(PhoenixRuntime.JDBC_PROTOCOL.length())
+: url;
+StringTokenizer tokenizer = new StringTokenizer(url, DELIMITERS, 
true);
 int nTokens = 0;
 String[] tokens = new String[5];
 String token = null;
-while (tokenizer.hasMoreTokens()  
!(token=tokenizer.nextToken()).equals(TERMINATOR)  tokenizer.hasMoreTokens() 
 nTokens  tokens.length) {
+while (tokenizer.hasMoreTokens() 
+!(token=tokenizer.nextToken()).equals(TERMINATOR) 
+tokenizer.hasMoreTokens()  nTokens  tokens.length) {
 token = tokenizer.nextToken();
 // This would mean we have an empty string for a token which 
is illegal
 if (DELIMITERS.contains(token)) {
@@ -316,8 +323,7 @@ public abstract class PhoenixEmbeddedDriver implements 
Driver, org.apache.phoeni
 private final String principal;
 private final String keytab;
 
-// used for testing
-ConnectionInfo(String zookeeperQuorum, Integer port, String rootNode, 
String principal, String keytab) {
+public ConnectionInfo(String zookeeperQuorum, Integer port, String 
rootNode, String principal, String keytab) {
 this.zookeeperQuorum = zookeeperQuorum;
 this.port = port;
 this.rootNode = rootNode;
@@ -326,8 +332,7 @@ public abstract class PhoenixEmbeddedDriver implements 
Driver, org.apache.phoeni
 this.keytab = keytab;
 }
 
-// used for testing
-ConnectionInfo(String zookeeperQuorum, Integer port, String rootNode) {
+public ConnectionInfo(String zookeeperQuorum, Integer port, String 
rootNode) {
this(zookeeperQuorum, port, rootNode, null, null);
 }
 
@@ -417,6 +422,11 @@ 

[03/31] phoenix git commit: PHOENIX-1763 Support building with HBase-1.1.0

2015-06-24 Thread ndimiduk
http://git-wip-us.apache.org/repos/asf/phoenix/blob/98271b88/phoenix-pig/pom.xml
--
diff --git a/phoenix-pig/pom.xml b/phoenix-pig/pom.xml
index 2db1af6..015a660 100644
--- a/phoenix-pig/pom.xml
+++ b/phoenix-pig/pom.xml
@@ -54,7 +54,6 @@
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-testing-util/artifactId
-  version${hbase.version}/version
   scopetest/scope
   optionaltrue/optional
   exclusions
@@ -67,7 +66,6 @@
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-it/artifactId
-  version${hbase.version}/version
   typetest-jar/type
   scopetest/scope
   exclusions
@@ -80,41 +78,56 @@
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-common/artifactId
-  version${hbase.version}/version
+/dependency
+dependency
+  groupIdorg.apache.hbase/groupId
+  artifactIdhbase-common/artifactId
+  scopetest/scope
+  typetest-jar/type
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-protocol/artifactId
-  version${hbase.version}/version
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-client/artifactId
-  version${hbase.version}/version
+/dependency
+   dependency
+  groupIdorg.apache.hbase/groupId
+  artifactIdhbase-server/artifactId
+/dependency
+dependency
+  groupIdorg.apache.hbase/groupId
+  artifactIdhbase-server/artifactId
+  typetest-jar/type
+  scopetest/scope
+/dependency
+dependency
+  groupIdorg.apache.hbase/groupId
+  artifactIdhbase-client/artifactId
+  typetest-jar/type
+  scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-hadoop-compat/artifactId
-  version${hbase.version}/version
   scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-hadoop-compat/artifactId
-  version${hbase.version}/version
   typetest-jar/type
   scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-hadoop2-compat/artifactId
-  version${hbase.version}/version
   scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-hadoop2-compat/artifactId
-  version${hbase.version}/version
   typetest-jar/type
   scopetest/scope
 /dependency

http://git-wip-us.apache.org/repos/asf/phoenix/blob/98271b88/phoenix-spark/pom.xml
--
diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml
index adeed88..a232cf4 100644
--- a/phoenix-spark/pom.xml
+++ b/phoenix-spark/pom.xml
@@ -460,6 +460,13 @@
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
+  artifactIdhbase-server/artifactId
+  version${hbase.version}/version
+  scopetest/scope
+  typetest-jar/type
+/dependency
+dependency
+  groupIdorg.apache.hbase/groupId
   artifactIdhbase-it/artifactId
   version${hbase.version}/version
   typetest-jar/type

http://git-wip-us.apache.org/repos/asf/phoenix/blob/98271b88/pom.xml
--
diff --git a/pom.xml b/pom.xml
index d310c37..4361e54 100644
--- a/pom.xml
+++ b/pom.xml
@@ -78,7 +78,7 @@
 test.output.tofiletrue/test.output.tofile
 
 !-- Hadoop Versions --
-hbase.version1.0.1/hbase.version
+hbase.version1.1.0/hbase.version
 hadoop-two.version2.5.1/hadoop-two.version
 
 !-- Dependency versions --
@@ -452,6 +452,11 @@
   !-- HBase dependencies --
   dependency
 groupIdorg.apache.hbase/groupId
+artifactIdhbase-annotations/artifactId
+version${hbase.version}/version
+  /dependency
+  dependency
+groupIdorg.apache.hbase/groupId
 artifactIdhbase-testing-util/artifactId
 version${hbase.version}/version
 scopetest/scope
@@ -488,13 +493,34 @@
   /dependency
   dependency
 groupIdorg.apache.hbase/groupId
+artifactIdhbase-common/artifactId
+version${hbase.version}/version
+typetest-jar/type
+scopetest/scope
+  /dependency
+  dependency
+groupIdorg.apache.hbase/groupId
 artifactIdhbase-client/artifactId
 version${hbase.version}/version
   /dependency
   dependency
 groupIdorg.apache.hbase/groupId
+artifactIdhbase-client/artifactId
+version${hbase.version}/version
+typetest-jar/type
+scopetest/scope
+  /dependency
+  dependency
+groupIdorg.apache.hbase/groupId
+artifactIdhbase-server/artifactId
+version${hbase.version}/version
+  /dependency
+  dependency
+

[10/31] phoenix git commit: PHOENIX-2013 Apply PHOENIX-1995 to runnable uberjar as well

2015-06-24 Thread ndimiduk
PHOENIX-2013 Apply PHOENIX-1995 to runnable uberjar as well


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/0e0b4ddb
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/0e0b4ddb
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/0e0b4ddb

Branch: refs/heads/4.x-HBase-1.1
Commit: 0e0b4ddb4d130b38c7aa28d2e31b0a9552087256
Parents: 1a2f2dc
Author: Nick Dimiduk ndimi...@apache.org
Authored: Wed May 27 11:27:04 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Wed May 27 13:20:32 2015 -0700

--
 phoenix-server/src/build/query-server-runnable.xml | 9 +
 1 file changed, 9 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/0e0b4ddb/phoenix-server/src/build/query-server-runnable.xml
--
diff --git a/phoenix-server/src/build/query-server-runnable.xml 
b/phoenix-server/src/build/query-server-runnable.xml
index e2a3dc4..ef22b14 100644
--- a/phoenix-server/src/build/query-server-runnable.xml
+++ b/phoenix-server/src/build/query-server-runnable.xml
@@ -28,6 +28,15 @@
 formatjar/format
   /formats
   includeBaseDirectoryfalse/includeBaseDirectory
+  containerDescriptorHandlers
+containerDescriptorHandler
+  !--
+  aggregate SPI's so that things like HDFS FileSystem works in uberjar
+  http://docs.oracle.com/javase/tutorial/sound/SPI-intro.html
+  --
+  handlerNamemetaInf-services/handlerName
+/containerDescriptorHandler
+  /containerDescriptorHandlers
   dependencySets
 dependencySet
   outputDirectory//outputDirectory



[24/31] phoenix git commit: PHOENIX-1968: Should support saving arrays

2015-06-24 Thread ndimiduk
PHOENIX-1968: Should support saving arrays


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f7d73496
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f7d73496
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f7d73496

Branch: refs/heads/4.x-HBase-1.1
Commit: f7d734966f7172c3bc4a6f0ba31594ba74ee91a1
Parents: bfd860f
Author: ravimagham ravimag...@apache.org
Authored: Thu Jun 11 12:59:48 2015 -0700
Committer: ravimagham ravimag...@apache.org
Committed: Thu Jun 11 12:59:48 2015 -0700

--
 .../apache/phoenix/spark/PhoenixSparkIT.scala   | 21 
 .../phoenix/spark/PhoenixRecordWritable.scala   | 25 
 2 files changed, 41 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f7d73496/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
--
diff --git 
a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala 
b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
index 42e8676..5f256e6 100644
--- a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
+++ b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
@@ -415,4 +415,25 @@ class PhoenixSparkIT extends FunSuite with Matchers with 
BeforeAndAfterAll {
 
 results.toList shouldEqual checkResults
   }
+
+  test(Can save arrays back to phoenix) {
+val dataSet = List((2L, Array(String1, String2, String3)))
+
+sc
+  .parallelize(dataSet)
+  .saveToPhoenix(
+ARRAY_TEST_TABLE,
+Seq(ID,VCARRAY),
+zkUrl = Some(quorumAddress)
+  )
+
+// Load the results back
+val stmt = conn.createStatement()
+val rs = stmt.executeQuery(SELECT VCARRAY FROM ARRAY_TEST_TABLE WHERE ID 
= 2)
+rs.next()
+val sqlArray = rs.getArray(1).getArray().asInstanceOf[Array[String]]
+
+// Verify the arrays are equal
+sqlArray shouldEqual dataSet(0)._2
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f7d73496/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRecordWritable.scala
--
diff --git 
a/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRecordWritable.scala
 
b/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRecordWritable.scala
index 67e0bd2..3977657 100644
--- 
a/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRecordWritable.scala
+++ 
b/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRecordWritable.scala
@@ -16,11 +16,12 @@ package org.apache.phoenix.spark
 import java.sql.{PreparedStatement, ResultSet}
 import org.apache.hadoop.mapreduce.lib.db.DBWritable
 import org.apache.phoenix.mapreduce.util.ColumnInfoToStringEncoderDecoder
-import org.apache.phoenix.schema.types.{PDate, PhoenixArray}
+import org.apache.phoenix.schema.types.{PDataType, PDate, PhoenixArray}
 import org.joda.time.DateTime
 import scala.collection.{immutable, mutable}
 import scala.collection.JavaConversions._
 
+
 class PhoenixRecordWritable(var encodedColumns: String) extends DBWritable {
   val upsertValues = mutable.ArrayBuffer[Any]()
   val resultMap = mutable.Map[String, AnyRef]()
@@ -44,13 +45,27 @@ class PhoenixRecordWritable(var encodedColumns: String) 
extends DBWritable {
 upsertValues.zip(columns).zipWithIndex.foreach {
   case ((v, c), i) = {
 if (v != null) {
+
   // Both Java and Joda dates used to work in 4.2.3, but now they must 
be java.sql.Date
+  // Can override any other types here as needed
   val (finalObj, finalType) = v match {
-case dt: DateTime = (new java.sql.Date(dt.getMillis), 
PDate.INSTANCE.getSqlType)
-case d: java.util.Date = (new java.sql.Date(d.getTime), 
PDate.INSTANCE.getSqlType)
-case _ = (v, c.getSqlType)
+case dt: DateTime = (new java.sql.Date(dt.getMillis), 
PDate.INSTANCE)
+case d: java.util.Date = (new java.sql.Date(d.getTime), 
PDate.INSTANCE)
+case _ = (v, c.getPDataType)
+  }
+
+  // Save as array or object
+  finalObj match {
+case obj: Array[AnyRef] = {
+  // Create a java.sql.Array, need to lookup the base sql type name
+  val sqlArray = statement.getConnection.createArrayOf(
+PDataType.arrayBaseType(finalType).getSqlTypeName,
+obj
+  )
+  statement.setArray(i + 1, sqlArray)
+}
+case _ = statement.setObject(i + 1, finalObj)
   }
-  statement.setObject(i + 1, finalObj, finalType)
 } else {
   

[22/31] phoenix git commit: PHOENIX-1978 UDF ArgumentTypeMismatchException(Rajeshbabu)

2015-06-24 Thread ndimiduk
PHOENIX-1978 UDF ArgumentTypeMismatchException(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/18b9e727
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/18b9e727
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/18b9e727

Branch: refs/heads/4.x-HBase-1.1
Commit: 18b9e72756642e127b2e227ea46a4f70401e6187
Parents: 58ee706
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Fri Jun 5 09:04:17 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Fri Jun 5 09:04:17 2015 +0530

--
 .../phoenix/end2end/UserDefinedFunctionsIT.java | 58 ++--
 phoenix-core/src/main/antlr3/PhoenixSQL.g   | 17 +++---
 2 files changed, 61 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/18b9e727/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
index 868e19d..c6bd62f 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
@@ -58,6 +58,8 @@ import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.schema.FunctionAlreadyExistsException;
 import org.apache.phoenix.schema.FunctionNotFoundException;
 import org.apache.phoenix.schema.ValueRangeExcpetion;
+import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.schema.types.PArrayDataType;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.QueryUtil;
 import org.apache.phoenix.util.ReadOnlyProps;
@@ -121,11 +123,31 @@ public class UserDefinedFunctionsIT extends 
BaseOwnClusterIT{
 .append(
ptr.set(PInteger.INSTANCE.toBytes((Integer)sum));\n)
 .append(return true;\n)
 .append(}\n).toString();
-
+private static String ARRAY_INDEX_EVALUATE_METHOD =
+new StringBuffer()
+.append(public boolean evaluate(Tuple tuple, 
ImmutableBytesWritable ptr) {\n)
+.append(Expression indexExpr = 
children.get(1);\n)
+.append(if (!indexExpr.evaluate(tuple, ptr)) {\n)
+.append(   return false;\n)
+.append(} else if (ptr.getLength() == 0) {\n)
+.append(   return true;\n)
+.append(}\n)
+.append(// Use Codec to prevent Integer object 
allocation\n)
+.append(int index = 
PInteger.INSTANCE.getCodec().decodeInt(ptr, indexExpr.getSortOrder());\n)
+.append(if(index  0) {\n)
+.append(   throw new ParseException(\Index 
cannot be negative :\ + index);\n)
+.append(}\n)
+.append(Expression arrayExpr = 
children.get(0);\n)
+.append(return 
PArrayDataType.positionAtArrayElement(tuple, ptr, index, arrayExpr, 
getDataType(),getMaxLength());\n)
+.append(}\n).toString();
+
+
 private static String MY_REVERSE_CLASS_NAME = MyReverse;
 private static String MY_SUM_CLASS_NAME = MySum;
-private static String MY_REVERSE_PROGRAM = 
getProgram(MY_REVERSE_CLASS_NAME, STRING_REVERSE_EVALUATE_METHOD, PVarchar);
-private static String MY_SUM_PROGRAM = getProgram(MY_SUM_CLASS_NAME, 
SUM_COLUMN_VALUES_EVALUATE_METHOD, PInteger);
+private static String MY_ARRAY_INDEX_CLASS_NAME = MyArrayIndex;
+private static String MY_REVERSE_PROGRAM = 
getProgram(MY_REVERSE_CLASS_NAME, STRING_REVERSE_EVALUATE_METHOD, return 
PVarchar.INSTANCE;);
+private static String MY_SUM_PROGRAM = getProgram(MY_SUM_CLASS_NAME, 
SUM_COLUMN_VALUES_EVALUATE_METHOD, return PInteger.INSTANCE;);
+private static String MY_ARRAY_INDEX_PROGRAM = 
getProgram(MY_ARRAY_INDEX_CLASS_NAME, ARRAY_INDEX_EVALUATE_METHOD, return 
PDataType.fromTypeId(children.get(0).getDataType().getSqlType()- 
PDataType.ARRAY_TYPE_BASE););
 private static Properties EMPTY_PROPS = new Properties();
 
 
@@ -144,6 +166,8 @@ public class UserDefinedFunctionsIT extends 
BaseOwnClusterIT{
 .append(import org.apache.phoenix.schema.types.PInteger;\n)
 .append(import org.apache.phoenix.schema.types.PVarchar;\n)
 .append(import org.apache.phoenix.util.StringUtil;\n)
+.append(import 

[21/31] phoenix git commit: PHOENIX-777 - Support null value for fixed length ARRAY - Addendum (Ram)

2015-06-24 Thread ndimiduk
PHOENIX-777 - Support null value for fixed length ARRAY - Addendum (Ram)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/58ee7062
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/58ee7062
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/58ee7062

Branch: refs/heads/4.x-HBase-1.1
Commit: 58ee7062c624dd72a5cdaa41ec5b107a1e7b14c2
Parents: 6f890ad
Author: ramkrishna ramkrishna.s.vasude...@gmail.com
Authored: Tue Jun 2 14:32:02 2015 +0530
Committer: ramkrishna ramkrishna.s.vasude...@gmail.com
Committed: Tue Jun 2 14:36:05 2015 +0530

--
 .../main/java/org/apache/phoenix/schema/types/PTimestamp.java   | 5 +
 1 file changed, 5 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/58ee7062/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PTimestamp.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PTimestamp.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PTimestamp.java
index d396adc..16b110e 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PTimestamp.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PTimestamp.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.schema.SortOrder;
+import org.apache.phoenix.util.ByteUtil;
 import org.apache.phoenix.util.DateUtil;
 
 public class PTimestamp extends PDataTypeTimestamp {
@@ -47,6 +48,10 @@ public class PTimestamp extends PDataTypeTimestamp {
   @Override
   public int toBytes(Object object, byte[] bytes, int offset) {
 if (object == null) {
+  // Create the byte[] of size MAX_TIMESTAMP_BYTES
+  if(bytes.length != getByteSize()) {
+  bytes = Bytes.padTail(bytes, (getByteSize() - bytes.length));
+  }
   PDate.INSTANCE.getCodec().encodeLong(0l, bytes, offset);
   Bytes.putInt(bytes, offset + Bytes.SIZEOF_LONG, 0);
   return getByteSize();



[28/31] phoenix git commit: PHOENIX-2029 Queries are making two rpc calls for getTable

2015-06-24 Thread ndimiduk
PHOENIX-2029 Queries are making two rpc calls for getTable


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/52f5b046
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/52f5b046
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/52f5b046

Branch: refs/heads/4.x-HBase-1.1
Commit: 52f5b04643914f33c2d00a1157ca767a32f1adb8
Parents: 43c722c
Author: Thomas D'Silva twdsi...@gmail.com
Authored: Mon Jun 8 15:30:40 2015 -0700
Committer: Thomas D'Silva tdsi...@salesforce.com
Committed: Wed Jun 17 11:16:48 2015 -0700

--
 .../org/apache/phoenix/rpc/UpdateCacheIT.java   | 139 +++
 .../apache/phoenix/compile/QueryCompiler.java   |   2 +-
 .../coprocessor/MetaDataEndpointImpl.java   |   6 +-
 .../apache/phoenix/schema/MetaDataClient.java   |  26 ++--
 4 files changed, 156 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/52f5b046/phoenix-core/src/it/java/org/apache/phoenix/rpc/UpdateCacheIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/rpc/UpdateCacheIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/rpc/UpdateCacheIT.java
new file mode 100644
index 000..c657e41
--- /dev/null
+++ b/phoenix-core/src/it/java/org/apache/phoenix/rpc/UpdateCacheIT.java
@@ -0,0 +1,139 @@
+package org.apache.phoenix.rpc;
+
+import static org.apache.phoenix.util.TestUtil.INDEX_DATA_SCHEMA;
+import static org.apache.phoenix.util.TestUtil.MUTABLE_INDEX_DATA_TABLE;
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.anyLong;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Matchers.isNull;
+import static org.mockito.Mockito.reset;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+import java.math.BigDecimal;
+import java.sql.Connection;
+import java.sql.Date;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT;
+import org.apache.phoenix.end2end.Shadower;
+import org.apache.phoenix.jdbc.PhoenixEmbeddedDriver;
+import org.apache.phoenix.query.ConnectionQueryServices;
+import org.apache.phoenix.query.QueryConstants;
+import org.apache.phoenix.schema.MetaDataClient;
+import org.apache.phoenix.schema.PName;
+import org.apache.phoenix.schema.types.PVarchar;
+import org.apache.phoenix.util.DateUtil;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.ReadOnlyProps;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import com.google.common.collect.Maps;
+
+/**
+ * Verifies the number of rpc calls from {@link MetaDataClient} updateCache() 
+ * for transactional and non-transactional tables.
+ */
+public class UpdateCacheIT extends BaseHBaseManagedTimeIT {
+   
+   public static final int NUM_MILLIS_IN_DAY = 8640;
+
+@Before
+public void setUp() throws SQLException {
+ensureTableCreated(getUrl(), MUTABLE_INDEX_DATA_TABLE);
+}
+
+   @BeforeClass
+@Shadower(classBeingShadowed = BaseHBaseManagedTimeIT.class)
+public static void doSetup() throws Exception {
+MapString,String props = Maps.newHashMapWithExpectedSize(3);
+setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
+}
+   
+   public static void validateRowKeyColumns(ResultSet rs, int i) throws 
SQLException {
+   assertTrue(rs.next());
+   assertEquals(rs.getString(1), varchar + String.valueOf(i));
+   assertEquals(rs.getString(2), char + String.valueOf(i));
+   assertEquals(rs.getInt(3), i);
+   assertEquals(rs.getInt(4), i);
+   assertEquals(rs.getBigDecimal(5), new BigDecimal(i*0.5d));
+   Date date = new Date(DateUtil.parseDate(2015-01-01 
00:00:00).getTime() + (i - 1) * NUM_MILLIS_IN_DAY);
+   assertEquals(rs.getDate(6), date);
+   }
+   
+   public static void setRowKeyColumns(PreparedStatement stmt, int i) 
throws SQLException {
+// insert row
+stmt.setString(1, varchar + String.valueOf(i));
+stmt.setString(2, char + String.valueOf(i));
+stmt.setInt(3, i);
+stmt.setLong(4, i);
+stmt.setBigDecimal(5, new BigDecimal(i*0.5d));
+Date date = new Date(DateUtil.parseDate(2015-01-01 
00:00:00).getTime() + (i - 1) * NUM_MILLIS_IN_DAY);
+stmt.setDate(6, date);
+}
+   
+   @Test
+   public 

[19/31] phoenix git commit: PHOENIX-2012 RowKeyComparisonFilter logs unencoded data at DEBUG level

2015-06-24 Thread ndimiduk
PHOENIX-2012 RowKeyComparisonFilter logs unencoded data at DEBUG level


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9c5f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9c5f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9c5f

Branch: refs/heads/4.x-HBase-1.1
Commit: 9c5fae456f3a0934e43e02af0ef5188b9337
Parents: 29ea503
Author: Nick Dimiduk ndimi...@apache.org
Authored: Wed May 27 15:58:32 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Mon Jun 1 15:57:15 2015 -0700

--
 .../java/org/apache/phoenix/filter/RowKeyComparisonFilter.java  | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/9c5f/phoenix-core/src/main/java/org/apache/phoenix/filter/RowKeyComparisonFilter.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/filter/RowKeyComparisonFilter.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/filter/RowKeyComparisonFilter.java
index 2e2037b..b7de7ac 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/filter/RowKeyComparisonFilter.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/filter/RowKeyComparisonFilter.java
@@ -73,8 +73,9 @@ public class RowKeyComparisonFilter extends 
BooleanExpressionFilter {
 if (evaluate) {
 inputTuple.setKey(v.getRowArray(), v.getRowOffset(), 
v.getRowLength());
 this.keepRow = Boolean.TRUE.equals(evaluate(inputTuple));
-if (logger.isDebugEnabled()) {
-logger.debug(RowKeyComparisonFilter:  + (this.keepRow ? 
KEEP : FILTER)  +  row  + inputTuple);
+if (logger.isTraceEnabled()) {
+logger.trace(RowKeyComparisonFilter:  + (this.keepRow ? 
KEEP : FILTER)
++  row  + inputTuple);
 }
 evaluate = false;
 }



[20/31] phoenix git commit: PHOENIX-777 - Support null value for fixed length ARRAY (Dumindu Buddhika)

2015-06-24 Thread ndimiduk
PHOENIX-777 - Support null value for fixed length ARRAY (Dumindu Buddhika)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6f890ade
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6f890ade
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6f890ade

Branch: refs/heads/4.x-HBase-1.1
Commit: 6f890ade0691d03469ff8fce81c2fa9edd6941af
Parents: 9c5f111
Author: ramkrishna ramkrishna.s.vasude...@gmail.com
Authored: Tue Jun 2 11:18:51 2015 +0530
Committer: ramkrishna ramkrishna.s.vasude...@gmail.com
Committed: Tue Jun 2 11:18:51 2015 +0530

--
 .../phoenix/end2end/ArraysWithNullsIT.java  | 300 +++
 .../phoenix/compile/ExpressionCompiler.java |   9 +-
 .../apache/phoenix/schema/types/PBinary.java|   2 +-
 .../org/apache/phoenix/schema/types/PChar.java  |   5 +-
 .../org/apache/phoenix/schema/types/PDate.java  |   6 +-
 .../apache/phoenix/schema/types/PDecimal.java   |   3 +
 .../apache/phoenix/schema/types/PTimestamp.java |  17 +-
 .../phoenix/schema/types/PhoenixArray.java  |  51 ++--
 8 files changed, 358 insertions(+), 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6f890ade/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java
new file mode 100644
index 000..b034193
--- /dev/null
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java
@@ -0,0 +1,300 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.phoenix.end2end;
+
+import static org.junit.Assert.assertEquals;
+
+import java.sql.*;
+
+import org.apache.phoenix.schema.types.PTimestamp;
+import org.apache.phoenix.schema.types.PhoenixArray;
+import org.junit.Test;
+
+public class ArraysWithNullsIT extends BaseClientManagedTimeIT {
+
+@Test
+public void testArrayUpsertIntWithNulls() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+conn.createStatement().execute(CREATE TABLE t1 ( k VARCHAR PRIMARY 
KEY, a INTEGER[]));
+
+PreparedStatement stmt = conn.prepareStatement(UPSERT INTO t1 
VALUES('a',ARRAY[null,3,null]));
+stmt.execute();
+conn.commit();
+
+ResultSet rs = conn.createStatement().executeQuery(Select a from t1 
where k = 'a');
+rs.next();
+Array array = conn.createArrayOf(INTEGER,new Object[]{null,3,null});
+
+assertEquals(rs.getArray(1),array);
+conn.close();
+
+}
+
+
+
+@Test
+public void testArrayUpsertVarcharWithNulls() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+conn.createStatement().execute(CREATE TABLE t2 ( k VARCHAR PRIMARY 
KEY, a VARCHAR[]));
+
+PreparedStatement stmt = conn.prepareStatement(UPSERT INTO t2 
VALUES('a',ARRAY['10',null]));
+stmt.execute();
+conn.commit();
+
+ResultSet rs = conn.createStatement().executeQuery(Select a from t2 
where k = 'a');
+rs.next();
+Array array = conn.createArrayOf(VARCHAR,new Object[]{10,null});
+
+assertEquals(rs.getArray(1),array);
+conn.close();
+
+}
+
+@Test
+public void testArrayUpsertBigIntWithNulls() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+conn.createStatement().execute(CREATE TABLE t3 ( k VARCHAR PRIMARY 
KEY, a BIGINT[]));
+
+PreparedStatement stmt = conn.prepareStatement(UPSERT INTO t3 
VALUES('a',ARRAY[2,null,32335,4]));
+stmt.execute();
+conn.commit();
+
+ResultSet rs = conn.createStatement().executeQuery(Select a from t3 
where k = 'a');
+rs.next();
+Array array = conn.createArrayOf(BIGINT,new 
Object[]{(long)2,null,(long)32335,(long)4});
+
+assertEquals(rs.getArray(1),array);
+conn.close();

[30/31] phoenix git commit: PHOENIX-1935 org.apache.phoenix.end2end.ArithmeticQueryIT tests are failing (Alicia Ying Shu)

2015-06-24 Thread ndimiduk
PHOENIX-1935 org.apache.phoenix.end2end.ArithmeticQueryIT tests are failing 
(Alicia Ying Shu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/05b1b8b1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/05b1b8b1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/05b1b8b1

Branch: refs/heads/4.x-HBase-1.1
Commit: 05b1b8b13f4137602567f67642946c883646d4d8
Parents: 329d749
Author: Nick Dimiduk ndimi...@apache.org
Authored: Wed Jun 17 12:28:35 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Wed Jun 17 12:31:28 2015 -0700

--
 .../src/it/java/org/apache/phoenix/end2end/BaseViewIT.java  | 2 ++
 phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java | 3 +++
 .../src/test/java/org/apache/phoenix/query/BaseTest.java| 5 -
 3 files changed, 9 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/05b1b8b1/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
index b9d7180..3140077 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
@@ -98,6 +98,7 @@ public abstract class BaseViewIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 assertEquals(1, rs.getInt(1));
 assertEquals(121, rs.getInt(2));
 assertFalse(rs.next());
+conn.close();
 }
 
 protected void testUpdatableViewIndex(Integer saltBuckets) throws 
Exception {
@@ -179,6 +180,7 @@ public abstract class BaseViewIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 + CLIENT MERGE SORT,
 QueryUtil.getExplainPlan(rs));
 }
+conn.close();
 }
 
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/05b1b8b1/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
index 266438d..fb58a8f 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
@@ -92,8 +92,11 @@ public class ViewIT extends BaseViewIT {
 fail();
 } catch (ReadOnlyTableException e) {
 
+} finally {
+conn.close();
 }
 
+conn = DriverManager.getConnection(getUrl());
 int count = 0;
 ResultSet rs = conn.createStatement().executeQuery(SELECT k FROM v2);
 while (rs.next()) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/05b1b8b1/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
--
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
index fa78656..3f09518 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
@@ -115,6 +115,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.IntegrationTestingUtility;
+import org.apache.hadoop.hbase.TableNotEnabledException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
@@ -1634,7 +1635,9 @@ public abstract class BaseTest {
 for (HTableDescriptor table : tables) {
 String schemaName = 
SchemaUtil.getSchemaNameFromFullName(table.getName());
 if (!QueryConstants.SYSTEM_SCHEMA_NAME.equals(schemaName)) {
-admin.disableTable(table.getName());
+try{
+admin.disableTable(table.getName());
+} catch (TableNotEnabledException ignored){}
 admin.deleteTable(table.getName());
 }
 }



[15/31] phoenix git commit: PHOENIX-2007 java.sql.SQLException: Encountered exception in sub plan [0] execution(Alicia Ying Shu)

2015-06-24 Thread ndimiduk
PHOENIX-2007 java.sql.SQLException: Encountered exception in sub plan [0] 
execution(Alicia Ying Shu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/eb9452d5
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/eb9452d5
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/eb9452d5

Branch: refs/heads/4.x-HBase-1.1
Commit: eb9452d55068ff4574b48938aebba765c28caaaf
Parents: c1882ee
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Mon Jun 1 21:05:24 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Mon Jun 1 21:05:24 2015 +0530

--
 .../org/apache/phoenix/end2end/HashJoinIT.java  | 54 
 .../apache/phoenix/execute/HashJoinPlan.java|  7 +--
 2 files changed, 58 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/eb9452d5/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinIT.java
index a03204a..88e03ca 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinIT.java
@@ -3813,6 +3813,60 @@ public class HashJoinIT extends BaseHBaseManagedTimeIT {
 }
 }
 
+@Test
+public void testSubqueryWithoutData() throws Exception {
+Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+Connection conn = DriverManager.getConnection(getUrl(), props);
+conn.setAutoCommit(false);
+
+try {
+String GRAMMAR_TABLE = CREATE TABLE IF NOT EXISTS GRAMMAR_TABLE 
(ID INTEGER PRIMARY KEY,  +
+unsig_id UNSIGNED_INT, big_id BIGINT, unsig_long_id 
UNSIGNED_LONG, tiny_id TINYINT, +
+unsig_tiny_id UNSIGNED_TINYINT, small_id SMALLINT, 
unsig_small_id UNSIGNED_SMALLINT, + 
+float_id FLOAT, unsig_float_id UNSIGNED_FLOAT, double_id 
DOUBLE, unsig_double_id UNSIGNED_DOUBLE, + 
+decimal_id DECIMAL, boolean_id BOOLEAN, time_id TIME, 
date_id DATE, timestamp_id TIMESTAMP, + 
+unsig_time_id TIME, unsig_date_id DATE, 
unsig_timestamp_id TIMESTAMP, varchar_id VARCHAR (30), + 
+char_id CHAR (30), binary_id BINARY (100), varbinary_id 
VARBINARY (100));
+
+String LARGE_TABLE = CREATE TABLE IF NOT EXISTS LARGE_TABLE (ID 
INTEGER PRIMARY KEY,  +
+unsig_id UNSIGNED_INT, big_id BIGINT, unsig_long_id 
UNSIGNED_LONG, tiny_id TINYINT, +
+unsig_tiny_id UNSIGNED_TINYINT, small_id SMALLINT, 
unsig_small_id UNSIGNED_SMALLINT, + 
+float_id FLOAT, unsig_float_id UNSIGNED_FLOAT, double_id 
DOUBLE, unsig_double_id UNSIGNED_DOUBLE, + 
+decimal_id DECIMAL, boolean_id BOOLEAN, time_id TIME, 
date_id DATE, timestamp_id TIMESTAMP, + 
+unsig_time_id TIME, unsig_date_id DATE, 
unsig_timestamp_id TIMESTAMP, varchar_id VARCHAR (30), + 
+char_id CHAR (30), binary_id BINARY (100), varbinary_id 
VARBINARY (100));
+
+String SECONDARY_LARGE_TABLE = CREATE TABLE IF NOT EXISTS 
SECONDARY_LARGE_TABLE (SEC_ID INTEGER PRIMARY KEY, +
+sec_unsig_id UNSIGNED_INT, sec_big_id BIGINT, 
sec_usnig_long_id UNSIGNED_LONG, sec_tiny_id TINYINT, + 
+sec_unsig_tiny_id UNSIGNED_TINYINT, sec_small_id 
SMALLINT, sec_unsig_small_id UNSIGNED_SMALLINT, + 
+sec_float_id FLOAT, sec_unsig_float_id UNSIGNED_FLOAT, 
sec_double_id DOUBLE, sec_unsig_double_id UNSIGNED_DOUBLE, +
+sec_decimal_id DECIMAL, sec_boolean_id BOOLEAN, 
sec_time_id TIME, sec_date_id DATE, +
+sec_timestamp_id TIMESTAMP, sec_unsig_time_id TIME, 
sec_unsig_date_id DATE, sec_unsig_timestamp_id TIMESTAMP, +
+sec_varchar_id VARCHAR (30), sec_char_id CHAR (30), 
sec_binary_id BINARY (100), sec_varbinary_id VARBINARY (100));
+createTestTable(getUrl(), GRAMMAR_TABLE);
+createTestTable(getUrl(), LARGE_TABLE);
+createTestTable(getUrl(), SECONDARY_LARGE_TABLE);
+
+String ddl = SELECT * FROM (SELECT ID, BIG_ID, DATE_ID FROM 
LARGE_TABLE AS A WHERE (A.ID % 5) = 0) AS A  +
+INNER JOIN (SELECT SEC_ID, SEC_TINY_ID, 
SEC_UNSIG_FLOAT_ID FROM SECONDARY_LARGE_TABLE AS B WHERE (B.SEC_ID % 5) = 0) AS 
B  + 
+ON A.ID=B.SEC_ID WHERE A.DATE_ID  ALL (SELECT 
SEC_DATE_ID FROM SECONDARY_LARGE_TABLE LIMIT 100)  +  
+AND B.SEC_UNSIG_FLOAT_ID = ANY (SELECT sec_unsig_float_id 
FROM 

[05/31] phoenix git commit: PHOENIX-2008 Integration tests are failing with HBase-1.1.0 because HBASE-13756(Rajeshbabu)

2015-06-24 Thread ndimiduk
PHOENIX-2008 Integration tests are failing with HBase-1.1.0 because 
HBASE-13756(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/56e1c0a1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/56e1c0a1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/56e1c0a1

Branch: refs/heads/4.x-HBase-1.1
Commit: 56e1c0a1f348572fb73e9d0b8bbfb053df7f8710
Parents: ea622d5
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Sat May 23 23:29:31 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Sat May 23 23:29:31 2015 +0530

--
 phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/56e1c0a1/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
--
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
index 54ae670..4aa28c4 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
@@ -620,6 +620,8 @@ public abstract class BaseTest {
 }
 //no point doing sanity checks when running tests.
 conf.setBoolean(hbase.table.sanity.checks, false);
+// Remove this configuration once hbase has HBASE-13756 fix.
+conf.set(hbase.regionserver.msginterval, 30);
 // set the server rpc controller and rpc scheduler factory, used to 
configure the cluster
 conf.set(RpcControllerFactory.CUSTOM_CONTROLLER_CONF_KEY, 
DEFAULT_SERVER_RPC_CONTROLLER_FACTORY);
 conf.set(RSRpcServices.REGION_SERVER_RPC_SCHEDULER_FACTORY_CLASS, 
DEFAULT_RPC_SCHEDULER_FACTORY);



[01/31] phoenix git commit: PHOENIX-1681 Use the new Region Interface (Andrew Purtell)

2015-06-24 Thread ndimiduk
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.1 [created] 980d29c5a


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ea622d5f/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsCollector.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsCollector.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsCollector.java
index 272cac6..e7e1dd7 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsCollector.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsCollector.java
@@ -31,8 +31,8 @@ import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
+import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.phoenix.coprocessor.MetaDataProtocol;
@@ -111,7 +111,7 @@ public class StatisticsCollector {
 this.statsTable.close();
 }
 
-public void updateStatistic(HRegion region) {
+public void updateStatistic(Region region) {
 try {
 ArrayListMutation mutations = new ArrayListMutation();
 writeStatsToStatsTable(region, true, mutations, 
TimeKeeper.SYSTEM.getCurrentTime());
@@ -126,7 +126,7 @@ public class StatisticsCollector {
 }
 }
 
-private void writeStatsToStatsTable(final HRegion region,
+private void writeStatsToStatsTable(final Region region,
 boolean delete, ListMutation mutations, long currentTime) throws 
IOException {
 try {
 // update the statistics table
@@ -215,7 +215,7 @@ public class StatisticsCollector {
 }
 }
 
-public InternalScanner createCompactionScanner(HRegion region, Store 
store, InternalScanner s) throws IOException {
+public InternalScanner createCompactionScanner(Region region, Store store, 
InternalScanner s) throws IOException {
 // See if this is for Major compaction
 if (logger.isDebugEnabled()) {
 logger.debug(Compaction scanner created for stats);
@@ -224,13 +224,13 @@ public class StatisticsCollector {
 return getInternalScanner(region, store, s, cfKey);
 }
 
-public void splitStats(HRegion parent, HRegion left, HRegion right) {
+public void splitStats(Region parent, Region left, Region right) {
 try {
 if (logger.isDebugEnabled()) {
 logger.debug(Collecting stats for split of  + 
parent.getRegionInfo() +  into  + left.getRegionInfo() +  and  + 
right.getRegionInfo());
 }
 ListMutation mutations = Lists.newArrayListWithExpectedSize(3);
-for (byte[] fam : parent.getStores().keySet()) {
+for (byte[] fam : parent.getTableDesc().getFamiliesKeys()) {
statsTable.splitStats(parent, left, right, this, new 
ImmutableBytesPtr(fam), mutations);
 }
 if (logger.isDebugEnabled()) {
@@ -243,7 +243,7 @@ public class StatisticsCollector {
 }
 }
 
-protected InternalScanner getInternalScanner(HRegion region, Store store,
+protected InternalScanner getInternalScanner(Region region, Store store,
 InternalScanner internalScan, ImmutableBytesPtr family) {
 return new StatisticsScanner(this, statsTable, region, internalScan, 
family);
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ea622d5f/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsScanner.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsScanner.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsScanner.java
index 0e50923..582c4de 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsScanner.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsScanner.java
@@ -26,9 +26,9 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Mutation;
-import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.ScannerContext;
+import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 
 /**
@@ -38,11 +38,11 @@ public class StatisticsScanner implements InternalScanner {
 private static final Log LOG = 

[09/31] phoenix git commit: Revert PHOENIX-2008 Integration tests are failing with HBase-1.1.0 because HBASE-13756(Rajeshbabu)

2015-06-24 Thread ndimiduk
Revert PHOENIX-2008 Integration tests are failing with HBase-1.1.0 because 
HBASE-13756(Rajeshbabu)

This reverts commit 56e1c0a1f348572fb73e9d0b8bbfb053df7f8710.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/1a2f2dc1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/1a2f2dc1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/1a2f2dc1

Branch: refs/heads/4.x-HBase-1.1
Commit: 1a2f2dc1ea6dd75c224b78a0dc2b312b0e6f5bce
Parents: 5546a42
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Wed May 27 14:52:25 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Wed May 27 14:52:25 2015 +0530

--
 phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java | 2 --
 1 file changed, 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/1a2f2dc1/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
--
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
index 4aa28c4..54ae670 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
@@ -620,8 +620,6 @@ public abstract class BaseTest {
 }
 //no point doing sanity checks when running tests.
 conf.setBoolean(hbase.table.sanity.checks, false);
-// Remove this configuration once hbase has HBASE-13756 fix.
-conf.set(hbase.regionserver.msginterval, 30);
 // set the server rpc controller and rpc scheduler factory, used to 
configure the cluster
 conf.set(RpcControllerFactory.CUSTOM_CONTROLLER_CONF_KEY, 
DEFAULT_SERVER_RPC_CONTROLLER_FACTORY);
 conf.set(RSRpcServices.REGION_SERVER_RPC_SCHEDULER_FACTORY_CLASS, 
DEFAULT_RPC_SCHEDULER_FACTORY);



[16/31] phoenix git commit: PHOENIX-2016 Some Phoenix tests failed with NPE(Alicia Ying Shu)

2015-06-24 Thread ndimiduk
PHOENIX-2016 Some Phoenix tests failed with NPE(Alicia Ying Shu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/dc46b144
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/dc46b144
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/dc46b144

Branch: refs/heads/4.x-HBase-1.1
Commit: dc46b144aa9eaf315c3969669dab7f0a50d94281
Parents: eb9452d
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Mon Jun 1 21:34:16 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Mon Jun 1 21:34:16 2015 +0530

--
 phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/dc46b144/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
--
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
index b0574c3..fa78656 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
@@ -1627,6 +1627,7 @@ public abstract class BaseTest {
  * Disable and drop all the tables except SYSTEM.CATALOG and 
SYSTEM.SEQUENCE
  */
 private static void disableAndDropNonSystemTables() throws Exception {
+if (driver == null) return;
 HBaseAdmin admin = driver.getConnectionQueryServices(null, 
null).getAdmin();
 try {
 HTableDescriptor[] tables = admin.listTables();



[08/31] phoenix git commit: PHOENIX-2005 Connection utilities omit zk client port, parent znode (addendum)

2015-06-24 Thread ndimiduk
PHOENIX-2005 Connection utilities omit zk client port, parent znode (addendum)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/5546a422
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/5546a422
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/5546a422

Branch: refs/heads/4.x-HBase-1.1
Commit: 5546a42226e3f0fdf0cc89f1c175ff3da7a75d8c
Parents: c6b37b9
Author: Nick Dimiduk ndimi...@apache.org
Authored: Tue May 26 17:41:04 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Tue May 26 17:52:24 2015 -0700

--
 .../phoenix/jdbc/PhoenixEmbeddedDriver.java |  2 +-
 .../java/org/apache/phoenix/util/QueryUtil.java |  2 +-
 .../phoenix/jdbc/PhoenixEmbeddedDriverTest.java | 20 
 3 files changed, 22 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/5546a422/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
index 2451603..3cfaacc 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
@@ -209,7 +209,7 @@ public abstract class PhoenixEmbeddedDriver implements 
Driver, org.apache.phoeni
 url = url == null ?  : url;
 url = url.startsWith(PhoenixRuntime.JDBC_PROTOCOL)
 ? url.substring(PhoenixRuntime.JDBC_PROTOCOL.length())
-: url;
+: PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR + url;
 StringTokenizer tokenizer = new StringTokenizer(url, DELIMITERS, 
true);
 int nTokens = 0;
 String[] tokens = new String[5];

http://git-wip-us.apache.org/repos/asf/phoenix/blob/5546a422/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
index bd38983..a2d4a91 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
@@ -290,7 +290,7 @@ public final class QueryUtil {
 throws ClassNotFoundException,
 SQLException {
 String url = getConnectionUrl(props, conf);
-LOG.info(Creating connection with the jdbc url: + url);
+LOG.info(Creating connection with the jdbc url:  + url);
 PropertiesUtil.extractProperties(props, conf);
 return DriverManager.getConnection(url, props);
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/5546a422/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriverTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriverTest.java
 
b/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriverTest.java
index 083b205..4eda825 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriverTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriverTest.java
@@ -34,23 +34,33 @@ public class PhoenixEmbeddedDriverTest {
 @Test
 public void testGetConnectionInfo() throws SQLException {
 String[] urls = new String[] {
+null,
+,
 jdbc:phoenix,
 jdbc:phoenix;test=true,
 jdbc:phoenix:localhost,
+localhost,
+localhost;,
 jdbc:phoenix:localhost:123,
 jdbc:phoenix:localhost:123;foo=bar,
+localhost:123,
 jdbc:phoenix:localhost:123:/hbase,
 jdbc:phoenix:localhost:123:/foo-bar,
 jdbc:phoenix:localhost:123:/foo-bar;foo=bas,
+localhost:123:/foo-bar,
 jdbc:phoenix:localhost:/hbase,
 jdbc:phoenix:localhost:/foo-bar,
 jdbc:phoenix:localhost:/foo-bar;test=true,
+localhost:/foo-bar,
 jdbc:phoenix:v1,v2,v3,
 jdbc:phoenix:v1,v2,v3;,
 jdbc:phoenix:v1,v2,v3;test=true,
+v1,v2,v3,
 jdbc:phoenix:v1,v2,v3:/hbase,
 jdbc:phoenix:v1,v2,v3:/hbase;test=true,
+v1,v2,v3:/foo-bar,
 jdbc:phoenix:v1,v2,v3:123:/hbase,
+v1,v2,v3:123:/hbase,
 jdbc:phoenix:v1,v2,v3:123:/hbase;test=false,
 

[23/31] phoenix git commit: PHOENIX-2027 Subqueries with no data are raising IllegalStateException(Alicia Ying Shu)

2015-06-24 Thread ndimiduk
PHOENIX-2027 Subqueries with no data are raising IllegalStateException(Alicia 
Ying Shu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/bfd860ff
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/bfd860ff
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/bfd860ff

Branch: refs/heads/4.x-HBase-1.1
Commit: bfd860ffec62a784f1229997cf98892ea3c0592d
Parents: 18b9e72
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Wed Jun 10 01:01:29 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Wed Jun 10 01:01:29 2015 +0530

--
 .../apache/phoenix/end2end/SortMergeJoinIT.java | 54 
 .../phoenix/execute/SortMergeJoinPlan.java  |  4 +-
 2 files changed, 56 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/bfd860ff/phoenix-core/src/it/java/org/apache/phoenix/end2end/SortMergeJoinIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SortMergeJoinIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SortMergeJoinIT.java
index 6f14a45..8b65ab3 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SortMergeJoinIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SortMergeJoinIT.java
@@ -2658,5 +2658,59 @@ public class SortMergeJoinIT extends 
BaseHBaseManagedTimeIT {
 }
 }
 
+@Test
+public void testSubqueryWithoutData() throws Exception {
+Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+Connection conn = DriverManager.getConnection(getUrl(), props);
+conn.setAutoCommit(false);
+
+try {
+String GRAMMAR_TABLE = CREATE TABLE IF NOT EXISTS GRAMMAR_TABLE 
(ID INTEGER PRIMARY KEY,  +
+unsig_id UNSIGNED_INT, big_id BIGINT, unsig_long_id 
UNSIGNED_LONG, tiny_id TINYINT, +
+unsig_tiny_id UNSIGNED_TINYINT, small_id SMALLINT, 
unsig_small_id UNSIGNED_SMALLINT, + 
+float_id FLOAT, unsig_float_id UNSIGNED_FLOAT, double_id 
DOUBLE, unsig_double_id UNSIGNED_DOUBLE, + 
+decimal_id DECIMAL, boolean_id BOOLEAN, time_id TIME, 
date_id DATE, timestamp_id TIMESTAMP, + 
+unsig_time_id TIME, unsig_date_id DATE, 
unsig_timestamp_id TIMESTAMP, varchar_id VARCHAR (30), + 
+char_id CHAR (30), binary_id BINARY (100), varbinary_id 
VARBINARY (100));
+
+String LARGE_TABLE = CREATE TABLE IF NOT EXISTS LARGE_TABLE (ID 
INTEGER PRIMARY KEY,  +
+unsig_id UNSIGNED_INT, big_id BIGINT, unsig_long_id 
UNSIGNED_LONG, tiny_id TINYINT, +
+unsig_tiny_id UNSIGNED_TINYINT, small_id SMALLINT, 
unsig_small_id UNSIGNED_SMALLINT, + 
+float_id FLOAT, unsig_float_id UNSIGNED_FLOAT, double_id 
DOUBLE, unsig_double_id UNSIGNED_DOUBLE, + 
+decimal_id DECIMAL, boolean_id BOOLEAN, time_id TIME, 
date_id DATE, timestamp_id TIMESTAMP, + 
+unsig_time_id TIME, unsig_date_id DATE, 
unsig_timestamp_id TIMESTAMP, varchar_id VARCHAR (30), + 
+char_id CHAR (30), binary_id BINARY (100), varbinary_id 
VARBINARY (100));
+
+String SECONDARY_LARGE_TABLE = CREATE TABLE IF NOT EXISTS 
SECONDARY_LARGE_TABLE (SEC_ID INTEGER PRIMARY KEY, +
+sec_unsig_id UNSIGNED_INT, sec_big_id BIGINT, 
sec_usnig_long_id UNSIGNED_LONG, sec_tiny_id TINYINT, + 
+sec_unsig_tiny_id UNSIGNED_TINYINT, sec_small_id 
SMALLINT, sec_unsig_small_id UNSIGNED_SMALLINT, + 
+sec_float_id FLOAT, sec_unsig_float_id UNSIGNED_FLOAT, 
sec_double_id DOUBLE, sec_unsig_double_id UNSIGNED_DOUBLE, +
+sec_decimal_id DECIMAL, sec_boolean_id BOOLEAN, 
sec_time_id TIME, sec_date_id DATE, +
+sec_timestamp_id TIMESTAMP, sec_unsig_time_id TIME, 
sec_unsig_date_id DATE, sec_unsig_timestamp_id TIMESTAMP, +
+sec_varchar_id VARCHAR (30), sec_char_id CHAR (30), 
sec_binary_id BINARY (100), sec_varbinary_id VARBINARY (100));
+createTestTable(getUrl(), GRAMMAR_TABLE);
+createTestTable(getUrl(), LARGE_TABLE);
+createTestTable(getUrl(), SECONDARY_LARGE_TABLE);
+
+String ddl = SELECT /*+USE_SORT_MERGE_JOIN*/ * FROM (SELECT ID, 
BIG_ID, DATE_ID FROM LARGE_TABLE AS A WHERE (A.ID % 5) = 0) AS A  +
+INNER JOIN (SELECT SEC_ID, SEC_TINY_ID, 
SEC_UNSIG_FLOAT_ID FROM SECONDARY_LARGE_TABLE AS B WHERE (B.SEC_ID % 5) = 0) AS 
B  + 
+ON A.ID=B.SEC_ID WHERE A.DATE_ID  ALL (SELECT 
SEC_DATE_ID FROM SECONDARY_LARGE_TABLE LIMIT 100)  +  
+AND 

[47/49] phoenix git commit: PHOENIX-1981 : PhoenixHBase Load and Store Funcs should handle all Pig data types

2015-06-24 Thread tdsilva
PHOENIX-1981 : PhoenixHBase Load and Store Funcs should handle all Pig data 
types


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8a0dee77
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8a0dee77
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8a0dee77

Branch: refs/heads/json
Commit: 8a0dee77c67761c57feae31350c84304ccc44c07
Parents: 8076126
Author: Prashant Kommireddi 
pkommire...@pkommireddi-ltm.internal.salesforce.com
Authored: Mon May 18 19:47:01 2015 -0700
Committer: Eli Levine elilev...@apache.org
Committed: Mon Jun 15 18:17:45 2015 -0700

--
 .../org/apache/phoenix/pig/util/TypeUtil.java   | 24 ++--
 .../apache/phoenix/pig/util/TypeUtilTest.java   | 20 
 2 files changed, 37 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/8a0dee77/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java
--
diff --git 
a/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java 
b/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java
index 6549445..c8bc9d8 100644
--- a/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java
+++ b/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java
@@ -1,11 +1,21 @@
 /*
- * Licensed to the Apache Software Foundation (ASF) under one or more 
contributor license agreements. See the NOTICE
- * file distributed with this work for additional information regarding 
copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the License); you may not 
use this file except in compliance with the
- * License. You may obtain a copy of the License at 
http://www.apache.org/licenses/LICENSE-2.0 Unless required by
- * applicable law or agreed to in writing, software distributed under the 
License is distributed on an AS IS BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language
- * governing permissions and limitations under the License.
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ *distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you maynot use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicablelaw or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */
 
 package org.apache.phoenix.pig.util;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8a0dee77/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/TypeUtilTest.java
--
diff --git 
a/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/TypeUtilTest.java 
b/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/TypeUtilTest.java
index 25d9f48..56167f6 100644
--- a/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/TypeUtilTest.java
+++ b/phoenix-pig/src/test/java/org/apache/phoenix/pig/util/TypeUtilTest.java
@@ -1,3 +1,23 @@
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ *distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you maynot use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicablelaw or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.phoenix.pig.util;
 
 import static org.junit.Assert.assertEquals;



[18/49] phoenix git commit: PHOENIX-1763 Support building with HBase-1.1.0

2015-06-24 Thread tdsilva
PHOENIX-1763 Support building with HBase-1.1.0


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/7bc9cce1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/7bc9cce1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/7bc9cce1

Branch: refs/heads/json
Commit: 7bc9cce172b2b1cebd00275a0f2c586944709231
Parents: 286ff26
Author: Enis Soztutar e...@apache.org
Authored: Thu May 21 23:08:26 2015 -0700
Committer: Enis Soztutar e...@apache.org
Committed: Thu May 21 23:33:48 2015 -0700

--
 phoenix-core/pom.xml| 17 +++--
 .../regionserver/IndexHalfStoreFileReader.java  | 31 ++--
 .../regionserver/IndexSplitTransaction.java | 39 --
 .../hbase/regionserver/LocalIndexMerger.java|  3 +-
 .../cache/aggcache/SpillableGroupByCache.java   | 13 +++-
 .../phoenix/coprocessor/BaseRegionScanner.java  | 12 +--
 .../coprocessor/BaseScannerRegionObserver.java  | 77 +++-
 .../coprocessor/DelegateRegionScanner.java  | 23 --
 .../GroupedAggregateRegionObserver.java | 53 --
 .../coprocessor/HashJoinRegionScanner.java  | 60 ---
 .../coprocessor/MetaDataRegionObserver.java | 23 +++---
 .../phoenix/coprocessor/ScanRegionObserver.java | 11 ++-
 .../UngroupedAggregateRegionObserver.java   | 55 +++---
 .../hbase/index/covered/data/LocalTable.java|  2 +-
 .../index/scanner/FilteredKeyValueScanner.java  |  2 +-
 .../phoenix/index/PhoenixIndexBuilder.java  |  6 +-
 .../iterate/RegionScannerResultIterator.java|  9 ++-
 .../phoenix/schema/stats/StatisticsScanner.java | 10 ++-
 .../hbase/ipc/PhoenixIndexRpcSchedulerTest.java |  6 +-
 .../index/covered/TestLocalTableState.java  |  1 -
 .../index/write/TestWALRecoveryCaching.java |  4 +-
 phoenix-flume/pom.xml   |  9 ---
 phoenix-pig/pom.xml | 31 +---
 phoenix-spark/pom.xml   |  7 ++
 pom.xml | 41 ++-
 25 files changed, 352 insertions(+), 193 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/7bc9cce1/phoenix-core/pom.xml
--
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
index 65e4f8e..9ab2a0e 100644
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@ -354,16 +354,25 @@
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-it/artifactId
-  version${hbase.version}/version
   typetest-jar/type
   scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
+  artifactIdhbase-annotations/artifactId
+/dependency
+dependency
+  groupIdorg.apache.hbase/groupId
   artifactIdhbase-common/artifactId
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
+  artifactIdhbase-common/artifactId
+  scopetest/scope
+  typetest-jar/type
+/dependency
+dependency
+  groupIdorg.apache.hbase/groupId
   artifactIdhbase-protocol/artifactId
 /dependency
 dependency
@@ -373,18 +382,16 @@
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-server/artifactId
-  version${hbase.version}/version
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-server/artifactId
-  version${hbase.version}/version
   typetest-jar/type
+  scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-hadoop-compat/artifactId
-  scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
@@ -395,13 +402,11 @@
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-hadoop2-compat/artifactId
-  version${hbase.version}/version
   scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-hadoop2-compat/artifactId
-  version${hbase.version}/version
   typetest-jar/type
   scopetest/scope
 /dependency

http://git-wip-us.apache.org/repos/asf/phoenix/blob/7bc9cce1/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexHalfStoreFileReader.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexHalfStoreFileReader.java
 
b/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexHalfStoreFileReader.java
index 49e2022..9befc8c 100644
--- 
a/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexHalfStoreFileReader.java
+++ 
b/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexHalfStoreFileReader.java
@@ -47,11 +47,11 @@ 

[20/49] phoenix git commit: PHOENIX-1681 Use the new Region Interface (Andrew Purtell)

2015-06-24 Thread tdsilva
PHOENIX-1681 Use the new Region Interface (Andrew Purtell)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/edff624f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/edff624f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/edff624f

Branch: refs/heads/json
Commit: edff624f193324762fae04907c551e3d2fec93a3
Parents: 7bc9cce
Author: Enis Soztutar e...@apache.org
Authored: Thu May 21 23:22:54 2015 -0700
Committer: Enis Soztutar e...@apache.org
Committed: Fri May 22 00:16:31 2015 -0700

--
 ...ReplayWithIndexWritesAndCompressedWALIT.java |  4 +-
 .../EndToEndCoveredColumnsIndexBuilderIT.java   |  4 +-
 .../IndexHalfStoreFileReaderGenerator.java  |  9 +-
 .../regionserver/IndexSplitTransaction.java | 65 +-
 .../hbase/regionserver/LocalIndexMerger.java| 16 ++--
 .../hbase/regionserver/LocalIndexSplitter.java  | 11 +--
 .../coprocessor/BaseScannerRegionObserver.java  | 26 +++---
 .../GroupedAggregateRegionObserver.java | 13 +--
 .../coprocessor/MetaDataEndpointImpl.java   | 94 ++--
 .../phoenix/coprocessor/ScanRegionObserver.java | 17 ++--
 .../coprocessor/SequenceRegionObserver.java | 16 ++--
 .../UngroupedAggregateRegionObserver.java   | 29 +++---
 .../hbase/index/covered/data/LocalTable.java|  5 +-
 .../write/ParallelWriterIndexCommitter.java |  8 +-
 .../recovery/PerRegionIndexWriteCache.java  | 10 +--
 .../recovery/StoreFailuresInCachePolicy.java|  4 +-
 .../TrackingParallelWriterIndexCommitter.java   |  8 +-
 .../phoenix/index/PhoenixIndexBuilder.java  |  4 +-
 .../apache/phoenix/index/PhoenixIndexCodec.java | 14 ++-
 .../schema/stats/StatisticsCollector.java   | 14 +--
 .../phoenix/schema/stats/StatisticsScanner.java | 16 ++--
 .../phoenix/schema/stats/StatisticsWriter.java  | 16 ++--
 .../java/org/apache/phoenix/util/IndexUtil.java | 38 
 .../index/covered/TestLocalTableState.java  |  8 +-
 .../index/write/TestWALRecoveryCaching.java | 17 ++--
 .../recovery/TestPerRegionIndexWriteCache.java  |  6 +-
 26 files changed, 230 insertions(+), 242 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/edff624f/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
 
b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
index 3b8ff29..611ba68 100644
--- 
a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
@@ -159,7 +159,7 @@ public class WALReplayWithIndexWritesAndCompressedWALIT {
   }
 
   /**
-   * Test writing edits into an HRegion, closing it, splitting logs, opening 
Region again. Verify
+   * Test writing edits into an region, closing it, splitting logs, opening 
Region again. Verify
* seqids.
* @throws Exception on failure
*/
@@ -183,7 +183,7 @@ public class WALReplayWithIndexWritesAndCompressedWALIT {
 builder.build(htd);
 
 // create the region + its WAL
-HRegion region0 = HRegion.createHRegion(hri, hbaseRootDir, this.conf, htd);
+HRegion region0 = HRegion.createHRegion(hri, hbaseRootDir, this.conf, 
htd); // FIXME: Uses private type
 region0.close();
 region0.getWAL().close();
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/edff624f/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
index d90733f..6b2309e 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.util.EnvironmentEdge;
 import 

[32/49] phoenix git commit: PHOENIX-777 Support null value for fixed length ARRAY (Dumindu Buddhika)

2015-06-24 Thread tdsilva
PHOENIX-777 Support null value for fixed length ARRAY (Dumindu Buddhika)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/7f6bf10b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/7f6bf10b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/7f6bf10b

Branch: refs/heads/json
Commit: 7f6bf10b2cc54279b9210772323dc8f4d2939a19
Parents: 9e686b7
Author: ramkrishna ramkrishna.s.vasude...@gmail.com
Authored: Tue Jun 2 11:13:44 2015 +0530
Committer: ramkrishna ramkrishna.s.vasude...@gmail.com
Committed: Tue Jun 2 11:13:44 2015 +0530

--
 .../phoenix/end2end/ArraysWithNullsIT.java  | 300 +++
 .../phoenix/compile/ExpressionCompiler.java |   9 +-
 .../apache/phoenix/schema/types/PBinary.java|   2 +-
 .../org/apache/phoenix/schema/types/PChar.java  |   5 +-
 .../org/apache/phoenix/schema/types/PDate.java  |   6 +-
 .../apache/phoenix/schema/types/PDecimal.java   |   3 +
 .../apache/phoenix/schema/types/PTimestamp.java |  17 +-
 .../phoenix/schema/types/PhoenixArray.java  |  51 ++--
 8 files changed, 358 insertions(+), 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/7f6bf10b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java
new file mode 100644
index 000..b034193
--- /dev/null
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java
@@ -0,0 +1,300 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.phoenix.end2end;
+
+import static org.junit.Assert.assertEquals;
+
+import java.sql.*;
+
+import org.apache.phoenix.schema.types.PTimestamp;
+import org.apache.phoenix.schema.types.PhoenixArray;
+import org.junit.Test;
+
+public class ArraysWithNullsIT extends BaseClientManagedTimeIT {
+
+@Test
+public void testArrayUpsertIntWithNulls() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+conn.createStatement().execute(CREATE TABLE t1 ( k VARCHAR PRIMARY 
KEY, a INTEGER[]));
+
+PreparedStatement stmt = conn.prepareStatement(UPSERT INTO t1 
VALUES('a',ARRAY[null,3,null]));
+stmt.execute();
+conn.commit();
+
+ResultSet rs = conn.createStatement().executeQuery(Select a from t1 
where k = 'a');
+rs.next();
+Array array = conn.createArrayOf(INTEGER,new Object[]{null,3,null});
+
+assertEquals(rs.getArray(1),array);
+conn.close();
+
+}
+
+
+
+@Test
+public void testArrayUpsertVarcharWithNulls() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+conn.createStatement().execute(CREATE TABLE t2 ( k VARCHAR PRIMARY 
KEY, a VARCHAR[]));
+
+PreparedStatement stmt = conn.prepareStatement(UPSERT INTO t2 
VALUES('a',ARRAY['10',null]));
+stmt.execute();
+conn.commit();
+
+ResultSet rs = conn.createStatement().executeQuery(Select a from t2 
where k = 'a');
+rs.next();
+Array array = conn.createArrayOf(VARCHAR,new Object[]{10,null});
+
+assertEquals(rs.getArray(1),array);
+conn.close();
+
+}
+
+@Test
+public void testArrayUpsertBigIntWithNulls() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+conn.createStatement().execute(CREATE TABLE t3 ( k VARCHAR PRIMARY 
KEY, a BIGINT[]));
+
+PreparedStatement stmt = conn.prepareStatement(UPSERT INTO t3 
VALUES('a',ARRAY[2,null,32335,4]));
+stmt.execute();
+conn.commit();
+
+ResultSet rs = conn.createStatement().executeQuery(Select a from t3 
where k = 'a');
+rs.next();
+Array array = conn.createArrayOf(BIGINT,new 
Object[]{(long)2,null,(long)32335,(long)4});
+
+assertEquals(rs.getArray(1),array);
+conn.close();
+
+}
+

[13/49] phoenix git commit: PHOENIX-1979 Remove unused FamilyOnlyFilter

2015-06-24 Thread tdsilva
PHOENIX-1979 Remove unused FamilyOnlyFilter


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a4b4e0e2
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a4b4e0e2
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a4b4e0e2

Branch: refs/heads/json
Commit: a4b4e0e2d862d5d4ee0f3a6f9587f53fe87d629f
Parents: c83ab9e
Author: Andrew Purtell apurt...@apache.org
Authored: Wed May 20 09:53:53 2015 -0700
Committer: Andrew Purtell apurt...@apache.org
Committed: Wed May 20 09:53:53 2015 -0700

--
 .../index/covered/filter/FamilyOnlyFilter.java  |  80 --
 .../covered/filter/TestFamilyOnlyFilter.java| 106 ---
 2 files changed, 186 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a4b4e0e2/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/filter/FamilyOnlyFilter.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/filter/FamilyOnlyFilter.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/filter/FamilyOnlyFilter.java
deleted file mode 100644
index 68555ef..000
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/filter/FamilyOnlyFilter.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * License); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an AS IS BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.hbase.index.covered.filter;
-
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.filter.BinaryComparator;
-import org.apache.hadoop.hbase.filter.ByteArrayComparable;
-import org.apache.hadoop.hbase.filter.FamilyFilter;
-
-/**
- * Similar to the {@link FamilyFilter} but stops when the end of the family is 
reached and only
- * supports equality
- */
-public class FamilyOnlyFilter extends FamilyFilter {
-
-  boolean done = false;
-  private boolean previousMatchFound;
-
-  /**
-   * Filter on exact binary matches to the passed family
-   * @param family to compare against
-   */
-  public FamilyOnlyFilter(final byte[] family) {
-this(new BinaryComparator(family));
-  }
-
-  public FamilyOnlyFilter(final ByteArrayComparable familyComparator) {
-super(CompareOp.EQUAL, familyComparator);
-  }
-
-
-  @Override
-  public boolean filterAllRemaining() {
-return done;
-  }
-
-  @Override
-  public void reset() {
-done = false;
-previousMatchFound = false;
-  }
-
-  @Override
-  public ReturnCode filterKeyValue(Cell v) {
-if (done) {
-  return ReturnCode.SKIP;
-}
-ReturnCode code = super.filterKeyValue(v);
-if (previousMatchFound) {
-  // we found a match before, and now we are skipping the key because of 
the family, therefore
-  // we are done (no more of the family).
-  if (code.equals(ReturnCode.SKIP)) {
-  done = true;
-  }
-} else {
-  // if we haven't seen a match before, then it doesn't matter what we see 
now, except to mark
-  // if we've seen a match
-  if (code.equals(ReturnCode.INCLUDE)) {
-previousMatchFound = true;
-  }
-}
-return code;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a4b4e0e2/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/covered/filter/TestFamilyOnlyFilter.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/covered/filter/TestFamilyOnlyFilter.java
 
b/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/covered/filter/TestFamilyOnlyFilter.java
deleted file mode 100644
index 216f548..000
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/covered/filter/TestFamilyOnlyFilter.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- 

phoenix git commit: PHOENIX-2066 Existing client fails initialization due to upgrade attempting to create column with no name (Lukas Lalinsky)

2015-06-24 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/master 7385899d9 - b58a62a5e


PHOENIX-2066 Existing client fails initialization due to upgrade attempting to 
create column with no name (Lukas Lalinsky)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b58a62a5
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b58a62a5
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b58a62a5

Branch: refs/heads/master
Commit: b58a62a5e43dcbb37695a0ebf7a20ced13e99503
Parents: 7385899
Author: James Taylor jtay...@salesforce.com
Authored: Wed Jun 24 08:11:12 2015 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Wed Jun 24 08:11:12 2015 -0700

--
 .../phoenix/query/ConnectionQueryServicesImpl.java| 14 +-
 1 file changed, 9 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b58a62a5/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index c5dde10..ddebf9f 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@ -1941,11 +1941,15 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 columnsToAdd += ,  + 
PhoenixDatabaseMetaData.INDEX_TYPE +   + 
PUnsignedTinyint.INSTANCE.getSqlTypeName()
 + ,  + 
PhoenixDatabaseMetaData.INDEX_DISABLE_TIMESTAMP +   + 
PLong.INSTANCE.getSqlTypeName();
 }
-// Ugh..need to assign to another local 
variable to keep eclipse happy.
-PhoenixConnection newMetaConnection = 
addColumnsIfNotExists(metaConnection,
-PhoenixDatabaseMetaData.SYSTEM_CATALOG,
-
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP, columnsToAdd);
-metaConnection = newMetaConnection;
+
+// If we have some new columns from 4.1-4.3 to 
add, add them now.
+if (!columnsToAdd.isEmpty()) {
+// Ugh..need to assign to another local 
variable to keep eclipse happy.
+PhoenixConnection newMetaConnection = 
addColumnsIfNotExists(metaConnection,
+
PhoenixDatabaseMetaData.SYSTEM_CATALOG,
+
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP, columnsToAdd);
+metaConnection = newMetaConnection;
+}
 
 if (currentServerSideTableTimeStamp  
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_5_0) {
 columnsToAdd = 
PhoenixDatabaseMetaData.BASE_COLUMN_COUNT +  



phoenix git commit: PHOENIX-2066 Existing client fails initialization due to upgrade attempting to create column with no name (Lukas Lalinsky)

2015-06-24 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.0 ee9764f9c - 2db9d6d65


PHOENIX-2066 Existing client fails initialization due to upgrade attempting to 
create column with no name (Lukas Lalinsky)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/2db9d6d6
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/2db9d6d6
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/2db9d6d6

Branch: refs/heads/4.x-HBase-1.0
Commit: 2db9d6d652a6f94f743a982a5a381999a9e466f1
Parents: ee9764f
Author: James Taylor jtay...@salesforce.com
Authored: Wed Jun 24 08:11:12 2015 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Wed Jun 24 08:14:29 2015 -0700

--
 .../phoenix/query/ConnectionQueryServicesImpl.java| 14 +-
 1 file changed, 9 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/2db9d6d6/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index c5dde10..ddebf9f 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@ -1941,11 +1941,15 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 columnsToAdd += ,  + 
PhoenixDatabaseMetaData.INDEX_TYPE +   + 
PUnsignedTinyint.INSTANCE.getSqlTypeName()
 + ,  + 
PhoenixDatabaseMetaData.INDEX_DISABLE_TIMESTAMP +   + 
PLong.INSTANCE.getSqlTypeName();
 }
-// Ugh..need to assign to another local 
variable to keep eclipse happy.
-PhoenixConnection newMetaConnection = 
addColumnsIfNotExists(metaConnection,
-PhoenixDatabaseMetaData.SYSTEM_CATALOG,
-
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP, columnsToAdd);
-metaConnection = newMetaConnection;
+
+// If we have some new columns from 4.1-4.3 to 
add, add them now.
+if (!columnsToAdd.isEmpty()) {
+// Ugh..need to assign to another local 
variable to keep eclipse happy.
+PhoenixConnection newMetaConnection = 
addColumnsIfNotExists(metaConnection,
+
PhoenixDatabaseMetaData.SYSTEM_CATALOG,
+
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP, columnsToAdd);
+metaConnection = newMetaConnection;
+}
 
 if (currentServerSideTableTimeStamp  
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_5_0) {
 columnsToAdd = 
PhoenixDatabaseMetaData.BASE_COLUMN_COUNT +  



phoenix git commit: PHOENIX-2066 Existing client fails initialization due to upgrade attempting to create column with no name (Lukas Lalinsky)

2015-06-24 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 6e56eddc9 - a9e040184


PHOENIX-2066 Existing client fails initialization due to upgrade attempting to 
create column with no name (Lukas Lalinsky)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a9e04018
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a9e04018
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a9e04018

Branch: refs/heads/4.x-HBase-0.98
Commit: a9e040184d2af3c72e125115cf6d755d52ad67ff
Parents: 6e56edd
Author: James Taylor jtay...@salesforce.com
Authored: Wed Jun 24 08:11:12 2015 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Wed Jun 24 08:17:16 2015 -0700

--
 .../phoenix/query/ConnectionQueryServicesImpl.java| 14 +-
 1 file changed, 9 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a9e04018/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index f3be8f2..28b47aa 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@ -1941,11 +1941,15 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 columnsToAdd += ,  + 
PhoenixDatabaseMetaData.INDEX_TYPE +   + 
PUnsignedTinyint.INSTANCE.getSqlTypeName()
 + ,  + 
PhoenixDatabaseMetaData.INDEX_DISABLE_TIMESTAMP +   + 
PLong.INSTANCE.getSqlTypeName();
 }
-// Ugh..need to assign to another local 
variable to keep eclipse happy.
-PhoenixConnection newMetaConnection = 
addColumnsIfNotExists(metaConnection,
-PhoenixDatabaseMetaData.SYSTEM_CATALOG,
-
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP, columnsToAdd);
-metaConnection = newMetaConnection;
+
+// If we have some new columns from 4.1-4.3 to 
add, add them now.
+if (!columnsToAdd.isEmpty()) {
+// Ugh..need to assign to another local 
variable to keep eclipse happy.
+PhoenixConnection newMetaConnection = 
addColumnsIfNotExists(metaConnection,
+
PhoenixDatabaseMetaData.SYSTEM_CATALOG,
+
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP, columnsToAdd);
+metaConnection = newMetaConnection;
+}
 
 if (currentServerSideTableTimeStamp  
MetaDataProtocol.MIN_SYSTEM_TABLE_TIMESTAMP_4_5_0) {
 columnsToAdd = 
PhoenixDatabaseMetaData.BASE_COLUMN_COUNT +  



phoenix git commit: PHOENIX-2030 CsvBulkLoadTool should use logical name of the table for output directory suffix(Rajeshbabu)

2015-06-24 Thread rajeshbabu
Repository: phoenix
Updated Branches:
  refs/heads/master 50f3a0412 - 3cf22a7de


PHOENIX-2030 CsvBulkLoadTool should use logical name of the table for output 
directory suffix(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/3cf22a7d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/3cf22a7d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/3cf22a7d

Branch: refs/heads/master
Commit: 3cf22a7de4eaec6978763b6961d73aa9eaa07015
Parents: 50f3a04
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Thu Jun 25 01:16:51 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Thu Jun 25 01:16:51 2015 +0530

--
 .../phoenix/mapreduce/CsvBulkLoadToolIT.java | 19 +++
 .../phoenix/mapreduce/CsvBulkLoadTool.java   |  2 +-
 2 files changed, 20 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cf22a7d/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
index 392395d..6bcc221 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
@@ -39,6 +39,7 @@ import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
 import org.apache.phoenix.jdbc.PhoenixDriver;
 import org.apache.phoenix.util.DateUtil;
 import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.QueryUtil;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -206,6 +207,8 @@ public class CsvBulkLoadToolIT {
 String ddl = CREATE LOCAL INDEX TABLE6_IDX ON TABLE6 
 +  (FIRST_NAME ASC);
 stmt.execute(ddl);
+ddl = CREATE LOCAL INDEX TABLE6_IDX2 ON TABLE6  +  (LAST_NAME ASC);
+stmt.execute(ddl);
 
 FileSystem fs = FileSystem.get(hbaseTestUtil.getConfiguration());
 FSDataOutputStream outputStream = fs.create(new 
Path(/tmp/input3.csv));
@@ -228,6 +231,22 @@ public class CsvBulkLoadToolIT {
 assertEquals(FirstName 2, rs.getString(2));
 
 rs.close();
+rs =
+stmt.executeQuery(EXPLAIN SELECT id, FIRST_NAME FROM TABLE6 
where first_name='FirstName 2');
+assertEquals(
+CLIENT 1-CHUNK PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_TABLE6 
[-32768,'FirstName 2']\n
++ SERVER FILTER BY FIRST KEY ONLY, 
QueryUtil.getExplainPlan(rs));
+rs.close();
+rs = stmt.executeQuery(SELECT id, LAST_NAME FROM TABLE6 where 
last_name='LastName 2');
+assertTrue(rs.next());
+assertEquals(2, rs.getInt(1));
+assertEquals(LastName 2, rs.getString(2));
+rs.close();
+rs =
+stmt.executeQuery(EXPLAIN SELECT id, LAST_NAME FROM TABLE6 
where last_name='LastName 2');
+assertEquals(
+CLIENT 1-CHUNK PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_TABLE6 
[-32767,'LastName 2']\n
++ SERVER FILTER BY FIRST KEY ONLY, 
QueryUtil.getExplainPlan(rs));
 stmt.close();
 }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cf22a7d/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
index 9e27bac..5270277 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
@@ -265,7 +265,7 @@ public class CsvBulkLoadTool extends Configured implements 
Tool {
 JobManager.createThreadPoolExec(Integer.MAX_VALUE, 5, 20, 
useInstrumentedPool);
 try{
for (TargetTableRef table : tablesToBeLoaded) {
-   Path tablePath = new Path(outputPath, 
table.getPhysicalName());
+   Path tablePath = new Path(outputPath, 
table.getLogicalName());
Configuration jobConf = new Configuration(conf);
jobConf.set(CsvToKeyValueMapper.TABLE_NAME_CONFKEY, 
qualifiedTableName);
if 
(qualifiedTableName.compareToIgnoreCase(table.getLogicalName()) != 0) {



phoenix git commit: PHOENIX-2030 CsvBulkLoadTool should use logical name of the table for output directory suffix(Rajeshbabu)

2015-06-24 Thread rajeshbabu
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.0 37220bbab - f2b612e32


PHOENIX-2030 CsvBulkLoadTool should use logical name of the table for output 
directory suffix(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f2b612e3
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f2b612e3
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f2b612e3

Branch: refs/heads/4.x-HBase-1.0
Commit: f2b612e325c1837916ca16ffa62f557e9019696a
Parents: 37220bb
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Thu Jun 25 01:17:26 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Thu Jun 25 01:17:26 2015 +0530

--
 .../phoenix/mapreduce/CsvBulkLoadToolIT.java | 19 +++
 .../phoenix/mapreduce/CsvBulkLoadTool.java   |  2 +-
 2 files changed, 20 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f2b612e3/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
index 392395d..6bcc221 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
@@ -39,6 +39,7 @@ import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
 import org.apache.phoenix.jdbc.PhoenixDriver;
 import org.apache.phoenix.util.DateUtil;
 import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.QueryUtil;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -206,6 +207,8 @@ public class CsvBulkLoadToolIT {
 String ddl = CREATE LOCAL INDEX TABLE6_IDX ON TABLE6 
 +  (FIRST_NAME ASC);
 stmt.execute(ddl);
+ddl = CREATE LOCAL INDEX TABLE6_IDX2 ON TABLE6  +  (LAST_NAME ASC);
+stmt.execute(ddl);
 
 FileSystem fs = FileSystem.get(hbaseTestUtil.getConfiguration());
 FSDataOutputStream outputStream = fs.create(new 
Path(/tmp/input3.csv));
@@ -228,6 +231,22 @@ public class CsvBulkLoadToolIT {
 assertEquals(FirstName 2, rs.getString(2));
 
 rs.close();
+rs =
+stmt.executeQuery(EXPLAIN SELECT id, FIRST_NAME FROM TABLE6 
where first_name='FirstName 2');
+assertEquals(
+CLIENT 1-CHUNK PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_TABLE6 
[-32768,'FirstName 2']\n
++ SERVER FILTER BY FIRST KEY ONLY, 
QueryUtil.getExplainPlan(rs));
+rs.close();
+rs = stmt.executeQuery(SELECT id, LAST_NAME FROM TABLE6 where 
last_name='LastName 2');
+assertTrue(rs.next());
+assertEquals(2, rs.getInt(1));
+assertEquals(LastName 2, rs.getString(2));
+rs.close();
+rs =
+stmt.executeQuery(EXPLAIN SELECT id, LAST_NAME FROM TABLE6 
where last_name='LastName 2');
+assertEquals(
+CLIENT 1-CHUNK PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_TABLE6 
[-32767,'LastName 2']\n
++ SERVER FILTER BY FIRST KEY ONLY, 
QueryUtil.getExplainPlan(rs));
 stmt.close();
 }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f2b612e3/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
index 9e27bac..5270277 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
@@ -265,7 +265,7 @@ public class CsvBulkLoadTool extends Configured implements 
Tool {
 JobManager.createThreadPoolExec(Integer.MAX_VALUE, 5, 20, 
useInstrumentedPool);
 try{
for (TargetTableRef table : tablesToBeLoaded) {
-   Path tablePath = new Path(outputPath, 
table.getPhysicalName());
+   Path tablePath = new Path(outputPath, 
table.getLogicalName());
Configuration jobConf = new Configuration(conf);
jobConf.set(CsvToKeyValueMapper.TABLE_NAME_CONFKEY, 
qualifiedTableName);
if 
(qualifiedTableName.compareToIgnoreCase(table.getLogicalName()) != 0) {



phoenix git commit: PHOENIX-2030 CsvBulkLoadTool should use logical name of the table for output directory suffix(Rajeshbabu)

2015-06-24 Thread rajeshbabu
Repository: phoenix
Updated Branches:
  refs/heads/4.4-HBase-1.0 a496896fc - cbd63ca2e


PHOENIX-2030 CsvBulkLoadTool should use logical name of the table for output 
directory suffix(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/cbd63ca2
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/cbd63ca2
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/cbd63ca2

Branch: refs/heads/4.4-HBase-1.0
Commit: cbd63ca2eeb9351a53be50b3a52739f32886cf71
Parents: a496896
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Thu Jun 25 01:18:53 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Thu Jun 25 01:18:53 2015 +0530

--
 .../phoenix/mapreduce/CsvBulkLoadToolIT.java | 19 +++
 .../phoenix/mapreduce/CsvBulkLoadTool.java   |  2 +-
 2 files changed, 20 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/cbd63ca2/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
index 392395d..6bcc221 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
@@ -39,6 +39,7 @@ import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
 import org.apache.phoenix.jdbc.PhoenixDriver;
 import org.apache.phoenix.util.DateUtil;
 import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.QueryUtil;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -206,6 +207,8 @@ public class CsvBulkLoadToolIT {
 String ddl = CREATE LOCAL INDEX TABLE6_IDX ON TABLE6 
 +  (FIRST_NAME ASC);
 stmt.execute(ddl);
+ddl = CREATE LOCAL INDEX TABLE6_IDX2 ON TABLE6  +  (LAST_NAME ASC);
+stmt.execute(ddl);
 
 FileSystem fs = FileSystem.get(hbaseTestUtil.getConfiguration());
 FSDataOutputStream outputStream = fs.create(new 
Path(/tmp/input3.csv));
@@ -228,6 +231,22 @@ public class CsvBulkLoadToolIT {
 assertEquals(FirstName 2, rs.getString(2));
 
 rs.close();
+rs =
+stmt.executeQuery(EXPLAIN SELECT id, FIRST_NAME FROM TABLE6 
where first_name='FirstName 2');
+assertEquals(
+CLIENT 1-CHUNK PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_TABLE6 
[-32768,'FirstName 2']\n
++ SERVER FILTER BY FIRST KEY ONLY, 
QueryUtil.getExplainPlan(rs));
+rs.close();
+rs = stmt.executeQuery(SELECT id, LAST_NAME FROM TABLE6 where 
last_name='LastName 2');
+assertTrue(rs.next());
+assertEquals(2, rs.getInt(1));
+assertEquals(LastName 2, rs.getString(2));
+rs.close();
+rs =
+stmt.executeQuery(EXPLAIN SELECT id, LAST_NAME FROM TABLE6 
where last_name='LastName 2');
+assertEquals(
+CLIENT 1-CHUNK PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_TABLE6 
[-32767,'LastName 2']\n
++ SERVER FILTER BY FIRST KEY ONLY, 
QueryUtil.getExplainPlan(rs));
 stmt.close();
 }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/cbd63ca2/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
index 7afde98..83aefaa 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
@@ -265,7 +265,7 @@ public class CsvBulkLoadTool extends Configured implements 
Tool {
 JobManager.createThreadPoolExec(Integer.MAX_VALUE, 5, 20, 
useInstrumentedPool);
 try{
for (TargetTableRef table : tablesToBeLoaded) {
-   Path tablePath = new Path(outputPath, 
table.getPhysicalName());
+   Path tablePath = new Path(outputPath, 
table.getLogicalName());
Configuration jobConf = new Configuration(conf);
jobConf.set(CsvToKeyValueMapper.TABLE_NAME_CONFKEY, 
qualifiedTableName);
if 
(qualifiedTableName.compareToIgnoreCase(table.getLogicalName()) != 0) {



phoenix git commit: PHOENIX-2030 CsvBulkLoadTool should use logical name of the table for output directory suffix(Rajeshbabu)

2015-06-24 Thread rajeshbabu
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 618213be6 - 8e309c2b4


PHOENIX-2030 CsvBulkLoadTool should use logical name of the table for output 
directory suffix(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8e309c2b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8e309c2b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8e309c2b

Branch: refs/heads/4.x-HBase-0.98
Commit: 8e309c2b4cd4bb8a2689fbaed9f9c7b348cbae31
Parents: 618213b
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Thu Jun 25 01:17:49 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Thu Jun 25 01:17:49 2015 +0530

--
 .../phoenix/mapreduce/CsvBulkLoadToolIT.java | 19 +++
 .../phoenix/mapreduce/CsvBulkLoadTool.java   |  2 +-
 2 files changed, 20 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/8e309c2b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
index 392395d..6bcc221 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
@@ -39,6 +39,7 @@ import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
 import org.apache.phoenix.jdbc.PhoenixDriver;
 import org.apache.phoenix.util.DateUtil;
 import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.QueryUtil;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -206,6 +207,8 @@ public class CsvBulkLoadToolIT {
 String ddl = CREATE LOCAL INDEX TABLE6_IDX ON TABLE6 
 +  (FIRST_NAME ASC);
 stmt.execute(ddl);
+ddl = CREATE LOCAL INDEX TABLE6_IDX2 ON TABLE6  +  (LAST_NAME ASC);
+stmt.execute(ddl);
 
 FileSystem fs = FileSystem.get(hbaseTestUtil.getConfiguration());
 FSDataOutputStream outputStream = fs.create(new 
Path(/tmp/input3.csv));
@@ -228,6 +231,22 @@ public class CsvBulkLoadToolIT {
 assertEquals(FirstName 2, rs.getString(2));
 
 rs.close();
+rs =
+stmt.executeQuery(EXPLAIN SELECT id, FIRST_NAME FROM TABLE6 
where first_name='FirstName 2');
+assertEquals(
+CLIENT 1-CHUNK PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_TABLE6 
[-32768,'FirstName 2']\n
++ SERVER FILTER BY FIRST KEY ONLY, 
QueryUtil.getExplainPlan(rs));
+rs.close();
+rs = stmt.executeQuery(SELECT id, LAST_NAME FROM TABLE6 where 
last_name='LastName 2');
+assertTrue(rs.next());
+assertEquals(2, rs.getInt(1));
+assertEquals(LastName 2, rs.getString(2));
+rs.close();
+rs =
+stmt.executeQuery(EXPLAIN SELECT id, LAST_NAME FROM TABLE6 
where last_name='LastName 2');
+assertEquals(
+CLIENT 1-CHUNK PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_TABLE6 
[-32767,'LastName 2']\n
++ SERVER FILTER BY FIRST KEY ONLY, 
QueryUtil.getExplainPlan(rs));
 stmt.close();
 }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8e309c2b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
index 9e27bac..5270277 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
@@ -265,7 +265,7 @@ public class CsvBulkLoadTool extends Configured implements 
Tool {
 JobManager.createThreadPoolExec(Integer.MAX_VALUE, 5, 20, 
useInstrumentedPool);
 try{
for (TargetTableRef table : tablesToBeLoaded) {
-   Path tablePath = new Path(outputPath, 
table.getPhysicalName());
+   Path tablePath = new Path(outputPath, 
table.getLogicalName());
Configuration jobConf = new Configuration(conf);
jobConf.set(CsvToKeyValueMapper.TABLE_NAME_CONFKEY, 
qualifiedTableName);
if 
(qualifiedTableName.compareToIgnoreCase(table.getLogicalName()) != 0) {



phoenix git commit: PHOENIX-2030 CsvBulkLoadTool should use logical name of the table for output directory suffix(Rajeshbabu)

2015-06-24 Thread rajeshbabu
Repository: phoenix
Updated Branches:
  refs/heads/4.4-HBase-1.1 f4b92cd14 - 4852cee55


PHOENIX-2030 CsvBulkLoadTool should use logical name of the table for output 
directory suffix(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/4852cee5
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/4852cee5
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/4852cee5

Branch: refs/heads/4.4-HBase-1.1
Commit: 4852cee557ee09641fcb101def8585bdf9269f20
Parents: f4b92cd
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Thu Jun 25 01:18:27 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Thu Jun 25 01:18:27 2015 +0530

--
 .../phoenix/mapreduce/CsvBulkLoadToolIT.java | 19 +++
 .../phoenix/mapreduce/CsvBulkLoadTool.java   |  2 +-
 2 files changed, 20 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/4852cee5/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
index 392395d..6bcc221 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
@@ -39,6 +39,7 @@ import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
 import org.apache.phoenix.jdbc.PhoenixDriver;
 import org.apache.phoenix.util.DateUtil;
 import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.QueryUtil;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -206,6 +207,8 @@ public class CsvBulkLoadToolIT {
 String ddl = "CREATE LOCAL INDEX TABLE6_IDX ON TABLE6 "
 + " (FIRST_NAME ASC)";
 stmt.execute(ddl);
+ddl = "CREATE LOCAL INDEX TABLE6_IDX2 ON TABLE6 " + " (LAST_NAME ASC)";
+stmt.execute(ddl);
 
 FileSystem fs = FileSystem.get(hbaseTestUtil.getConfiguration());
 FSDataOutputStream outputStream = fs.create(new 
Path(/tmp/input3.csv));
@@ -228,6 +231,22 @@ public class CsvBulkLoadToolIT {
 assertEquals("FirstName 2", rs.getString(2));
 
 rs.close();
+rs =
+stmt.executeQuery("EXPLAIN SELECT id, FIRST_NAME FROM TABLE6 where first_name='FirstName 2'");
+assertEquals(
+"CLIENT 1-CHUNK PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_TABLE6 [-32768,'FirstName 2']\n"
++ "    SERVER FILTER BY FIRST KEY ONLY", QueryUtil.getExplainPlan(rs));
+rs.close();
+rs = stmt.executeQuery("SELECT id, LAST_NAME FROM TABLE6 where last_name='LastName 2'");
+assertTrue(rs.next());
+assertEquals(2, rs.getInt(1));
+assertEquals("LastName 2", rs.getString(2));
+rs.close();
+rs =
+stmt.executeQuery("EXPLAIN SELECT id, LAST_NAME FROM TABLE6 where last_name='LastName 2'");
+assertEquals(
+"CLIENT 1-CHUNK PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_TABLE6 [-32767,'LastName 2']\n"
++ "    SERVER FILTER BY FIRST KEY ONLY", QueryUtil.getExplainPlan(rs));
 stmt.close();
 }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/4852cee5/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
index 7afde98..83aefaa 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
@@ -265,7 +265,7 @@ public class CsvBulkLoadTool extends Configured implements 
Tool {
 JobManager.createThreadPoolExec(Integer.MAX_VALUE, 5, 20, 
useInstrumentedPool);
 try{
for (TargetTableRef table : tablesToBeLoaded) {
-   Path tablePath = new Path(outputPath, 
table.getPhysicalName());
+   Path tablePath = new Path(outputPath, 
table.getLogicalName());
Configuration jobConf = new Configuration(conf);
jobConf.set(CsvToKeyValueMapper.TABLE_NAME_CONFKEY, 
qualifiedTableName);
if 
(qualifiedTableName.compareToIgnoreCase(table.getLogicalName()) != 0) {



phoenix git commit: PHOENIX-2030 CsvBulkLoadTool should use logical name of the table for output directory suffix(Rajeshbabu)

2015-06-24 Thread rajeshbabu
Repository: phoenix
Updated Branches:
  refs/heads/4.4-HBase-0.98 a07c8e37b - 78f5915f9


PHOENIX-2030 CsvBulkLoadTool should use logical name of the table for output 
directory suffix(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/78f5915f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/78f5915f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/78f5915f

Branch: refs/heads/4.4-HBase-0.98
Commit: 78f5915f92c824fd3146a1e9288cb1febb9aa9a0
Parents: a07c8e3
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Thu Jun 25 01:19:18 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Thu Jun 25 01:19:18 2015 +0530

--
 .../phoenix/mapreduce/CsvBulkLoadToolIT.java | 19 +++
 .../phoenix/mapreduce/CsvBulkLoadTool.java   |  2 +-
 2 files changed, 20 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/78f5915f/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
index 392395d..6bcc221 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java
@@ -39,6 +39,7 @@ import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
 import org.apache.phoenix.jdbc.PhoenixDriver;
 import org.apache.phoenix.util.DateUtil;
 import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.QueryUtil;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -206,6 +207,8 @@ public class CsvBulkLoadToolIT {
 String ddl = "CREATE LOCAL INDEX TABLE6_IDX ON TABLE6 "
 + " (FIRST_NAME ASC)";
 stmt.execute(ddl);
+ddl = "CREATE LOCAL INDEX TABLE6_IDX2 ON TABLE6 " + " (LAST_NAME ASC)";
+stmt.execute(ddl);
 
 FileSystem fs = FileSystem.get(hbaseTestUtil.getConfiguration());
 FSDataOutputStream outputStream = fs.create(new 
Path(/tmp/input3.csv));
@@ -228,6 +231,22 @@ public class CsvBulkLoadToolIT {
 assertEquals("FirstName 2", rs.getString(2));
 
 rs.close();
+rs =
+stmt.executeQuery("EXPLAIN SELECT id, FIRST_NAME FROM TABLE6 where first_name='FirstName 2'");
+assertEquals(
+"CLIENT 1-CHUNK PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_TABLE6 [-32768,'FirstName 2']\n"
++ "    SERVER FILTER BY FIRST KEY ONLY", QueryUtil.getExplainPlan(rs));
+rs.close();
+rs = stmt.executeQuery("SELECT id, LAST_NAME FROM TABLE6 where last_name='LastName 2'");
+assertTrue(rs.next());
+assertEquals(2, rs.getInt(1));
+assertEquals("LastName 2", rs.getString(2));
+rs.close();
+rs =
+stmt.executeQuery("EXPLAIN SELECT id, LAST_NAME FROM TABLE6 where last_name='LastName 2'");
+assertEquals(
+"CLIENT 1-CHUNK PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_TABLE6 [-32767,'LastName 2']\n"
++ "    SERVER FILTER BY FIRST KEY ONLY", QueryUtil.getExplainPlan(rs));
 stmt.close();
 }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/78f5915f/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
index 7afde98..83aefaa 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
@@ -265,7 +265,7 @@ public class CsvBulkLoadTool extends Configured implements 
Tool {
 JobManager.createThreadPoolExec(Integer.MAX_VALUE, 5, 20, 
useInstrumentedPool);
 try{
for (TargetTableRef table : tablesToBeLoaded) {
-   Path tablePath = new Path(outputPath, 
table.getPhysicalName());
+   Path tablePath = new Path(outputPath, 
table.getLogicalName());
Configuration jobConf = new Configuration(conf);
jobConf.set(CsvToKeyValueMapper.TABLE_NAME_CONFKEY, 
qualifiedTableName);
if 
(qualifiedTableName.compareToIgnoreCase(table.getLogicalName()) != 0) {



phoenix git commit: PHOENIX-2068 UserDefinedFunctionsIT is failing in windows with InvocationTargetException(Rajeshbabu)

2015-06-24 Thread rajeshbabu
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.0 2db9d6d65 - 37220bbab


PHOENIX-2068 UserDefinedFunctionsIT is failing in windows with 
InvocationTargetException(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/37220bba
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/37220bba
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/37220bba

Branch: refs/heads/4.x-HBase-1.0
Commit: 37220bbab9cf992221a14bc3171130e51ef7cbf2
Parents: 2db9d6d
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Thu Jun 25 00:45:02 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Thu Jun 25 00:45:02 2015 +0530

--
 .../phoenix/end2end/UserDefinedFunctionsIT.java | 22 
 1 file changed, 18 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/37220bba/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
index c6bd62f..cee1c85 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
@@ -36,8 +36,10 @@ import java.io.OutputStream;
 import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.Statement;
+import java.util.HashSet;
 import java.util.Map;
 import java.util.Properties;
+import java.util.Set;
 import java.util.jar.Attributes;
 import java.util.jar.JarEntry;
 import java.util.jar.JarOutputStream;
@@ -614,10 +616,22 @@ public class UserDefinedFunctionsIT extends 
BaseOwnClusterIT{
 manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");
 FileOutputStream jarFos = new FileOutputStream(jarPath);
 JarOutputStream jarOutputStream = new JarOutputStream(jarFos, manifest);
-String pathToAdd =packageName.replace('.', File.separatorChar)
-+ File.separator;
-jarOutputStream.putNextEntry(new JarEntry(pathToAdd));
-jarOutputStream.closeEntry();
+String pathToAdd = packageName.replace('.', '/') + '/';
+String jarPathStr = new String(pathToAdd);
+Set<String> pathsInJar = new HashSet<String>();
+
+while (pathsInJar.add(jarPathStr)) {
+int ix = jarPathStr.lastIndexOf('/', jarPathStr.length() - 2);
+if (ix < 0) {
+break;
+}
+jarPathStr = jarPathStr.substring(0, ix);
+}
+for (String pathInJar : pathsInJar) {
+jarOutputStream.putNextEntry(new JarEntry(pathInJar));
+jarOutputStream.closeEntry();
+}
+
 jarOutputStream.putNextEntry(new JarEntry(pathToAdd + 
classFile.getName()));
 byte[] allBytes = new byte[(int) classFile.length()];
 FileInputStream fis = new FileInputStream(classFile);



phoenix git commit: PHOENIX-2068 UserDefinedFunctionsIT is failing in windows with InvocationTargetException(Rajeshbabu)

2015-06-24 Thread rajeshbabu
Repository: phoenix
Updated Branches:
  refs/heads/4.4-HBase-1.0 ac7dc675e - a496896fc


PHOENIX-2068 UserDefinedFunctionsIT is failing in windows with 
InvocationTargetException(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a496896f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a496896f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a496896f

Branch: refs/heads/4.4-HBase-1.0
Commit: a496896fc980b330b4c5b1b9f9b7f438e4f972e8
Parents: ac7dc67
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Thu Jun 25 00:46:42 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Thu Jun 25 00:46:42 2015 +0530

--
 .../phoenix/end2end/UserDefinedFunctionsIT.java | 22 
 1 file changed, 18 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a496896f/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
index c6bd62f..cee1c85 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
@@ -36,8 +36,10 @@ import java.io.OutputStream;
 import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.Statement;
+import java.util.HashSet;
 import java.util.Map;
 import java.util.Properties;
+import java.util.Set;
 import java.util.jar.Attributes;
 import java.util.jar.JarEntry;
 import java.util.jar.JarOutputStream;
@@ -614,10 +616,22 @@ public class UserDefinedFunctionsIT extends 
BaseOwnClusterIT{
 manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");
 FileOutputStream jarFos = new FileOutputStream(jarPath);
 JarOutputStream jarOutputStream = new JarOutputStream(jarFos, manifest);
-String pathToAdd =packageName.replace('.', File.separatorChar)
-+ File.separator;
-jarOutputStream.putNextEntry(new JarEntry(pathToAdd));
-jarOutputStream.closeEntry();
+String pathToAdd = packageName.replace('.', '/') + '/';
+String jarPathStr = new String(pathToAdd);
+Set<String> pathsInJar = new HashSet<String>();
+
+while (pathsInJar.add(jarPathStr)) {
+int ix = jarPathStr.lastIndexOf('/', jarPathStr.length() - 2);
+if (ix < 0) {
+break;
+}
+jarPathStr = jarPathStr.substring(0, ix);
+}
+for (String pathInJar : pathsInJar) {
+jarOutputStream.putNextEntry(new JarEntry(pathInJar));
+jarOutputStream.closeEntry();
+}
+
 jarOutputStream.putNextEntry(new JarEntry(pathToAdd + 
classFile.getName()));
 byte[] allBytes = new byte[(int) classFile.length()];
 FileInputStream fis = new FileInputStream(classFile);



phoenix git commit: PHOENIX-2068 UserDefinedFunctionsIT is failing in windows with InvocationTargetException(Rajeshbabu)

2015-06-24 Thread rajeshbabu
Repository: phoenix
Updated Branches:
  refs/heads/master b58a62a5e - 50f3a0412


PHOENIX-2068 UserDefinedFunctionsIT is failing in windows with 
InvocationTargetException(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/50f3a041
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/50f3a041
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/50f3a041

Branch: refs/heads/master
Commit: 50f3a04126c4fea59dc9eb978cef1399892d9a4a
Parents: b58a62a
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Thu Jun 25 00:44:25 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Thu Jun 25 00:44:25 2015 +0530

--
 .../phoenix/end2end/UserDefinedFunctionsIT.java | 22 
 1 file changed, 18 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/50f3a041/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
index c6bd62f..cee1c85 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
@@ -36,8 +36,10 @@ import java.io.OutputStream;
 import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.Statement;
+import java.util.HashSet;
 import java.util.Map;
 import java.util.Properties;
+import java.util.Set;
 import java.util.jar.Attributes;
 import java.util.jar.JarEntry;
 import java.util.jar.JarOutputStream;
@@ -614,10 +616,22 @@ public class UserDefinedFunctionsIT extends 
BaseOwnClusterIT{
 manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");
 FileOutputStream jarFos = new FileOutputStream(jarPath);
 JarOutputStream jarOutputStream = new JarOutputStream(jarFos, manifest);
-String pathToAdd =packageName.replace('.', File.separatorChar)
-+ File.separator;
-jarOutputStream.putNextEntry(new JarEntry(pathToAdd));
-jarOutputStream.closeEntry();
+String pathToAdd = packageName.replace('.', '/') + '/';
+String jarPathStr = new String(pathToAdd);
+Set<String> pathsInJar = new HashSet<String>();
+
+while (pathsInJar.add(jarPathStr)) {
+int ix = jarPathStr.lastIndexOf('/', jarPathStr.length() - 2);
+if (ix < 0) {
+break;
+}
+jarPathStr = jarPathStr.substring(0, ix);
+}
+for (String pathInJar : pathsInJar) {
+jarOutputStream.putNextEntry(new JarEntry(pathInJar));
+jarOutputStream.closeEntry();
+}
+
 jarOutputStream.putNextEntry(new JarEntry(pathToAdd + 
classFile.getName()));
 byte[] allBytes = new byte[(int) classFile.length()];
 FileInputStream fis = new FileInputStream(classFile);



phoenix git commit: PHOENIX-2068 UserDefinedFunctionsIT is failing in windows with InvocationTargetException(Rajeshbabu)

2015-06-24 Thread rajeshbabu
Repository: phoenix
Updated Branches:
  refs/heads/4.4-HBase-1.1 9165edb2a - f4b92cd14


PHOENIX-2068 UserDefinedFunctionsIT is failing in windows with 
InvocationTargetException(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f4b92cd1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f4b92cd1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f4b92cd1

Branch: refs/heads/4.4-HBase-1.1
Commit: f4b92cd149daa65a3a95dc1907ebde69fa79b210
Parents: 9165edb
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Thu Jun 25 00:46:17 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Thu Jun 25 00:46:17 2015 +0530

--
 .../phoenix/end2end/UserDefinedFunctionsIT.java | 22 
 1 file changed, 18 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f4b92cd1/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
index c6bd62f..cee1c85 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
@@ -36,8 +36,10 @@ import java.io.OutputStream;
 import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.Statement;
+import java.util.HashSet;
 import java.util.Map;
 import java.util.Properties;
+import java.util.Set;
 import java.util.jar.Attributes;
 import java.util.jar.JarEntry;
 import java.util.jar.JarOutputStream;
@@ -614,10 +616,22 @@ public class UserDefinedFunctionsIT extends 
BaseOwnClusterIT{
 manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");
 FileOutputStream jarFos = new FileOutputStream(jarPath);
 JarOutputStream jarOutputStream = new JarOutputStream(jarFos, manifest);
-String pathToAdd =packageName.replace('.', File.separatorChar)
-+ File.separator;
-jarOutputStream.putNextEntry(new JarEntry(pathToAdd));
-jarOutputStream.closeEntry();
+String pathToAdd = packageName.replace('.', '/') + '/';
+String jarPathStr = new String(pathToAdd);
+Set<String> pathsInJar = new HashSet<String>();
+
+while (pathsInJar.add(jarPathStr)) {
+int ix = jarPathStr.lastIndexOf('/', jarPathStr.length() - 2);
+if (ix < 0) {
+break;
+}
+jarPathStr = jarPathStr.substring(0, ix);
+}
+for (String pathInJar : pathsInJar) {
+jarOutputStream.putNextEntry(new JarEntry(pathInJar));
+jarOutputStream.closeEntry();
+}
+
 jarOutputStream.putNextEntry(new JarEntry(pathToAdd + 
classFile.getName()));
 byte[] allBytes = new byte[(int) classFile.length()];
 FileInputStream fis = new FileInputStream(classFile);



[15/49] phoenix git commit: PHOENIX-1964 - Pherf tests write output in module base directory

2015-06-24 Thread tdsilva
PHOENIX-1964 - Pherf tests write output in module base directory


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d3ff0798
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d3ff0798
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d3ff0798

Branch: refs/heads/json
Commit: d3ff0798f3e87bb489e3c91f7d11813503fe7861
Parents: 981ed47
Author: cmarcel cmar...@salesforce.com
Authored: Tue May 19 15:54:52 2015 -0700
Committer: cmarcel cmar...@salesforce.com
Committed: Wed May 20 13:36:34 2015 -0700

--
 phoenix-pherf/config/pherf.properties   |  3 ++
 .../org/apache/phoenix/pherf/DataIngestIT.java  |  3 +-
 .../apache/phoenix/pherf/ResultBaseTestIT.java  | 45 ++
 .../java/org/apache/phoenix/pherf/Pherf.java|  7 +--
 .../apache/phoenix/pherf/PherfConstants.java| 50 +++-
 .../phoenix/pherf/loaddata/DataLoader.java  |  2 +-
 .../apache/phoenix/pherf/result/ResultUtil.java |  4 +-
 .../pherf/result/impl/CSVResultHandler.java |  5 +-
 .../pherf/result/impl/ImageResultHandler.java   |  5 +-
 .../pherf/result/impl/XMLResultHandler.java |  6 ++-
 .../apache/phoenix/pherf/util/ResourceList.java | 26 --
 .../pherf/workload/WorkloadExecutor.java|  2 +-
 .../phoenix/pherf/ConfigurationParserTest.java  |  2 +-
 .../org/apache/phoenix/pherf/ResourceTest.java  |  8 ++--
 .../apache/phoenix/pherf/ResultBaseTest.java| 44 +
 .../org/apache/phoenix/pherf/ResultTest.java|  5 +-
 16 files changed, 168 insertions(+), 49 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/d3ff0798/phoenix-pherf/config/pherf.properties
--
diff --git a/phoenix-pherf/config/pherf.properties 
b/phoenix-pherf/config/pherf.properties
index 354707a..1142f9b5 100644
--- a/phoenix-pherf/config/pherf.properties
+++ b/phoenix-pherf/config/pherf.properties
@@ -29,3 +29,6 @@ pherf.default.dataloader.threadpool=0
 # When upserting, this is the max # of rows that will be inserted in a single 
commit
 pherf.default.dataloader.batchsize=1000
 
+# Directory where results from a scenario run will be written
+pherf.default.results.dir=RESULTS
+

http://git-wip-us.apache.org/repos/asf/phoenix/blob/d3ff0798/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
--
diff --git 
a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java 
b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
index b29656d..2b56f43 100644
--- a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
+++ b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
@@ -18,7 +18,6 @@
 
 package org.apache.phoenix.pherf;
 
-import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT;
 import org.apache.phoenix.pherf.configuration.Column;
 import org.apache.phoenix.pherf.configuration.DataTypeMapping;
 import org.apache.phoenix.pherf.configuration.Scenario;
@@ -39,7 +38,7 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-public class DataIngestIT extends BaseHBaseManagedTimeIT {
+public class DataIngestIT extends ResultBaseTestIT {
 protected static PhoenixUtil util = new PhoenixUtil(true);
 static final String matcherScenario = .*scenario/.*test.*xml;
 static final String matcherSchema = .*datamodel/.*test.*sql;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/d3ff0798/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java
--
diff --git 
a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java 
b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java
new file mode 100644
index 000..6e103b8
--- /dev/null
+++ b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *   or more contributor license agreements.  See the NOTICE file
+ *   distributed with this work for additional information
+ *   regarding copyright ownership.  The ASF licenses this file
+ *   to you under the Apache License, Version 2.0 (the
+ *   License); you may not use this file except in compliance
+ *   with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   Unless required by applicable law or agreed to in writing, software
+ *   distributed under the License is distributed on an AS IS BASIS,
+ *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *   See the License for the 

[12/49] phoenix git commit: PHOENIX-1990 bin/queryserver makeWinServiceDesc doesn't actually work in Windows

2015-06-24 Thread tdsilva
PHOENIX-1990 bin/queryserver makeWinServiceDesc doesn't actually work in Windows


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c83ab9ed
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c83ab9ed
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c83ab9ed

Branch: refs/heads/json
Commit: c83ab9edba7b417a001fb702de5d893cbda95f29
Parents: 6fc53b5
Author: Nick Dimiduk ndimi...@apache.org
Authored: Mon May 18 16:00:31 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Mon May 18 16:00:31 2015 -0700

--
 bin/queryserver.py | 17 ++---
 1 file changed, 14 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c83ab9ed/bin/queryserver.py
--
diff --git a/bin/queryserver.py b/bin/queryserver.py
index 6a18741..7666246 100755
--- a/bin/queryserver.py
+++ b/bin/queryserver.py
@@ -78,11 +78,22 @@ phoenix_out_file = '%s.out' % phoenix_file_basename
 phoenix_pid_file = '%s.pid' % phoenix_file_basename
 opts = os.getenv('PHOENIX_QUERYSERVER_OPTS', '')
 
-# load hbase-env.sh to extract JAVA_HOME, HBASE_PID_DIR, HBASE_LOG_DIR
-hbase_env_path = os.path.join(hbase_config_path, 'hbase-env.sh')
+# load hbase-env.??? to extract JAVA_HOME, HBASE_PID_DIR, HBASE_LOG_DIR
+hbase_env_path = None
+hbase_env_cmd  = None
+if os.name == 'posix':
+hbase_env_path = os.path.join(hbase_config_path, 'hbase-env.sh')
+hbase_env_cmd = ['bash', '-c', 'source %s && env' % hbase_env_path]
+elif os.name == 'nt':
+hbase_env_path = os.path.join(hbase_config_path, 'hbase-env.cmd')
+hbase_env_cmd = ['cmd.exe', '/c', 'call %s && set' % hbase_env_path]
+if not hbase_env_path or not hbase_env_cmd:
+print >> sys.stderr, "hbase-env file unknown on platform %s" % os.name
+sys.exit(-1)
+
 hbase_env = {}
 if os.path.isfile(hbase_env_path):
-p = subprocess.Popen(['bash', '-c', 'source %s && env' % hbase_env_path], 
stdout = subprocess.PIPE)
+p = subprocess.Popen(hbase_env_cmd, stdout = subprocess.PIPE)
 for x in p.stdout:
 (k, _, v) = x.partition('=')
 hbase_env[k.strip()] = v.strip()



[06/49] phoenix git commit: PHOENIX-1875 implement ARRAY_PREPEND built in function (Dumindu)

2015-06-24 Thread tdsilva
PHOENIX-1875 implement ARRAY_PREPEND built in function (Dumindu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b5ef25c9
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b5ef25c9
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b5ef25c9

Branch: refs/heads/json
Commit: b5ef25c942fb0f4ab9a6fec66e821c5c3473ea46
Parents: 978b232
Author: ramkrishna ramkrishna.s.vasude...@gmail.com
Authored: Wed May 13 10:46:19 2015 +0530
Committer: ramkrishna ramkrishna.s.vasude...@gmail.com
Committed: Wed May 13 10:46:19 2015 +0530

--
 .../phoenix/end2end/ArrayPrependFunctionIT.java | 652 +++
 .../phoenix/expression/ExpressionType.java  |   4 +-
 .../function/ArrayAppendFunction.java   |  35 +-
 .../function/ArrayModifierFunction.java |  75 +++
 .../function/ArrayPrependFunction.java  |  96 +++
 .../phoenix/schema/types/PArrayDataType.java| 161 -
 .../expression/ArrayPrependFunctionTest.java| 552 
 7 files changed, 1541 insertions(+), 34 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b5ef25c9/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayPrependFunctionIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayPrependFunctionIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayPrependFunctionIT.java
new file mode 100644
index 000..3145d95
--- /dev/null
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayPrependFunctionIT.java
@@ -0,0 +1,652 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.end2end;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.sql.*;
+
+import org.apache.phoenix.schema.TypeMismatchException;
+import org.apache.phoenix.schema.types.PhoenixArray;
+import org.junit.Test;
+
+public class ArrayPrependFunctionIT extends BaseHBaseManagedTimeIT {
+
+private void initTableWithVarArray(Connection conn, String type, Object[] 
objectArray, String value) throws SQLException {
+conn.createStatement().execute("CREATE TABLE t ( k VARCHAR PRIMARY KEY, a " + type + "[],b " + type + ")");
+conn.commit();
+PreparedStatement stmt = conn.prepareStatement("UPSERT INTO t VALUES(?,?," + value + ")");
+PhoenixArray array = (PhoenixArray) conn.createArrayOf(type, 
objectArray);
+stmt.setString(1, a);
+stmt.setArray(2, array);
+stmt.execute();
+conn.commit();
+
+}
+
+private void initTables(Connection conn) throws Exception {
+String ddl = "CREATE TABLE regions (region_name VARCHAR PRIMARY KEY,varchars VARCHAR[],integers INTEGER[],doubles DOUBLE[],bigints BIGINT[],chars CHAR(15)[],double1 DOUBLE,char1 CHAR(17),nullcheck INTEGER,chars2 CHAR(15)[])";
+conn.createStatement().execute(ddl);
+String dml = UPSERT INTO 
regions(region_name,varchars,integers,doubles,bigints,chars,double1,char1,nullcheck,chars2)
 VALUES('SF Bay Area', +
+ARRAY['2345','46345','23234'], +
+ARRAY[2345,46345,23234,456], +
+ARRAY[23.45,46.345,23.234,45.6,5.78], +
+ARRAY[12,34,56,78,910], +
+ARRAY['a','','c','ddd','e'], +
+23.45, +
+'wert', +
+NULL, +
+ARRAY['foo','a','','c','ddd','e'] +
+);
+PreparedStatement stmt = conn.prepareStatement(dml);
+stmt.execute();
+conn.commit();
+}
+
+private void initTablesDesc(Connection conn, String type, String val) 
throws Exception {
+String ddl = "CREATE TABLE regions (pk " + type + " PRIMARY KEY DESC,varchars VARCHAR[],integers INTEGER[],doubles DOUBLE[],bigints BIGINT[],chars CHAR(15)[],chars2 CHAR(15)[], bools BOOLEAN[])";
+conn.createStatement().execute(ddl);
+String dml = UPSERT 

[05/49] phoenix git commit: PHOENIX-1962 Apply check style to the build

2015-06-24 Thread tdsilva
PHOENIX-1962 Apply check style to the build


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/978b2322
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/978b2322
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/978b2322

Branch: refs/heads/json
Commit: 978b2322e3e962550c1cddda9910f4f70346aaee
Parents: 93397af
Author: Nick Dimiduk ndimi...@apache.org
Authored: Sat May 9 11:10:54 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Mon May 11 09:52:00 2015 -0700

--
 phoenix-assembly/pom.xml|   4 +
 phoenix-core/pom.xml|   4 +
 phoenix-flume/pom.xml   |   4 +
 phoenix-pherf/pom.xml   |   1 +
 phoenix-pig/pom.xml |   4 +
 phoenix-server-client/pom.xml   |   4 +
 phoenix-server/pom.xml  |   4 +
 phoenix-spark/pom.xml   |   1 +
 pom.xml |  23 ++
 src/main/config/checkstyle/checker.xml  | 281 +++
 src/main/config/checkstyle/header.txt   |  16 ++
 src/main/config/checkstyle/suppressions.xml |  46 
 12 files changed, 392 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/978b2322/phoenix-assembly/pom.xml
--
diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml
index d743bcf..5a73e7a 100644
--- a/phoenix-assembly/pom.xml
+++ b/phoenix-assembly/pom.xml
@@ -33,6 +33,10 @@
  <description>Assemble Phoenix artifacts</description>
  <packaging>pom</packaging>
 
+  <properties>
+    <top.dir>${project.basedir}/..</top.dir>
+  </properties>
+
   build
 plugins
   plugin

http://git-wip-us.apache.org/repos/asf/phoenix/blob/978b2322/phoenix-core/pom.xml
--
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
index a4c052c..65e4f8e 100644
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@ -24,6 +24,10 @@
   urlhttp://www.apache.org/url
   /organization
 
+  <properties>
+    <top.dir>${project.basedir}/..</top.dir>
+  </properties>
+
   build
 resources
   resource

http://git-wip-us.apache.org/repos/asf/phoenix/blob/978b2322/phoenix-flume/pom.xml
--
diff --git a/phoenix-flume/pom.xml b/phoenix-flume/pom.xml
index b8c4b8a..a35e309 100644
--- a/phoenix-flume/pom.xml
+++ b/phoenix-flume/pom.xml
@@ -31,6 +31,10 @@
   artifactIdphoenix-flume/artifactId
   namePhoenix - Flume/name
 
+  <properties>
+    <top.dir>${project.basedir}/..</top.dir>
+  </properties>
+
   dependencies
dependency
   groupIdorg.apache.phoenix/groupId

http://git-wip-us.apache.org/repos/asf/phoenix/blob/978b2322/phoenix-pherf/pom.xml
--
diff --git a/phoenix-pherf/pom.xml b/phoenix-pherf/pom.xml
index 337f69c..1667c66 100644
--- a/phoenix-pherf/pom.xml
+++ b/phoenix-pherf/pom.xml
@@ -30,6 +30,7 @@
 namePhoenix - Pherf/name
 
  <properties>
+    <top.dir>${project.basedir}/..</top.dir>
  </properties>
 
 profiles

http://git-wip-us.apache.org/repos/asf/phoenix/blob/978b2322/phoenix-pig/pom.xml
--
diff --git a/phoenix-pig/pom.xml b/phoenix-pig/pom.xml
index c1b0985..5005f7c 100644
--- a/phoenix-pig/pom.xml
+++ b/phoenix-pig/pom.xml
@@ -31,6 +31,10 @@
   artifactIdphoenix-pig/artifactId
   namePhoenix - Pig/name
 
+  <properties>
+    <top.dir>${project.basedir}/..</top.dir>
+  </properties>
+
   dependencies
 dependency
   groupIdorg.apache.phoenix/groupId

http://git-wip-us.apache.org/repos/asf/phoenix/blob/978b2322/phoenix-server-client/pom.xml
--
diff --git a/phoenix-server-client/pom.xml b/phoenix-server-client/pom.xml
index 5e2d32e..e23fcba 100644
--- a/phoenix-server-client/pom.xml
+++ b/phoenix-server-client/pom.xml
@@ -24,6 +24,10 @@
 urlhttp://www.apache.org/url
   /organization
 
+  <properties>
+    <top.dir>${project.basedir}/..</top.dir>
+  </properties>
+
   build
 plugins
   plugin

http://git-wip-us.apache.org/repos/asf/phoenix/blob/978b2322/phoenix-server/pom.xml
--
diff --git a/phoenix-server/pom.xml b/phoenix-server/pom.xml
index 4737b63..7dd09aa 100644
--- a/phoenix-server/pom.xml
+++ b/phoenix-server/pom.xml
@@ -24,6 +24,10 @@
 urlhttp://www.apache.org/url
   /organization
 
+  <properties>
+    <top.dir>${project.basedir}/..</top.dir>
+  </properties>
+
   build
 plugins
   plugin


[02/49] phoenix git commit: PHOENIX-1956 SELECT (FALSE OR FALSE) RETURNS TRUE

2015-06-24 Thread tdsilva
PHOENIX-1956 SELECT (FALSE OR FALSE) RETURNS TRUE


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c2fee39e
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c2fee39e
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c2fee39e

Branch: refs/heads/json
Commit: c2fee39efff87930ab3a00d4ed36ec32a493cf7d
Parents: 45a919f
Author: James Taylor jtay...@salesforce.com
Authored: Fri May 8 13:13:44 2015 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Fri May 8 13:14:24 2015 -0700

--
 .../org/apache/phoenix/end2end/EvaluationOfORIT.java | 11 +++
 .../org/apache/phoenix/compile/ExpressionCompiler.java   |  3 +--
 2 files changed, 12 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c2fee39e/phoenix-core/src/it/java/org/apache/phoenix/end2end/EvaluationOfORIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/EvaluationOfORIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/EvaluationOfORIT.java
index c9cc1e2..4355036 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/EvaluationOfORIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/EvaluationOfORIT.java
@@ -19,6 +19,7 @@ package org.apache.phoenix.end2end;
  
 import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
 import java.sql.Connection;
@@ -34,6 +35,16 @@ import org.junit.Test;
 
 public class EvaluationOfORIT extends BaseHBaseManagedTimeIT{

+@Test
+public void testFalseOrFalse() throws SQLException {
+Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+Connection conn = DriverManager.getConnection(getUrl(), props);
+ResultSet rs = conn.createStatement().executeQuery("SELECT (FALSE OR FALSE) AS B FROM SYSTEM.CATALOG LIMIT 1");
+assertTrue(rs.next());
+assertFalse(rs.getBoolean(1));
+conn.close();
+}
+
@Test
public void testPKOrNotPKInOREvaluation() throws SQLException {
Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c2fee39e/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java
index 92899a6..66c1b85 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java
@@ -71,7 +71,6 @@ import 
org.apache.phoenix.expression.function.ArrayAnyComparisonExpression;
 import org.apache.phoenix.expression.function.ArrayElemRefExpression;
 import org.apache.phoenix.expression.function.RoundDecimalExpression;
 import org.apache.phoenix.expression.function.RoundTimestampExpression;
-import org.apache.phoenix.expression.function.UDFExpression;
 import org.apache.phoenix.parse.AddParseNode;
 import org.apache.phoenix.parse.AndParseNode;
 import org.apache.phoenix.parse.ArithmeticParseNode;
@@ -261,7 +260,7 @@ public class ExpressionCompiler extends 
UnsupportedAllParseNodeVisitorExpressio
 determinism = determinism.combine(child.getDeterminism());
 }
 if (children.size() == 0) {
-return LiteralExpression.newConstant(true, determinism);
+return LiteralExpression.newConstant(false, determinism);
 }
 if (children.size() == 1) {
 return children.get(0);



[41/49] phoenix git commit: PHOENIX-2032 psql.py is broken after PHOENIX-2013

2015-06-24 Thread tdsilva
PHOENIX-2032 psql.py is broken after PHOENIX-2013


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d1934afb
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d1934afb
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d1934afb

Branch: refs/heads/json
Commit: d1934afbe6230e823b9009950fe721165e98cc7c
Parents: bfb0eee
Author: Nick Dimiduk ndimi...@apache.org
Authored: Fri Jun 12 10:23:05 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Fri Jun 12 12:12:09 2015 -0700

--
 phoenix-assembly/pom.xml  |  4 
 phoenix-assembly/src/build/client.xml | 27 +++
 2 files changed, 23 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/d1934afb/phoenix-assembly/pom.xml
--
diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml
index 51ff74d..baf6738 100644
--- a/phoenix-assembly/pom.xml
+++ b/phoenix-assembly/pom.xml
@@ -152,10 +152,6 @@
 /dependency
 dependency
   groupIdorg.apache.phoenix/groupId
-  artifactIdphoenix-spark/artifactId
-/dependency
-dependency
-  groupIdorg.apache.phoenix/groupId
   artifactIdphoenix-server/artifactId
 /dependency
 dependency

http://git-wip-us.apache.org/repos/asf/phoenix/blob/d1934afb/phoenix-assembly/src/build/client.xml
--
diff --git a/phoenix-assembly/src/build/client.xml 
b/phoenix-assembly/src/build/client.xml
index 4bd4544..0e1e1f6 100644
--- a/phoenix-assembly/src/build/client.xml
+++ b/phoenix-assembly/src/build/client.xml
@@ -63,13 +63,32 @@
 /dependencySet
 
 !-- Make sure we get all the components, not just the minimal client ones 
(e.g.
-  phoenix-flume, phoenix-pig, etc) --
+  phoenix-flume, phoenix-pig, etc). We should exclude phoenix-server and
+  phoenix-server-client in the future, see PHOENIX-2032, PHOENIX-2038 --
 dependencySet
   outputDirectory//outputDirectory
   unpacktrue/unpack
-  includes
-includeorg.apache.phoenix:phoenix-*/include
-  /includes
+  <!-- multiple deps provide some variant of LICENSE files/directories. These
+       overwrite each other at best, at worst conflict on case-insensitive
+       filesystems like HFS+ and FAT32. Just exclude them -->
+  <unpackOptions>
+    <excludes>
+      <exclude>*license*</exclude>
+      <exclude>*LICENSE*</exclude>
+      <exclude>**/license/**</exclude>
+      <exclude>**/LICENSE/**</exclude>
+    </excludes>
+  </unpackOptions>
+  <!-- this is default, but make intentions clear -->
+  <useTransitiveDependencies>true</useTransitiveDependencies>
+  <!-- When <include> subelements are present, they define a set of
+       artifact coordinates to include. If none is present, then <includes>
+       represents all valid values
+       https://maven.apache.org/plugins/maven-assembly-plugin/assembly.html#class_dependencySet
+       This means bring in all dependencies transitively of the
+       phoenix-assembly module.
+  -->
+  <includes />
 /dependencySet
   /dependencySets
 /assembly



[03/49] phoenix git commit: PHOENIX-1958 Minimize memory allocation on new connection

2015-06-24 Thread tdsilva
PHOENIX-1958 Minimize memory allocation on new connection


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/cd81738b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/cd81738b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/cd81738b

Branch: refs/heads/json
Commit: cd81738b1fbcb5cf19123b2dca8da31f602b9c64
Parents: c2fee39
Author: James Taylor jtay...@salesforce.com
Authored: Sat May 9 10:18:57 2015 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Sat May 9 10:18:57 2015 -0700

--
 .../apache/phoenix/jdbc/PhoenixConnection.java  | 41 +++-
 .../org/apache/phoenix/util/ReadOnlyProps.java  | 32 +++
 2 files changed, 54 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/cd81738b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixConnection.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixConnection.java 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixConnection.java
index c22a7fa..dad60c1 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixConnection.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixConnection.java
@@ -48,7 +48,6 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.concurrent.Executor;
 
@@ -56,6 +55,8 @@ import javax.annotation.Nullable;
 
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.client.Consistency;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.TraceScope;
 import org.apache.phoenix.call.CallRunner;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.exception.SQLExceptionInfo;
@@ -95,15 +96,12 @@ import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.ReadOnlyProps;
 import org.apache.phoenix.util.SQLCloseable;
 import org.apache.phoenix.util.SQLCloseables;
-import org.apache.htrace.Sampler;
-import org.apache.htrace.TraceScope;
 
 import com.google.common.base.Objects;
 import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableMap.Builder;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 
 
 /**
@@ -185,21 +183,9 @@ public class PhoenixConnection implements Connection, 
org.apache.phoenix.jdbc.Jd
 if (tenantId != null) {
 services = 
services.getChildQueryServices(tenantId.getBytesPtr());
 }
-// TODO: we could avoid creating another wrapper if the only 
property
-// specified was for the tenant ID
-Map<String, String> existingProps = services.getProps().asMap();
-final Map<String, String> tmpAugmentedProps = Maps.newHashMapWithExpectedSize(existingProps.size() + info.size());
-tmpAugmentedProps.putAll(existingProps);
-boolean needsDelegate = false;
-for (Entry<Object, Object> entry : this.info.entrySet()) {
-String key = entry.getKey().toString();
-String value = entry.getValue().toString();
-String oldValue = tmpAugmentedProps.put(key, value);
-needsDelegate |= !Objects.equal(oldValue, value);
-}
-this.services = !needsDelegate ? services : new 
DelegateConnectionQueryServices(services) {
-final ReadOnlyProps augmentedProps = new 
ReadOnlyProps(tmpAugmentedProps);
-
+ReadOnlyProps currentProps = services.getProps();
+final ReadOnlyProps augmentedProps = 
currentProps.addAll(filterKnownNonProperties(this.info));
+this.services = augmentedProps == currentProps ? services : new 
DelegateConnectionQueryServices(services) {
 @Override
 public ReadOnlyProps getProps() {
 return augmentedProps;
@@ -261,6 +247,23 @@ public class PhoenixConnection implements Connection, 
org.apache.phoenix.jdbc.Jd
 this.customTracingAnnotations = getImmutableCustomTracingAnnotations();
 }
 
+private static Properties filterKnownNonProperties(Properties info) {
+Properties prunedProperties = info;
+if (info.contains(PhoenixRuntime.CURRENT_SCN_ATTRIB)) {
+if (prunedProperties == info) {
+prunedProperties = PropertiesUtil.deepCopy(info);
+}
+prunedProperties.remove(PhoenixRuntime.CURRENT_SCN_ATTRIB);
+}
+if (info.contains(PhoenixRuntime.TENANT_ID_ATTRIB)) {
+if (prunedProperties == info) {
+

[19/49] phoenix git commit: PHOENIX-1681 Use the new Region Interface (Andrew Purtell)

2015-06-24 Thread tdsilva
http://git-wip-us.apache.org/repos/asf/phoenix/blob/edff624f/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsCollector.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsCollector.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsCollector.java
index 272cac6..e7e1dd7 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsCollector.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsCollector.java
@@ -31,8 +31,8 @@ import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
+import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.phoenix.coprocessor.MetaDataProtocol;
@@ -111,7 +111,7 @@ public class StatisticsCollector {
 this.statsTable.close();
 }
 
-public void updateStatistic(HRegion region) {
+public void updateStatistic(Region region) {
 try {
 ArrayList<Mutation> mutations = new ArrayList<Mutation>();
 writeStatsToStatsTable(region, true, mutations, 
TimeKeeper.SYSTEM.getCurrentTime());
@@ -126,7 +126,7 @@ public class StatisticsCollector {
 }
 }
 
-private void writeStatsToStatsTable(final HRegion region,
+private void writeStatsToStatsTable(final Region region,
 boolean delete, ListMutation mutations, long currentTime) throws 
IOException {
 try {
 // update the statistics table
@@ -215,7 +215,7 @@ public class StatisticsCollector {
 }
 }
 
-public InternalScanner createCompactionScanner(HRegion region, Store 
store, InternalScanner s) throws IOException {
+public InternalScanner createCompactionScanner(Region region, Store store, 
InternalScanner s) throws IOException {
 // See if this is for Major compaction
 if (logger.isDebugEnabled()) {
 logger.debug(Compaction scanner created for stats);
@@ -224,13 +224,13 @@ public class StatisticsCollector {
 return getInternalScanner(region, store, s, cfKey);
 }
 
-public void splitStats(HRegion parent, HRegion left, HRegion right) {
+public void splitStats(Region parent, Region left, Region right) {
 try {
 if (logger.isDebugEnabled()) {
 logger.debug(Collecting stats for split of  + 
parent.getRegionInfo() +  into  + left.getRegionInfo() +  and  + 
right.getRegionInfo());
 }
 List<Mutation> mutations = Lists.newArrayListWithExpectedSize(3);
-for (byte[] fam : parent.getStores().keySet()) {
+for (byte[] fam : parent.getTableDesc().getFamiliesKeys()) {
statsTable.splitStats(parent, left, right, this, new 
ImmutableBytesPtr(fam), mutations);
 }
 if (logger.isDebugEnabled()) {
@@ -243,7 +243,7 @@ public class StatisticsCollector {
 }
 }
 
-protected InternalScanner getInternalScanner(HRegion region, Store store,
+protected InternalScanner getInternalScanner(Region region, Store store,
 InternalScanner internalScan, ImmutableBytesPtr family) {
 return new StatisticsScanner(this, statsTable, region, internalScan, 
family);
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/edff624f/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsScanner.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsScanner.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsScanner.java
index 0e50923..582c4de 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsScanner.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsScanner.java
@@ -26,9 +26,9 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Mutation;
-import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.ScannerContext;
+import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 
 /**
@@ -38,11 +38,11 @@ public class StatisticsScanner implements InternalScanner {
 private static final Log LOG = LogFactory.getLog(StatisticsScanner.class);
 private InternalScanner delegate;
 private StatisticsWriter 

[09/49] phoenix git commit: PHOENIX-1963 - Irregular failures in ResultTest#testMonitorResult

2015-06-24 Thread tdsilva
PHOENIX-1963 - Irregular failures in ResultTest#testMonitorResult


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/289a875b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/289a875b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/289a875b

Branch: refs/heads/json
Commit: 289a875bd1cd76b6437ae1400d6c324bfe3e0754
Parents: a1032fb
Author: cmarcel cmar...@salesforce.com
Authored: Thu May 14 15:56:46 2015 -0700
Committer: cmarcel cmar...@salesforce.com
Committed: Thu May 14 15:57:00 2015 -0700

--
 phoenix-pherf/cluster/pherf.sh  | 2 +-
 .../main/java/org/apache/phoenix/pherf/jmx/MonitorManager.java  | 5 ++---
 .../src/test/java/org/apache/phoenix/pherf/ResultTest.java  | 4 ++--
 phoenix-pherf/standalone/pherf.sh   | 2 +-
 4 files changed, 6 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/289a875b/phoenix-pherf/cluster/pherf.sh
--
diff --git a/phoenix-pherf/cluster/pherf.sh b/phoenix-pherf/cluster/pherf.sh
index aeff856..8d58dfe 100755
--- a/phoenix-pherf/cluster/pherf.sh
+++ b/phoenix-pherf/cluster/pherf.sh
@@ -28,6 +28,6 @@ for f in $PHERF_HOME/lib/*.jar; do
   CLASSPATH=${CLASSPATH}:$f;
 done
 
-CMD="time $}JAVA_HOME}/bin/java ${REMOTE_DEBUG} -Dapp.home=${PHERF_HOME} ${ENV_PROPS} -Xms512m -Xmx3072m -cp ${CLASSPATH} org.apache.phoenix.pherf.Pherf ${@}"
+CMD="time ${JAVA_HOME}/bin/java ${REMOTE_DEBUG} -Dapp.home=${PHERF_HOME} ${ENV_PROPS} -Xms512m -Xmx3072m -cp ${CLASSPATH} org.apache.phoenix.pherf.Pherf ${@}"
 
 eval $CMD
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/phoenix/blob/289a875b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/jmx/MonitorManager.java
--
diff --git 
a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/jmx/MonitorManager.java 
b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/jmx/MonitorManager.java
index 9f46cf7..6f97551 100644
--- 
a/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/jmx/MonitorManager.java
+++ 
b/phoenix-pherf/src/main/java/org/apache/phoenix/pherf/jmx/MonitorManager.java
@@ -106,8 +106,9 @@ public class MonitorManager implements Runnable {
 rowCount.getAndIncrement();
 }
 try {
+resultHandler.flush();
 Thread.sleep(getMonitorFrequency());
-} catch (InterruptedException e) {
+} catch (Exception e) {
 Thread.currentThread().interrupt();
 e.printStackTrace();
 }
@@ -117,9 +118,7 @@ public class MonitorManager implements Runnable {
 try {
 isRunning = false;
 if (resultHandler != null) {
-resultHandler.flush();
 resultHandler.close();
-
 }
 } catch (Exception e) {
 throw new FileLoaderRuntimeException(Could not close monitor 
results., e);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/289a875b/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultTest.java
--
diff --git 
a/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultTest.java 
b/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultTest.java
index 0f4dfd1..c51f0dc 100644
--- a/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultTest.java
+++ b/phoenix-pherf/src/test/java/org/apache/phoenix/pherf/ResultTest.java
@@ -55,7 +55,7 @@ public class ResultTest {
 resultMonitorWriter.write(result);
 resultMonitorWriter.write(result);
 resultMonitorWriter.write(result);
-resultMonitorWriter.flush();
+resultMonitorWriter.close();
 List<Result> results = resultMonitorWriter.read();
 assertEquals("Results did not contain row.", results.size(), 3);
 
@@ -72,7 +72,7 @@ public class ResultTest {
 ExecutorService executorService = Executors.newFixedThreadPool(1);
 MonitorManager monitor = new MonitorManager(100);
 Future future = executorService.submit(monitor);
-List<Result> records = null;
+List<Result> records;
 final int TIMEOUT = 30;
 
 int ct = 0;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/289a875b/phoenix-pherf/standalone/pherf.sh
--
diff --git a/phoenix-pherf/standalone/pherf.sh 
b/phoenix-pherf/standalone/pherf.sh
index e08035a..2b91d2c 100755
--- 

[17/49] phoenix git commit: PHOENIX-1996 Use BytesStringer instead of ZeroCopyByteString

2015-06-24 Thread tdsilva
PHOENIX-1996 Use BytesStringer instead of ZeroCopyByteString


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/286ff26d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/286ff26d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/286ff26d

Branch: refs/heads/json
Commit: 286ff26d82b2638dc5d3db850fa6f4537ab6153f
Parents: c2fed1d
Author: Nick Dimiduk ndimi...@apache.org
Authored: Wed May 20 14:17:47 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Thu May 21 09:25:34 2015 -0700

--
 .../apache/phoenix/cache/ServerCacheClient.java | 10 +++
 .../phoenix/coprocessor/MetaDataProtocol.java   |  9 +++
 .../org/apache/phoenix/parse/PFunction.java |  5 ++--
 .../apache/phoenix/protobuf/ProtobufUtil.java   |  4 +--
 .../query/ConnectionQueryServicesImpl.java  | 18 ++---
 .../org/apache/phoenix/schema/PColumnImpl.java  |  8 +++---
 .../org/apache/phoenix/schema/PTableImpl.java   | 28 ++--
 7 files changed, 40 insertions(+), 42 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/286ff26d/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java 
b/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
index 1233e1c..9718709 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
@@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
 import org.apache.hadoop.hbase.ipc.ServerRpcController;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.compile.ScanRanges;
 import org.apache.phoenix.coprocessor.ServerCachingProtocol.ServerCacheFactory;
@@ -68,7 +69,6 @@ import org.apache.phoenix.util.SQLCloseables;
 import org.apache.phoenix.util.ScanUtil;
 
 import com.google.common.collect.ImmutableSet;
-import com.google.protobuf.HBaseZeroCopyByteString;
 
 /**
  * 
@@ -194,9 +194,9 @@ public class ServerCacheClient {
 new 
BlockingRpcCallbackAddServerCacheResponse();
 
AddServerCacheRequest.Builder builder = AddServerCacheRequest.newBuilder();
 
if(connection.getTenantId() != null){
-
builder.setTenantId(HBaseZeroCopyByteString.wrap(connection.getTenantId().getBytes()));
+
builder.setTenantId(ByteStringer.wrap(connection.getTenantId().getBytes()));
 }
-
builder.setCacheId(HBaseZeroCopyByteString.wrap(cacheId));
+
builder.setCacheId(ByteStringer.wrap(cacheId));
 
builder.setCachePtr(org.apache.phoenix.protobuf.ProtobufUtil.toProto(cachePtr));
 
ServerCacheFactoryProtos.ServerCacheFactory.Builder svrCacheFactoryBuider = 
ServerCacheFactoryProtos.ServerCacheFactory.newBuilder();
 
svrCacheFactoryBuider.setClassName(cacheFactory.getClass().getName());
@@ -307,9 +307,9 @@ public class ServerCacheClient {
new 
BlockingRpcCallbackRemoveServerCacheResponse();

RemoveServerCacheRequest.Builder builder = 
RemoveServerCacheRequest.newBuilder();

if(connection.getTenantId() != null){
-   
builder.setTenantId(HBaseZeroCopyByteString.wrap(connection.getTenantId().getBytes()));
+   
builder.setTenantId(ByteStringer.wrap(connection.getTenantId().getBytes()));
}
-   
builder.setCacheId(HBaseZeroCopyByteString.wrap(cacheId));
+   
builder.setCacheId(ByteStringer.wrap(cacheId));

instance.removeServerCache(controller, builder.build(), rpcCallback);
   

[45/49] phoenix git commit: PHOENIX-1981 : PhoenixHBase Load and Store Funcs should handle all Pig data types

2015-06-24 Thread tdsilva
PHOENIX-1981 : PhoenixHBase Load and Store Funcs should handle all Pig data 
types


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8076126a
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8076126a
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8076126a

Branch: refs/heads/json
Commit: 8076126a741a0cf2a5839b88904fa08bfdfb6cdb
Parents: b61ef77
Author: Prashant Kommireddi 
pkommire...@pkommireddi-ltm.internal.salesforce.com
Authored: Mon May 18 19:41:08 2015 -0700
Committer: Eli Levine elilev...@apache.org
Committed: Mon Jun 15 18:17:44 2015 -0700

--
 .../org/apache/phoenix/pig/util/TypeUtil.java   | 415 +--
 .../apache/phoenix/pig/util/TypeUtilTest.java   |  52 +++
 2 files changed, 251 insertions(+), 216 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/8076126a/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java
--
diff --git 
a/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java 
b/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java
index bdee3a4..6549445 100644
--- a/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java
+++ b/phoenix-pig/src/main/java/org/apache/phoenix/pig/util/TypeUtil.java
@@ -1,19 +1,11 @@
 /*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * License); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an AS IS BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Licensed to the Apache Software Foundation (ASF) under one or more 
contributor license agreements. See the NOTICE
+ * file distributed with this work for additional information regarding 
copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the License); you may not 
use this file except in compliance with the
+ * License. You may obtain a copy of the License at 
http://www.apache.org/licenses/LICENSE-2.0 Unless required by
+ * applicable law or agreed to in writing, software distributed under the 
License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language
+ * governing permissions and limitations under the License.
  */
 
 package org.apache.phoenix.pig.util;
@@ -29,11 +21,11 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.phoenix.pig.writable.PhoenixPigDBWritable;
 import org.apache.phoenix.schema.types.PBinary;
-import org.apache.phoenix.schema.types.PChar;
-import org.apache.phoenix.schema.types.PDecimal;
 import org.apache.phoenix.schema.types.PBoolean;
+import org.apache.phoenix.schema.types.PChar;
 import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.schema.types.PDate;
+import org.apache.phoenix.schema.types.PDecimal;
 import org.apache.phoenix.schema.types.PDouble;
 import org.apache.phoenix.schema.types.PFloat;
 import org.apache.phoenix.schema.types.PInteger;
@@ -56,7 +48,6 @@ import org.apache.phoenix.schema.types.PVarchar;
 import org.apache.pig.PigException;
 import org.apache.pig.ResourceSchema.ResourceFieldSchema;
 import org.apache.pig.backend.hadoop.hbase.HBaseBinaryConverter;
-import org.apache.pig.builtin.Utf8StorageConverter;
 import org.apache.pig.data.DataByteArray;
 import org.apache.pig.data.DataType;
 import org.apache.pig.data.Tuple;
@@ -68,258 +59,250 @@ import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableMap.Builder;
 
 public final class TypeUtil {
-   
+
 private static final Log LOG = LogFactory.getLog(TypeUtil.class);
-private static final HBaseBinaryConverter binaryConverter = new 
HBaseBinaryConverter ();
-   private static final ImmutableMapPDataType,Byte 
phoenixTypeToPigDataType = init();
-   
-   private TypeUtil(){
-   }
-   
-   /**
-* A map of Phoenix to Pig data types.
-* @return
-*/
-   private static ImmutableMapPDataType, Byte init() {
-final ImmutableMap.BuilderPDataType,Byte builder = new 
BuilderPDataType,Byte ();
-   

[43/49] phoenix git commit: PHOENIX-1660 Implement missing math built-in functions ABS, POWER, LN, LOG, SQRT, CBRT, EXP (Shuxiong Ye)

2015-06-24 Thread tdsilva
PHOENIX-1660 Implement missing math built-in functions ABS, POWER, LN, LOG, 
SQRT, CBRT, EXP (Shuxiong Ye)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c2927dde
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c2927dde
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c2927dde

Branch: refs/heads/json
Commit: c2927ddec5ab954dd779516ed29b4b7fa4b011d9
Parents: d1934af
Author: James Taylor jamestay...@apache.org
Authored: Mon Jun 15 15:53:44 2015 -0700
Committer: James Taylor jamestay...@apache.org
Committed: Mon Jun 15 15:53:44 2015 -0700

--
 .../phoenix/end2end/AbsFunctionEnd2EndIT.java   | 108 +++
 .../phoenix/end2end/CbrtFunctionEnd2EndIT.java  | 143 +++
 .../phoenix/end2end/ExpFunctionEnd2EndIT.java   | 128 +
 .../phoenix/end2end/LnLogFunctionEnd2EndIT.java | 143 +++
 .../phoenix/end2end/PowerFunctionEnd2EndIT.java | 144 +++
 .../phoenix/expression/ExpressionType.java  |  14 +-
 .../expression/function/AbsFunction.java|  66 +++
 .../expression/function/CbrtFunction.java   |  55 ++
 .../expression/function/ExpFunction.java|  55 ++
 .../function/JavaMathOneArgumentFunction.java   |  43 ++---
 .../function/JavaMathTwoArgumentFunction.java   |  69 +++
 .../phoenix/expression/function/LnFunction.java |  55 ++
 .../expression/function/LogFunction.java|  56 ++
 .../expression/function/PowerFunction.java  |  51 ++
 .../expression/function/ScalarFunction.java |   4 +-
 .../expression/function/SqrtFunction.java   |   8 +-
 .../apache/phoenix/schema/types/PDecimal.java   |  11 ++
 .../phoenix/schema/types/PNumericType.java  |   8 +
 .../phoenix/schema/types/PRealNumber.java   |   8 +
 .../phoenix/schema/types/PWholeNumber.java  |   8 +
 .../phoenix/compile/QueryCompilerTest.java  |  68 ++-
 .../phoenix/expression/AbsFunctionTest.java | 180 ++
 .../phoenix/expression/CbrtFunctionTest.java| 127 +
 .../phoenix/expression/ExpFunctionTest.java | 150 +++
 .../phoenix/expression/LnLogFunctionTest.java   | 182 +++
 .../phoenix/expression/PowerFunctionTest.java   | 182 +++
 26 files changed, 2036 insertions(+), 30 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c2927dde/phoenix-core/src/it/java/org/apache/phoenix/end2end/AbsFunctionEnd2EndIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AbsFunctionEnd2EndIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AbsFunctionEnd2EndIT.java
new file mode 100644
index 000..0c6204c
--- /dev/null
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AbsFunctionEnd2EndIT.java
@@ -0,0 +1,108 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.end2end;
+
+import static org.apache.phoenix.util.TestUtil.closeStmtAndConn;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.math.BigDecimal;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+
+import org.apache.phoenix.expression.function.AbsFunction;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * End to end tests for {@link AbsFunction}
+ */
+public class AbsFunctionEnd2EndIT extends BaseHBaseManagedTimeIT {
+
+private static final String KEY = key;
+
+@Before
+public void initTable() throws Exception {
+Connection conn = null;
+PreparedStatement stmt = null;
+try {
+conn = DriverManager.getConnection(getUrl());
+String ddl;
+ddl = CREATE TABLE testSigned (k VARCHAR NOT NULL PRIMARY KEY, 
dec DECIMAL, doub DOUBLE, fl FLOAT, inte INTEGER, lon BIGINT, smalli SMALLINT, 
tinyi TINYINT);
+conn.createStatement().execute(ddl);
+conn.commit();
+  

[16/49] phoenix git commit: PHOENIX-1984 Make INSTR 1-based instead of 0-based

2015-06-24 Thread tdsilva
PHOENIX-1984 Make INSTR 1-based instead of 0-based

Bring functionality of INSTR built-in function in-line with other
SQL string functions, with indexing of strings starting at 1.

Signed-off-by: Gabriel Reid gabri...@ngdata.com


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c2fed1da
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c2fed1da
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c2fed1da

Branch: refs/heads/json
Commit: c2fed1dac8305f489939fc18e47cd2c2a6c596d8
Parents: d3ff079
Author: NAVEEN MADHIRE vmadh...@indiana.edu
Authored: Mon May 18 22:14:57 2015 -0500
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Thu May 21 17:24:47 2015 +0200

--
 .../apache/phoenix/end2end/InstrFunctionIT.java | 12 ++---
 .../expression/function/InstrFunction.java  |  2 +-
 .../expression/function/InstrFunctionTest.java  | 48 ++--
 3 files changed, 31 insertions(+), 31 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c2fed1da/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java
index 57c0661..b869ff4 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java
@@ -63,7 +63,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
 Connection conn = DriverManager.getConnection(getUrl());
 initTable(conn, ASC, abcdefghijkl,fgh);
 String queryToExecute = SELECT INSTR(name, 'fgh') FROM SAMPLE;
-testInstr(conn, queryToExecute, 5);
+testInstr(conn, queryToExecute, 6);
 }
 
 @Test
@@ -71,7 +71,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
 Connection conn = DriverManager.getConnection(getUrl());
 initTable(conn, DESC, abcdefghijkl,fgh);
 String queryToExecute = SELECT INSTR(name, 'fgh') FROM SAMPLE;
-testInstr(conn, queryToExecute, 5);
+testInstr(conn, queryToExecute, 6);
 }
 
 @Test
@@ -79,7 +79,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
 Connection conn = DriverManager.getConnection(getUrl());
 initTable(conn, ASC, abcde fghijkl,lmn);
 String queryToExecute = SELECT INSTR(name, 'lmn') FROM SAMPLE;
-testInstr(conn, queryToExecute, -1);
+testInstr(conn, queryToExecute, 0);
 }
 
 @Test
@@ -87,7 +87,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
 Connection conn = DriverManager.getConnection(getUrl());
 initTable(conn, DESC, abcde fghijkl,lmn);
 String queryToExecute = SELECT INSTR(name, 'lmn') FROM SAMPLE;
-testInstr(conn, queryToExecute, -1);
+testInstr(conn, queryToExecute, 0);
 }
 
 @Test
@@ -95,7 +95,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
 Connection conn = DriverManager.getConnection(getUrl());
 initTable(conn, ASC, AɚɦFGH,ɚɦ);
 String queryToExecute = SELECT INSTR(name, 'ɚɦ') FROM SAMPLE;
-testInstr(conn, queryToExecute, 1);
+testInstr(conn, queryToExecute, 2);
 }
 
 @Test
@@ -103,7 +103,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT 
{
 Connection conn = DriverManager.getConnection(getUrl());
 initTable(conn, DESC, AɚɦFGH,ɚɦ);
 String queryToExecute = SELECT INSTR(name, 'ɚɦ') FROM SAMPLE;
-testInstr(conn, queryToExecute, 1);
+testInstr(conn, queryToExecute, 2);
 } 
 
 @Test

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c2fed1da/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InstrFunction.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InstrFunction.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InstrFunction.java
index 317d4b3..7a002f8 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InstrFunction.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InstrFunction.java
@@ -82,7 +82,7 @@ public class InstrFunction extends ScalarFunction{
 
 String sourceStr = (String) PVarchar.INSTANCE.toObject(ptr, 
getChildren().get(0).getSortOrder());
 
-position = sourceStr.indexOf(strToSearch);
+position = sourceStr.indexOf(strToSearch) + 1;
 ptr.set(PInteger.INSTANCE.toBytes(position));
 return true;
 }


[29/49] phoenix git commit: PHOENIX-2007 java.sql.SQLException: Encountered exception in sub plan [0] execution (Alicia Ying Shu)

2015-06-24 Thread tdsilva
PHOENIX-2007 java.sql.SQLException: Encountered exception in sub plan [0] 
execution (Alicia Ying Shu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/82df3b97
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/82df3b97
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/82df3b97

Branch: refs/heads/json
Commit: 82df3b97a9ca88605f78b59e547819ff3bf9cd7a
Parents: 583b5b1
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Mon Jun 1 21:04:43 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Mon Jun 1 21:04:43 2015 +0530

--
 .../org/apache/phoenix/end2end/HashJoinIT.java  | 54 
 .../apache/phoenix/execute/HashJoinPlan.java|  7 +--
 2 files changed, 58 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/82df3b97/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinIT.java
index a03204a..88e03ca 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinIT.java
@@ -3813,6 +3813,60 @@ public class HashJoinIT extends BaseHBaseManagedTimeIT {
 }
 }
 
+@Test
+public void testSubqueryWithoutData() throws Exception {
+Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+Connection conn = DriverManager.getConnection(getUrl(), props);
+conn.setAutoCommit(false);
+
+try {
+String GRAMMAR_TABLE = CREATE TABLE IF NOT EXISTS GRAMMAR_TABLE 
(ID INTEGER PRIMARY KEY,  +
+unsig_id UNSIGNED_INT, big_id BIGINT, unsig_long_id 
UNSIGNED_LONG, tiny_id TINYINT, +
+unsig_tiny_id UNSIGNED_TINYINT, small_id SMALLINT, 
unsig_small_id UNSIGNED_SMALLINT, + 
+float_id FLOAT, unsig_float_id UNSIGNED_FLOAT, double_id 
DOUBLE, unsig_double_id UNSIGNED_DOUBLE, + 
+decimal_id DECIMAL, boolean_id BOOLEAN, time_id TIME, 
date_id DATE, timestamp_id TIMESTAMP, + 
+unsig_time_id TIME, unsig_date_id DATE, 
unsig_timestamp_id TIMESTAMP, varchar_id VARCHAR (30), + 
+char_id CHAR (30), binary_id BINARY (100), varbinary_id 
VARBINARY (100));
+
+String LARGE_TABLE = CREATE TABLE IF NOT EXISTS LARGE_TABLE (ID 
INTEGER PRIMARY KEY,  +
+unsig_id UNSIGNED_INT, big_id BIGINT, unsig_long_id 
UNSIGNED_LONG, tiny_id TINYINT, +
+unsig_tiny_id UNSIGNED_TINYINT, small_id SMALLINT, 
unsig_small_id UNSIGNED_SMALLINT, + 
+float_id FLOAT, unsig_float_id UNSIGNED_FLOAT, double_id 
DOUBLE, unsig_double_id UNSIGNED_DOUBLE, + 
+decimal_id DECIMAL, boolean_id BOOLEAN, time_id TIME, 
date_id DATE, timestamp_id TIMESTAMP, + 
+unsig_time_id TIME, unsig_date_id DATE, 
unsig_timestamp_id TIMESTAMP, varchar_id VARCHAR (30), + 
+char_id CHAR (30), binary_id BINARY (100), varbinary_id 
VARBINARY (100));
+
+String SECONDARY_LARGE_TABLE = CREATE TABLE IF NOT EXISTS 
SECONDARY_LARGE_TABLE (SEC_ID INTEGER PRIMARY KEY, +
+sec_unsig_id UNSIGNED_INT, sec_big_id BIGINT, 
sec_usnig_long_id UNSIGNED_LONG, sec_tiny_id TINYINT, + 
+sec_unsig_tiny_id UNSIGNED_TINYINT, sec_small_id 
SMALLINT, sec_unsig_small_id UNSIGNED_SMALLINT, + 
+sec_float_id FLOAT, sec_unsig_float_id UNSIGNED_FLOAT, 
sec_double_id DOUBLE, sec_unsig_double_id UNSIGNED_DOUBLE, +
+sec_decimal_id DECIMAL, sec_boolean_id BOOLEAN, 
sec_time_id TIME, sec_date_id DATE, +
+sec_timestamp_id TIMESTAMP, sec_unsig_time_id TIME, 
sec_unsig_date_id DATE, sec_unsig_timestamp_id TIMESTAMP, +
+sec_varchar_id VARCHAR (30), sec_char_id CHAR (30), 
sec_binary_id BINARY (100), sec_varbinary_id VARBINARY (100));
+createTestTable(getUrl(), GRAMMAR_TABLE);
+createTestTable(getUrl(), LARGE_TABLE);
+createTestTable(getUrl(), SECONDARY_LARGE_TABLE);
+
+String ddl = SELECT * FROM (SELECT ID, BIG_ID, DATE_ID FROM 
LARGE_TABLE AS A WHERE (A.ID % 5) = 0) AS A  +
+INNER JOIN (SELECT SEC_ID, SEC_TINY_ID, 
SEC_UNSIG_FLOAT_ID FROM SECONDARY_LARGE_TABLE AS B WHERE (B.SEC_ID % 5) = 0) AS 
B  + 
+ON A.ID=B.SEC_ID WHERE A.DATE_ID  ALL (SELECT 
SEC_DATE_ID FROM SECONDARY_LARGE_TABLE LIMIT 100)  +  
+AND B.SEC_UNSIG_FLOAT_ID = ANY (SELECT sec_unsig_float_id 
FROM 

[21/49] phoenix git commit: PHOENIX-2008 Integration tests are failing with HBase-1.1.0 because of HBASE-13756 (Rajeshbabu)

2015-06-24 Thread tdsilva
PHOENIX-2008 Integration tests are failing with HBase-1.1.0 because of 
HBASE-13756 (Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a28c1d3b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a28c1d3b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a28c1d3b

Branch: refs/heads/json
Commit: a28c1d3b2d31377f70e0a4c661c3c70d8bc99216
Parents: edff624
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Sat May 23 23:27:27 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Sat May 23 23:27:27 2015 +0530

--
 phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a28c1d3b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
--
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
index 54ae670..4aa28c4 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
@@ -620,6 +620,8 @@ public abstract class BaseTest {
 }
 //no point doing sanity checks when running tests.
 conf.setBoolean(hbase.table.sanity.checks, false);
+// Remove this configuration once hbase has HBASE-13756 fix.
+conf.set(hbase.regionserver.msginterval, 30);
 // set the server rpc controller and rpc scheduler factory, used to 
configure the cluster
 conf.set(RpcControllerFactory.CUSTOM_CONTROLLER_CONF_KEY, 
DEFAULT_SERVER_RPC_CONTROLLER_FACTORY);
 conf.set(RSRpcServices.REGION_SERVER_RPC_SCHEDULER_FACTORY_CLASS, 
DEFAULT_RPC_SCHEDULER_FACTORY);



[39/49] phoenix git commit: PHOENIX-2033 PQS log environment details on launch

2015-06-24 Thread tdsilva
PHOENIX-2033 PQS log environment details on launch


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/67fea166
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/67fea166
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/67fea166

Branch: refs/heads/json
Commit: 67fea1665d6ebb963e0dff335f513e4f61cbd22c
Parents: 31a1ca6
Author: Nick Dimiduk ndimi...@apache.org
Authored: Tue Jun 9 17:12:21 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Fri Jun 12 09:33:56 2015 -0700

--
 .../apache/phoenix/queryserver/server/Main.java | 69 
 1 file changed, 69 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/67fea166/phoenix-server/src/main/java/org/apache/phoenix/queryserver/server/Main.java
--
diff --git 
a/phoenix-server/src/main/java/org/apache/phoenix/queryserver/server/Main.java 
b/phoenix-server/src/main/java/org/apache/phoenix/queryserver/server/Main.java
index 55febc5..9f9bfc7 100644
--- 
a/phoenix-server/src/main/java/org/apache/phoenix/queryserver/server/Main.java
+++ 
b/phoenix-server/src/main/java/org/apache/phoenix/queryserver/server/Main.java
@@ -34,7 +34,12 @@ import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
+import java.lang.management.ManagementFactory;
+import java.lang.management.RuntimeMXBean;
 import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 
@@ -50,6 +55,11 @@ public final class Main extends Configured implements Tool, 
Runnable {
   phoenix.queryserver.http.port;
   public static final int DEFAULT_HTTP_PORT = 8765;
 
+  public static final String QUERY_SERVER_ENV_LOGGING_KEY =
+  phoenix.queryserver.envvars.logging.disabled;
+  public static final String QUERY_SERVER_ENV_LOGGING_SKIPWORDS_KEY =
+  phoenix.queryserver.envvars.logging.skipwords;
+
   public static final String KEYTAB_FILENAME_KEY = 
phoenix.queryserver.keytab.file;
   public static final String KERBEROS_PRINCIPAL_KEY = 
phoenix.queryserver.kerberos.principal;
   public static final String DNS_NAMESERVER_KEY = 
phoenix.queryserver.dns.nameserver;
@@ -58,12 +68,70 @@ public final class Main extends Configured implements Tool, 
Runnable {
 
   protected static final Log LOG = LogFactory.getLog(Main.class);
 
+  @SuppressWarnings(serial)
+  private static final SetString DEFAULT_SKIP_WORDS = new HashSetString() {
+{
+  add(secret);
+  add(passwd);
+  add(password);
+  add(credential);
+}
+  };
+
   private final String[] argv;
   private final CountDownLatch runningLatch = new CountDownLatch(1);
   private HttpServer server = null;
   private int retCode = 0;
   private Throwable t = null;
 
+  /**
+   * Log information about the currently running JVM.
+   */
+  public static void logJVMInfo() {
+// Print out vm stats before starting up.
+RuntimeMXBean runtime = ManagementFactory.getRuntimeMXBean();
+if (runtime != null) {
+  LOG.info(vmName= + runtime.getVmName() + , vmVendor= +
+  runtime.getVmVendor() + , vmVersion= + runtime.getVmVersion());
+  LOG.info(vmInputArguments= + runtime.getInputArguments());
+}
+  }
+
+  /**
+   * Logs information about the currently running JVM process including
+   * the environment variables. Logging of env vars can be disabled by
+   * setting {@code phoenix.envvars.logging.disabled} to {@code true}.
+   * pIf enabled, you can also exclude environment variables containing
+   * certain substrings by setting {@code phoenix.envvars.logging.skipwords}
+   * to comma separated list of such substrings.
+   */
+  public static void logProcessInfo(Configuration conf) {
+// log environment variables unless asked not to
+if (conf == null || !conf.getBoolean(QUERY_SERVER_ENV_LOGGING_KEY, false)) 
{
+  SetString skipWords = new HashSetString(DEFAULT_SKIP_WORDS);
+  if (conf != null) {
+String[] confSkipWords = 
conf.getStrings(QUERY_SERVER_ENV_LOGGING_SKIPWORDS_KEY);
+if (confSkipWords != null) {
+  skipWords.addAll(Arrays.asList(confSkipWords));
+}
+  }
+
+  nextEnv:
+  for (Map.EntryString, String entry : System.getenv().entrySet()) {
+String key = entry.getKey().toLowerCase();
+String value = entry.getValue().toLowerCase();
+// exclude variables which may contain skip words
+for(String skipWord : skipWords) {
+  if (key.contains(skipWord) || value.contains(skipWord))
+continue nextEnv;
+}
+LOG.info(env:+entry);
+  }
+}
+// and JVM info
+  

[35/49] phoenix git commit: PHOENIX-2018 Implement math built-in function SQRT (Shuxiong Ye)

2015-06-24 Thread tdsilva
PHOENIX-2018 Implement math built-in function SQRT (Shuxiong Ye)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/e54c99d8
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/e54c99d8
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/e54c99d8

Branch: refs/heads/json
Commit: e54c99d8b1ce7bd6118df46209e102e9a86c3782
Parents: 47466e3
Author: James Taylor jamestay...@apache.org
Authored: Thu Jun 4 14:26:27 2015 -0700
Committer: James Taylor jamestay...@apache.org
Committed: Thu Jun 4 14:26:27 2015 -0700

--
 .../phoenix/end2end/SqrtFunctionEnd2EndIT.java  | 143 ++
 .../phoenix/expression/ExpressionType.java  |   4 +-
 .../function/JavaMathOneArgumentFunction.java   |  77 ++
 .../expression/function/SqrtFunction.java   |  49 ++
 .../phoenix/expression/SqrtFunctionTest.java| 150 +++
 5 files changed, 422 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/e54c99d8/phoenix-core/src/it/java/org/apache/phoenix/end2end/SqrtFunctionEnd2EndIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SqrtFunctionEnd2EndIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SqrtFunctionEnd2EndIT.java
new file mode 100644
index 000..50fdd4f
--- /dev/null
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SqrtFunctionEnd2EndIT.java
@@ -0,0 +1,143 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.end2end;
+
+import static org.apache.phoenix.util.TestUtil.closeStmtAndConn;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+
+import org.apache.phoenix.expression.function.SqrtFunction;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * End to end tests for {@link SqrtFunction}
+ */
+public class SqrtFunctionEnd2EndIT extends BaseHBaseManagedTimeIT {
+
+private static final String KEY = key;
+private static final double ZERO = 1e-8;
+
+@Before
+public void initTable() throws Exception {
+Connection conn = null;
+PreparedStatement stmt = null;
+try {
+conn = DriverManager.getConnection(getUrl());
+String ddl;
+ddl = CREATE TABLE testSigned (k VARCHAR NOT NULL PRIMARY KEY, 
doub DOUBLE, fl FLOAT, inte INTEGER, lon BIGINT, smalli SMALLINT, tinyi 
TINYINT);
+conn.createStatement().execute(ddl);
+ddl = CREATE TABLE testUnsigned (k VARCHAR NOT NULL PRIMARY KEY, 
doub UNSIGNED_DOUBLE, fl UNSIGNED_FLOAT, inte UNSIGNED_INT, lon UNSIGNED_LONG, 
smalli UNSIGNED_SMALLINT, tinyi UNSIGNED_TINYINT);
+conn.createStatement().execute(ddl);
+conn.commit();
+} finally {
+closeStmtAndConn(stmt, conn);
+}
+}
+
+private void updateSignedTable(Connection conn, double data) throws 
Exception {
+PreparedStatement stmt = conn.prepareStatement(UPSERT INTO testSigned 
VALUES (?, ?, ?, ?, ?, ?, ?));
+stmt.setString(1, KEY);
+Double d = Double.valueOf(data);
+stmt.setDouble(2, d.doubleValue());
+stmt.setFloat(3, d.floatValue());
+stmt.setInt(4, d.intValue());
+stmt.setLong(5, d.longValue());
+stmt.setShort(6, d.shortValue());
+stmt.setByte(7, d.byteValue());
+stmt.executeUpdate();
+conn.commit();
+}
+
+private void updateUnsignedTable(Connection conn, double data) throws 
Exception {
+PreparedStatement stmt = conn.prepareStatement(UPSERT INTO 
testUnsigned VALUES (?, ?, ?, ?, ?, ?, ?));
+stmt.setString(1, KEY);
+Double d = Double.valueOf(data);
+stmt.setDouble(2, d.doubleValue());
+stmt.setFloat(3, d.floatValue());
+stmt.setInt(4, d.intValue());
+stmt.setLong(5, d.longValue());
+

[07/49] phoenix git commit: PHOENIX-1945 Phoenix tarball from assembly does not contain phoenix-[version]-server.jar

2015-06-24 Thread tdsilva
PHOENIX-1945 Phoenix tarball from assembly does not contain 
phoenix-[version]-server.jar


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c1e5c71a
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c1e5c71a
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c1e5c71a

Branch: refs/heads/json
Commit: c1e5c71abb84f0b2dcb3e1384e21a3f5a70a4d1a
Parents: b5ef25c
Author: Enis Soztutar e...@apache.org
Authored: Wed May 13 11:11:17 2015 -0700
Committer: Enis Soztutar e...@apache.org
Committed: Wed May 13 11:11:17 2015 -0700

--
 phoenix-assembly/pom.xml | 40 
 1 file changed, 20 insertions(+), 20 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c1e5c71a/phoenix-assembly/pom.xml
--
diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml
index 5a73e7a..51ff74d 100644
--- a/phoenix-assembly/pom.xml
+++ b/phoenix-assembly/pom.xml
@@ -65,7 +65,7 @@
 /configuration
   /execution
   execution
-idpackage-to-tar/id
+idclient-minimal/id
 phasepackage/phase
 goals
   goalsingle/goal
@@ -73,51 +73,51 @@
 configuration
 finalNamephoenix-${project.version}/finalName
   attachfalse/attach
-  tarLongFileModegnu/tarLongFileMode
-  appendAssemblyIdfalse/appendAssemblyId
+  appendAssemblyIdtrue/appendAssemblyId
   descriptors
-descriptorsrc/build/package-to-tar-all.xml/descriptor
+   !--build the phoenix client jar, but without HBase code. --
+descriptorsrc/build/client-without-hbase.xml/descriptor
+   !-- build the phoenix client jar, but without HBase (or its 
depenencies). --
+descriptorsrc/build/client-minimal.xml/descriptor
+   !-- build the phoenix server side jar, that includes 
phoenix-hadoopX-compat, phoenix-hadoop-compat and antlr --
+descriptorsrc/build/server.xml/descriptor
+   !-- build the phoenix server side jar, that includes 
phoenix-hadoopX-compat and phoenix-hadoop-compat. --
+descriptorsrc/build/server-without-antlr.xml/descriptor
   /descriptors
-  tarLongFileModeposix/tarLongFileMode
 /configuration
   /execution
   execution
-idpackage-to-source-tar/id
+idpackage-to-tar/id
 phasepackage/phase
 goals
   goalsingle/goal
 /goals
 configuration
-finalNamephoenix-${project.version}-source/finalName
+finalNamephoenix-${project.version}/finalName
   attachfalse/attach
   tarLongFileModegnu/tarLongFileMode
   appendAssemblyIdfalse/appendAssemblyId
   descriptors
-descriptorsrc/build/src.xml/descriptor
+descriptorsrc/build/package-to-tar-all.xml/descriptor
   /descriptors
   tarLongFileModeposix/tarLongFileMode
 /configuration
-  /execution  
+  /execution
   execution
-idclient-minimal/id
+idpackage-to-source-tar/id
 phasepackage/phase
 goals
   goalsingle/goal
 /goals
 configuration
-finalNamephoenix-${project.version}/finalName
+finalNamephoenix-${project.version}-source/finalName
   attachfalse/attach
-  appendAssemblyIdtrue/appendAssemblyId
+  tarLongFileModegnu/tarLongFileMode
+  appendAssemblyIdfalse/appendAssemblyId
   descriptors
-   !--build the phoenix client jar, but without HBase code. --
-descriptorsrc/build/client-without-hbase.xml/descriptor
-   !-- build the phoenix client jar, but without HBase (or its 
depenencies). --
-descriptorsrc/build/client-minimal.xml/descriptor
-   !-- build the phoenix server side jar, that includes 
phoenix-hadoopX-compat, phoenix-hadoop-compat and antlr --
-descriptorsrc/build/server.xml/descriptor
-   !-- build the phoenix server side jar, that includes 
phoenix-hadoopX-compat and phoenix-hadoop-compat. --
-descriptorsrc/build/server-without-antlr.xml/descriptor
+descriptorsrc/build/src.xml/descriptor
   /descriptors
+  tarLongFileModeposix/tarLongFileMode
 /configuration
   /execution
 /executions



[38/49] phoenix git commit: PHOENIX 1968: Should support saving arrays

2015-06-24 Thread tdsilva
PHOENIX 1968: Should support saving arrays


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/31a1ca6c
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/31a1ca6c
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/31a1ca6c

Branch: refs/heads/json
Commit: 31a1ca6caefb45430969fc7c0d28b50bb515c605
Parents: db90196
Author: ravimagham ravimag...@apache.org
Authored: Thu Jun 11 11:50:21 2015 -0700
Committer: ravimagham ravimag...@apache.org
Committed: Thu Jun 11 11:50:21 2015 -0700

--
 .../apache/phoenix/spark/PhoenixSparkIT.scala   | 21 
 .../phoenix/spark/PhoenixRecordWritable.scala   | 25 
 2 files changed, 41 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/31a1ca6c/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
--
diff --git 
a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala 
b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
index 42e8676..5f256e6 100644
--- a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
+++ b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
@@ -415,4 +415,25 @@ class PhoenixSparkIT extends FunSuite with Matchers with 
BeforeAndAfterAll {
 
 results.toList shouldEqual checkResults
   }
+
+  test(Can save arrays back to phoenix) {
+val dataSet = List((2L, Array(String1, String2, String3)))
+
+sc
+  .parallelize(dataSet)
+  .saveToPhoenix(
+ARRAY_TEST_TABLE,
+Seq(ID,VCARRAY),
+zkUrl = Some(quorumAddress)
+  )
+
+// Load the results back
+val stmt = conn.createStatement()
+val rs = stmt.executeQuery(SELECT VCARRAY FROM ARRAY_TEST_TABLE WHERE ID 
= 2)
+rs.next()
+val sqlArray = rs.getArray(1).getArray().asInstanceOf[Array[String]]
+
+// Verify the arrays are equal
+sqlArray shouldEqual dataSet(0)._2
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/phoenix/blob/31a1ca6c/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRecordWritable.scala
--
diff --git 
a/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRecordWritable.scala
 
b/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRecordWritable.scala
index 67e0bd2..3977657 100644
--- 
a/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRecordWritable.scala
+++ 
b/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRecordWritable.scala
@@ -16,11 +16,12 @@ package org.apache.phoenix.spark
 import java.sql.{PreparedStatement, ResultSet}
 import org.apache.hadoop.mapreduce.lib.db.DBWritable
 import org.apache.phoenix.mapreduce.util.ColumnInfoToStringEncoderDecoder
-import org.apache.phoenix.schema.types.{PDate, PhoenixArray}
+import org.apache.phoenix.schema.types.{PDataType, PDate, PhoenixArray}
 import org.joda.time.DateTime
 import scala.collection.{immutable, mutable}
 import scala.collection.JavaConversions._
 
+
 class PhoenixRecordWritable(var encodedColumns: String) extends DBWritable {
   val upsertValues = mutable.ArrayBuffer[Any]()
   val resultMap = mutable.Map[String, AnyRef]()
@@ -44,13 +45,27 @@ class PhoenixRecordWritable(var encodedColumns: String) 
extends DBWritable {
 upsertValues.zip(columns).zipWithIndex.foreach {
   case ((v, c), i) = {
 if (v != null) {
+
   // Both Java and Joda dates used to work in 4.2.3, but now they must 
be java.sql.Date
+  // Can override any other types here as needed
   val (finalObj, finalType) = v match {
-case dt: DateTime = (new java.sql.Date(dt.getMillis), 
PDate.INSTANCE.getSqlType)
-case d: java.util.Date = (new java.sql.Date(d.getTime), 
PDate.INSTANCE.getSqlType)
-case _ = (v, c.getSqlType)
+case dt: DateTime = (new java.sql.Date(dt.getMillis), 
PDate.INSTANCE)
+case d: java.util.Date = (new java.sql.Date(d.getTime), 
PDate.INSTANCE)
+case _ = (v, c.getPDataType)
+  }
+
+  // Save as array or object
+  finalObj match {
+case obj: Array[AnyRef] = {
+  // Create a java.sql.Array, need to lookup the base sql type name
+  val sqlArray = statement.getConnection.createArrayOf(
+PDataType.arrayBaseType(finalType).getSqlTypeName,
+obj
+  )
+  statement.setArray(i + 1, sqlArray)
+}
+case _ = statement.setObject(i + 1, finalObj)
   }
-  statement.setObject(i + 1, finalObj, finalType)
 } else {
   statement.setNull(i 

[14/49] phoenix git commit: PHOENIX-1995 client uberjar doesn't support dfs

2015-06-24 Thread tdsilva
PHOENIX-1995 client uberjar doesn't support dfs


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/981ed472
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/981ed472
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/981ed472

Branch: refs/heads/json
Commit: 981ed472cb597440fe7c3a2aaa088b103f8f7352
Parents: a4b4e0e
Author: Nick Dimiduk ndimi...@apache.org
Authored: Wed May 20 12:29:36 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Wed May 20 12:55:23 2015 -0700

--
 phoenix-assembly/src/build/client.xml | 10 ++
 1 file changed, 10 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/981ed472/phoenix-assembly/src/build/client.xml
--
diff --git a/phoenix-assembly/src/build/client.xml 
b/phoenix-assembly/src/build/client.xml
index 101ccd6..4bd4544 100644
--- a/phoenix-assembly/src/build/client.xml
+++ b/phoenix-assembly/src/build/client.xml
@@ -29,6 +29,16 @@
 formatjar/format
   /formats
   includeBaseDirectoryfalse/includeBaseDirectory
+
+  containerDescriptorHandlers
+containerDescriptorHandler
+  !--
+  aggregate SPI's so that things like HDFS FileSystem works in uberjar
+  http://docs.oracle.com/javase/tutorial/sound/SPI-intro.html
+  --
+  handlerNamemetaInf-services/handlerName
+/containerDescriptorHandler
+  /containerDescriptorHandlers
   
   componentDescriptors
 componentDescriptorsrc/build/components-minimal.xml/componentDescriptor



[49/49] phoenix git commit: PHOENIX-2029 Queries are making two rpc calls for getTable

2015-06-24 Thread tdsilva
PHOENIX-2029 Queries are making two rpc calls for getTable


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/03a6ac00
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/03a6ac00
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/03a6ac00

Branch: refs/heads/json
Commit: 03a6ac00286f9fbd0466b5739c4036ccb3ad6afb
Parents: d1f7ded
Author: Thomas D'Silva twdsi...@gmail.com
Authored: Mon Jun 8 15:30:40 2015 -0700
Committer: Thomas D'Silva tdsi...@salesforce.com
Committed: Wed Jun 17 11:21:43 2015 -0700

--
 .../org/apache/phoenix/rpc/UpdateCacheIT.java   | 139 +++
 .../apache/phoenix/compile/QueryCompiler.java   |   2 +-
 .../coprocessor/MetaDataEndpointImpl.java   |   6 +-
 .../apache/phoenix/schema/MetaDataClient.java   |  26 ++--
 4 files changed, 156 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/03a6ac00/phoenix-core/src/it/java/org/apache/phoenix/rpc/UpdateCacheIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/rpc/UpdateCacheIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/rpc/UpdateCacheIT.java
new file mode 100644
index 000..c657e41
--- /dev/null
+++ b/phoenix-core/src/it/java/org/apache/phoenix/rpc/UpdateCacheIT.java
@@ -0,0 +1,139 @@
+package org.apache.phoenix.rpc;
+
+import static org.apache.phoenix.util.TestUtil.INDEX_DATA_SCHEMA;
+import static org.apache.phoenix.util.TestUtil.MUTABLE_INDEX_DATA_TABLE;
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.anyLong;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Matchers.isNull;
+import static org.mockito.Mockito.reset;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+import java.math.BigDecimal;
+import java.sql.Connection;
+import java.sql.Date;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT;
+import org.apache.phoenix.end2end.Shadower;
+import org.apache.phoenix.jdbc.PhoenixEmbeddedDriver;
+import org.apache.phoenix.query.ConnectionQueryServices;
+import org.apache.phoenix.query.QueryConstants;
+import org.apache.phoenix.schema.MetaDataClient;
+import org.apache.phoenix.schema.PName;
+import org.apache.phoenix.schema.types.PVarchar;
+import org.apache.phoenix.util.DateUtil;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.ReadOnlyProps;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import com.google.common.collect.Maps;
+
+/**
+ * Verifies the number of rpcs calls from {@link MetaDataClient} updateCache() 
+ * for transactional and non-transactional tables.
+ */
+public class UpdateCacheIT extends BaseHBaseManagedTimeIT {
+   
+   public static final int NUM_MILLIS_IN_DAY = 86400000;
+
+@Before
+public void setUp() throws SQLException {
+ensureTableCreated(getUrl(), MUTABLE_INDEX_DATA_TABLE);
+}
+
+   @BeforeClass
+@Shadower(classBeingShadowed = BaseHBaseManagedTimeIT.class)
+public static void doSetup() throws Exception {
+        Map<String,String> props = Maps.newHashMapWithExpectedSize(3);
+setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
+}
+   
+   public static void validateRowKeyColumns(ResultSet rs, int i) throws SQLException {
+   assertTrue(rs.next());
+   assertEquals(rs.getString(1), "varchar" + String.valueOf(i));
+   assertEquals(rs.getString(2), "char" + String.valueOf(i));
+   assertEquals(rs.getInt(3), i);
+   assertEquals(rs.getInt(4), i);
+   assertEquals(rs.getBigDecimal(5), new BigDecimal(i*0.5d));
+   Date date = new Date(DateUtil.parseDate("2015-01-01 00:00:00").getTime() + (i - 1) * NUM_MILLIS_IN_DAY);
+   assertEquals(rs.getDate(6), date);
+   }
+   
+   public static void setRowKeyColumns(PreparedStatement stmt, int i) throws SQLException {
+// insert row
+stmt.setString(1, "varchar" + String.valueOf(i));
+stmt.setString(2, "char" + String.valueOf(i));
+stmt.setInt(3, i);
+stmt.setLong(4, i);
+stmt.setBigDecimal(5, new BigDecimal(i*0.5d));
+Date date = new Date(DateUtil.parseDate("2015-01-01 00:00:00").getTime() + (i - 1) * NUM_MILLIS_IN_DAY);
+stmt.setDate(6, date);
+}
+   
+   @Test
+   public void