git commit: PHOENIX-939 Generalize SELECT expressions for Pig Loader (Ravi)

2014-06-08 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.0 c75eae1a9 -> 2d9347e0b


PHOENIX-939 Generalize SELECT expressions for Pig Loader (Ravi)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/2d9347e0
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/2d9347e0
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/2d9347e0

Branch: refs/heads/4.0
Commit: 2d9347e0ba022ebc27143348de590baf3cc1b234
Parents: c75eae1
Author: James Taylor jtay...@salesforce.com
Authored: Sun Jun 8 00:33:20 2014 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Sun Jun 8 00:34:39 2014 -0700

--
 .../phoenix/pig/PhoenixHBaseLoaderIT.java   | 84 +++
 .../apache/phoenix/pig/PhoenixHBaseLoader.java  |  3 +
 .../phoenix/pig/PhoenixPigConfiguration.java| 61 +++---
 .../phoenix/pig/hadoop/PhoenixInputFormat.java  |  5 ++
 .../phoenix/pig/hadoop/PhoenixRecordReader.java | 17 ++--
 .../phoenix/pig/util/PhoenixPigSchemaUtil.java  | 13 ++-
 .../pig/util/SqlQueryToColumnInfoFunction.java  | 85 
 .../util/SqlQueryToColumnInfoFunctionTest.java  | 75 +
 8 files changed, 327 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/2d9347e0/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
--
diff --git 
a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java 
b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
index 28afb9a..9f118a6 100644
--- a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
+++ b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
@@ -422,6 +422,90 @@ public class PhoenixHBaseLoaderIT {
 assertEquals(0, rs.getInt(MIN_SAL));
 assertEquals(270, rs.getInt(MAX_SAL));
 }
+   
+   /**
+ * Test for Sequence
+ * @throws Exception
+ */
+@Test
+public void testDataForSQLQueryWithSequences() throws Exception {
+
+ //create the table
+ String ddl = CREATE TABLE  + TABLE_FULL_NAME
++  (ID INTEGER NOT NULL PRIMARY KEY, NAME VARCHAR, AGE 
INTEGER) ;
+
+conn.createStatement().execute(ddl);
+
+String sequenceDdl = CREATE SEQUENCE my_sequence;
+
+conn.createStatement().execute(sequenceDdl);
+   
+//prepare data with 10 rows having age 25 and the other 30.
+final String dml = UPSERT INTO  + TABLE_FULL_NAME +  VALUES(?,?,?);
+PreparedStatement stmt = conn.prepareStatement(dml);
+int rows = 20;
+for(int i = 0 ; i < rows; i++) {
+stmt.setInt(1, i);
+stmt.setString(2, a+i);
+stmt.setInt(3, (i % 2 == 0) ? 25 : 30);
+stmt.execute();
+}
+conn.commit();
+
+//sql query load data and filter rows whose age is > 25
+final String sqlQuery = " SELECT NEXT VALUE FOR my_sequence AS my_seq,ID,NAME,AGE FROM " + TABLE_FULL_NAME + " WHERE AGE > 25";
+pigServer.registerQuery(String.format(
+A = load 'hbase://query/%s' using 
org.apache.phoenix.pig.PhoenixHBaseLoader('%s');, sqlQuery,
+zkQuorum));
+
+
+Iterator<Tuple> iterator = pigServer.openIterator("A");
+int recordsRead = 0;
+while (iterator.hasNext()) {
+Tuple tuple = iterator.next();
+System.out.println( the field value is +tuple.get(1));
+recordsRead++;
+}
+assertEquals(rows/2, recordsRead);
+}
+   
+@Test
+public void testDataForSQLQueryWithFunctions() throws Exception {
+
+ //create the table
+ String ddl = CREATE TABLE  + TABLE_FULL_NAME
++  (ID INTEGER NOT NULL PRIMARY KEY, NAME VARCHAR) ;
+
+conn.createStatement().execute(ddl);
+
+final String dml = UPSERT INTO  + TABLE_FULL_NAME +  VALUES(?,?);
+PreparedStatement stmt = conn.prepareStatement(dml);
+int rows = 20;
+for(int i = 0 ; i < rows; i++) {
+stmt.setInt(1, i);
+stmt.setString(2, a+i);
+stmt.execute();
+}
+conn.commit();
+
+//sql query
+final String sqlQuery =  SELECT UPPER(NAME) AS n FROM  + 
TABLE_FULL_NAME +  ORDER BY ID ;
+
+pigServer.registerQuery(String.format(
+A = load 'hbase://query/%s' using 
org.apache.phoenix.pig.PhoenixHBaseLoader('%s');, sqlQuery,
+zkQuorum));
+
+
+Iterator<Tuple> iterator = pigServer.openIterator("A");
+int i = 0;
+while (iterator.hasNext()) 

git commit: PHOENIX-939 Generalize SELECT expressions for Pig Loader (Ravi)

2014-06-08 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/master 3fd64258a -> 437bf6881


PHOENIX-939 Generalize SELECT expressions for Pig Loader (Ravi)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/437bf688
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/437bf688
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/437bf688

Branch: refs/heads/master
Commit: 437bf6881eea425c28f67f148f5be6fc3093fddc
Parents: 3fd6425
Author: James Taylor jtay...@salesforce.com
Authored: Sun Jun 8 00:33:20 2014 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Sun Jun 8 00:41:27 2014 -0700

--
 .../phoenix/pig/PhoenixHBaseLoaderIT.java   | 84 +++
 .../apache/phoenix/pig/PhoenixHBaseLoader.java  |  3 +
 .../phoenix/pig/PhoenixPigConfiguration.java| 61 +++---
 .../phoenix/pig/hadoop/PhoenixInputFormat.java  |  5 ++
 .../phoenix/pig/hadoop/PhoenixRecordReader.java | 17 ++--
 .../phoenix/pig/util/PhoenixPigSchemaUtil.java  | 13 ++-
 .../pig/util/SqlQueryToColumnInfoFunction.java  | 85 
 .../util/SqlQueryToColumnInfoFunctionTest.java  | 75 +
 8 files changed, 327 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/437bf688/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
--
diff --git 
a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java 
b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
index 28afb9a..9f118a6 100644
--- a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
+++ b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
@@ -422,6 +422,90 @@ public class PhoenixHBaseLoaderIT {
 assertEquals(0, rs.getInt(MIN_SAL));
 assertEquals(270, rs.getInt(MAX_SAL));
 }
+   
+   /**
+ * Test for Sequence
+ * @throws Exception
+ */
+@Test
+public void testDataForSQLQueryWithSequences() throws Exception {
+
+ //create the table
+ String ddl = CREATE TABLE  + TABLE_FULL_NAME
++  (ID INTEGER NOT NULL PRIMARY KEY, NAME VARCHAR, AGE 
INTEGER) ;
+
+conn.createStatement().execute(ddl);
+
+String sequenceDdl = CREATE SEQUENCE my_sequence;
+
+conn.createStatement().execute(sequenceDdl);
+   
+//prepare data with 10 rows having age 25 and the other 30.
+final String dml = UPSERT INTO  + TABLE_FULL_NAME +  VALUES(?,?,?);
+PreparedStatement stmt = conn.prepareStatement(dml);
+int rows = 20;
+for(int i = 0 ; i < rows; i++) {
+stmt.setInt(1, i);
+stmt.setString(2, a+i);
+stmt.setInt(3, (i % 2 == 0) ? 25 : 30);
+stmt.execute();
+}
+conn.commit();
+
+//sql query load data and filter rows whose age is > 25
+final String sqlQuery = " SELECT NEXT VALUE FOR my_sequence AS my_seq,ID,NAME,AGE FROM " + TABLE_FULL_NAME + " WHERE AGE > 25";
+pigServer.registerQuery(String.format(
+A = load 'hbase://query/%s' using 
org.apache.phoenix.pig.PhoenixHBaseLoader('%s');, sqlQuery,
+zkQuorum));
+
+
+Iterator<Tuple> iterator = pigServer.openIterator("A");
+int recordsRead = 0;
+while (iterator.hasNext()) {
+Tuple tuple = iterator.next();
+System.out.println( the field value is +tuple.get(1));
+recordsRead++;
+}
+assertEquals(rows/2, recordsRead);
+}
+   
+@Test
+public void testDataForSQLQueryWithFunctions() throws Exception {
+
+ //create the table
+ String ddl = CREATE TABLE  + TABLE_FULL_NAME
++  (ID INTEGER NOT NULL PRIMARY KEY, NAME VARCHAR) ;
+
+conn.createStatement().execute(ddl);
+
+final String dml = UPSERT INTO  + TABLE_FULL_NAME +  VALUES(?,?);
+PreparedStatement stmt = conn.prepareStatement(dml);
+int rows = 20;
+for(int i = 0 ; i < rows; i++) {
+stmt.setInt(1, i);
+stmt.setString(2, a+i);
+stmt.execute();
+}
+conn.commit();
+
+//sql query
+final String sqlQuery =  SELECT UPPER(NAME) AS n FROM  + 
TABLE_FULL_NAME +  ORDER BY ID ;
+
+pigServer.registerQuery(String.format(
+A = load 'hbase://query/%s' using 
org.apache.phoenix.pig.PhoenixHBaseLoader('%s');, sqlQuery,
+zkQuorum));
+
+
+Iterator<Tuple> iterator = pigServer.openIterator("A");
+int i = 0;
+while 

Jenkins build is back to normal : Phoenix | 4.0 | Hadoop1 #173

2014-06-08 Thread Apache Jenkins Server
See https://builds.apache.org/job/Phoenix-4.0-hadoop1/173/changes



Apache-Phoenix | Master | Hadoop1 | Build Successful

2014-06-08 Thread Apache Jenkins Server
Master branch build status Successful
Source repository https://git-wip-us.apache.org/repos/asf/incubator-phoenix.git

Last Successful Compiled Artifacts https://builds.apache.org/job/Phoenix-master-hadoop1/lastSuccessfulBuild/artifact/

Last Complete Test Report https://builds.apache.org/job/Phoenix-master-hadoop1/lastCompletedBuild/testReport/

Changes
[jtaylor] PHOENIX-939 Generalize SELECT expressions for Pig Loader (Ravi)



git commit: PHOENIX-19 Enhance JDBC connection of Phoenix to support connecting to a Secure HBase cluster (Anil Gupta)

2014-06-08 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.0 2d9347e0b -> 3ef7df14c


PHOENIX-19 Enhance JDBC connection of Phoenix to support connecting to a Secure 
HBase cluster (Anil Gupta)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/3ef7df14
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/3ef7df14
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/3ef7df14

Branch: refs/heads/4.0
Commit: 3ef7df14ce125996ce77d1bf9e4e06abba93
Parents: 2d9347e
Author: James Taylor jtay...@salesforce.com
Authored: Sun Jun 8 10:45:38 2014 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Sun Jun 8 10:45:38 2014 -0700

--
 .../phoenix/jdbc/PhoenixEmbeddedDriver.java | 100 ---
 .../query/ConnectionQueryServicesImpl.java  |  16 ++-
 .../org/apache/phoenix/query/QueryServices.java |   3 +
 .../phoenix/jdbc/PhoenixEmbeddedDriverTest.java |   9 ++
 4 files changed, 112 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/3ef7df14/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
index 8cfe3c2..10c24b8 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
@@ -169,7 +169,7 @@ public abstract class PhoenixEmbeddedDriver implements 
Driver, org.apache.phoeni
 StringTokenizer tokenizer = new StringTokenizer(url == null ?  : 
url.substring(PhoenixRuntime.JDBC_PROTOCOL.length()),DELIMITERS, true);
 int i = 0;
 boolean isMalformedUrl = false;
-String[] tokens = new String[3];
+String[] tokens = new String[5];
 String token = null;
 while (tokenizer.hasMoreTokens() && !(token=tokenizer.nextToken()).equals(TERMINATOR) && tokenizer.hasMoreTokens() && i < tokens.length) {
 token = tokenizer.nextToken();
@@ -188,14 +188,41 @@ public abstract class PhoenixEmbeddedDriver implements 
Driver, org.apache.phoeni
 try {
 port = Integer.parseInt(tokens[1]);
 isMalformedUrl = port < 0;
+if(i == 4){
+   if(!tokens[2].endsWith(.keytab)){
+   isMalformedUrl = true;
+   }
+   tokens[4] = tokens[3];
+   tokens[3] = tokens[2];
+   tokens[2] = null;
+}
 } catch (NumberFormatException e) {
 // If we have 3 tokens, then the second one must be a 
port.
 // If we only have 2 tokens, the second one might be 
the root node:
 // Assume that is the case if we get a 
NumberFormatException
-if (! (isMalformedUrl = i == 3) ) {
+if (!tokens[1].startsWith(/)) {
+isMalformedUrl = true;
+}
+if (i == 2) {
+tokens[4] = null;
+tokens[3] = null;
+tokens[2] = tokens[1];
+tokens[1] = null;
+} else if (i == 3) {
+tokens[4] = tokens[2];
+tokens[3] = tokens[1];
+tokens[2] = null;
+tokens[1] = null;
+} else if (i == 4) {
+tokens[4] = tokens[3];
+tokens[3] = tokens[2];
+tokens[2] = tokens[1];
+tokens[1] = null;
+} else if (i == 5) {
+tokens[4] = tokens[3];
+tokens[3] = tokens[2];
 tokens[2] = tokens[1];
 }
-
 }
 }
 }
@@ -203,13 +230,15 @@ public abstract class PhoenixEmbeddedDriver implements 
Driver, org.apache.phoeni
 throw new 
SQLExceptionInfo.Builder(SQLExceptionCode.MALFORMED_CONNECTION_URL)
 .setMessage(url).build().buildException();
 }
-return new ConnectionInfo(tokens[0],port,tokens[2]);
+return new ConnectionInfo(tokens[0],port,tokens[2], tokens[3], 

git commit: PHOENIX-19 Enhance JDBC connection of Phoenix to support connecting to a Secure HBase cluster (Anil Gupta)

2014-06-08 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.0 3ef7df14c -> 40bc58a5a


PHOENIX-19 Enhance JDBC connection of Phoenix to support connecting to a Secure 
HBase cluster (Anil Gupta)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/40bc58a5
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/40bc58a5
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/40bc58a5

Branch: refs/heads/4.0
Commit: 40bc58a5a1aa5eeebb17e3d33c37b70997aaecb1
Parents: 3ef7df1
Author: James Taylor jtay...@salesforce.com
Authored: Sun Jun 8 11:08:41 2014 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Sun Jun 8 11:08:41 2014 -0700

--
 .../org/apache/phoenix/query/ConnectionQueryServicesImpl.java| 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/40bc58a5/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index 716ba3d..be118c5 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@ -255,8 +255,8 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 private void openConnection() throws SQLException {
 try {
 // check if we need to authenticate with kerberos
-String clientKeytab = config.get(HBASE_CLIENT_KEYTAB);
-String clientPrincipal = config.get(HBASE_CLIENT_PRINCIPAL);
+String clientKeytab = this.getProps().get(HBASE_CLIENT_KEYTAB);
+String clientPrincipal = 
this.getProps().get(HBASE_CLIENT_PRINCIPAL);
 if (clientKeytab != null && clientPrincipal != null) {
 logger.info(Trying to connect to a secure cluster with 
keytab: + clientKeytab);
 UserGroupInformation.setConfiguration(config);



Apache-Phoenix | 4.0 | Hadoop1 | Build Successful

2014-06-08 Thread Apache Jenkins Server
4.0 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf/incubator-phoenix.git

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.0-hadoop1/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.0-hadoop1/lastCompletedBuild/testReport/

Changes
[jtaylor] PHOENIX-19 Enhance JDBC connection of Phoenix to support connecting to a Secure HBase cluster (Anil Gupta)



Apache-Phoenix | Master | Hadoop1 | Build Successful

2014-06-08 Thread Apache Jenkins Server
Master branch build status Successful
Source repository https://git-wip-us.apache.org/repos/asf/incubator-phoenix.git

Last Successful Compiled Artifacts https://builds.apache.org/job/Phoenix-master-hadoop1/lastSuccessfulBuild/artifact/

Last Complete Test Report https://builds.apache.org/job/Phoenix-master-hadoop1/lastCompletedBuild/testReport/

Changes
[jtaylor] PHOENIX-19 Enhance JDBC connection of Phoenix to support connecting to a Secure HBase cluster (Anil Gupta)



git commit: PHOENIX-1033 Update HBase version to 0.94.19 to make connection to secure cluster easier

2014-06-08 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/3.0 e799a0f36 -> b2041c2af


PHOENIX-1033 Update HBase version to 0.94.19 to make connection to secure 
cluster easier


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b2041c2a
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b2041c2a
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b2041c2a

Branch: refs/heads/3.0
Commit: b2041c2afad89973ade53c7089491fe2693b0c54
Parents: e799a0f
Author: James Taylor jtay...@salesforce.com
Authored: Sun Jun 8 11:25:39 2014 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Sun Jun 8 11:25:39 2014 -0700

--
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b2041c2a/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 0d37a55..2fe91f5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -75,7 +75,7 @@
     <hadoop-two.version>2.0.4-alpha</hadoop-two.version>
 
     <!-- Dependency versions -->
-    <hbase.version>0.94.14</hbase.version>
+    <hbase.version>0.94.19</hbase.version>
     <commons-cli.version>1.2</commons-cli.version>
     <hadoop.version>1.0.4</hadoop.version>
     <pig.version>0.12.0</pig.version>



git commit: PHOENIX-1032 Don't compile alternate plans if query is a point lookup

2014-06-08 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/master 156e6cc2d -> 318921eae


PHOENIX-1032 Don't compile alternate plans if query is a point lookup


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/318921ea
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/318921ea
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/318921ea

Branch: refs/heads/master
Commit: 318921eaef93dd5315c144b0f9c22dcf4febabc2
Parents: 156e6cc
Author: James Taylor jtay...@salesforce.com
Authored: Sun Jun 8 11:40:15 2014 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Sun Jun 8 11:40:15 2014 -0700

--
 .../java/org/apache/phoenix/optimize/QueryOptimizer.java| 9 +
 1 file changed, 5 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/318921ea/phoenix-core/src/main/java/org/apache/phoenix/optimize/QueryOptimizer.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/optimize/QueryOptimizer.java 
b/phoenix-core/src/main/java/org/apache/phoenix/optimize/QueryOptimizer.java
index 76276e4..53e6939 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/optimize/QueryOptimizer.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/optimize/QueryOptimizer.java
@@ -76,11 +76,12 @@ public class QueryOptimizer {
 }
 
 public QueryPlan optimize(QueryPlan dataPlan, PhoenixStatement statement, 
List? extends PDatum targetColumns, ParallelIteratorFactory 
parallelIteratorFactory) throws SQLException {
-// Get the statement as it's been normalized now
-// TODO: the recompile for the index tables could skip the normalize 
step
 SelectStatement select = (SelectStatement)dataPlan.getStatement();
-// TODO: consider not even compiling index plans if we have a point 
lookup
-if (!useIndexes || select.isJoin() || dataPlan.getContext().getResolver().getTables().size() > 1) {
+// Exit early if we have a point lookup as we can't get better than 
that
+if (!useIndexes 
+|| select.isJoin() 
+|| dataPlan.getContext().getResolver().getTables().size() > 1
+|| dataPlan.getContext().getScanRanges().isPointLookup()) {
 return dataPlan;
 }
 PTable dataTable = dataPlan.getTableRef().getTable();



git commit: PHOENIX-1034 Move validate/reserve of sequences into query compile

2014-06-08 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/3.0 bf395def6 -> c3067a754


PHOENIX-1034 Move validate/reserve of sequences into query compile


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c3067a75
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c3067a75
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c3067a75

Branch: refs/heads/3.0
Commit: c3067a754541e9d315960200f0136bf696fd2db5
Parents: bf395de
Author: James Taylor jtay...@salesforce.com
Authored: Sun Jun 8 12:47:53 2014 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Sun Jun 8 12:59:33 2014 -0700

--
 .../apache/phoenix/compile/SequenceManager.java |  2 +-
 .../coprocessor/SequenceRegionObserver.java | 13 ++--
 .../phoenix/jdbc/PhoenixPreparedStatement.java  |  5 +-
 .../apache/phoenix/jdbc/PhoenixStatement.java   | 62 +++-
 .../apache/phoenix/parse/BindableStatement.java |  2 -
 .../apache/phoenix/parse/DeleteStatement.java   |  6 --
 .../apache/phoenix/parse/ExplainStatement.java  |  6 --
 .../apache/phoenix/parse/MutableStatement.java  |  6 --
 .../apache/phoenix/parse/SelectStatement.java   |  6 --
 .../phoenix/query/ConnectionQueryServices.java  |  2 +-
 .../query/ConnectionQueryServicesImpl.java  |  6 +-
 .../query/ConnectionlessQueryServicesImpl.java  |  4 +-
 .../query/DelegateConnectionQueryServices.java  |  2 +-
 .../org/apache/phoenix/schema/Sequence.java | 15 ++---
 .../phoenix/pig/hadoop/PhoenixInputFormat.java  |  5 --
 15 files changed, 59 insertions(+), 83 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c3067a75/phoenix-core/src/main/java/org/apache/phoenix/compile/SequenceManager.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/SequenceManager.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/SequenceManager.java
index a5f37f8..8e71c3b 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/SequenceManager.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/SequenceManager.java
@@ -138,7 +138,7 @@ public class SequenceManager {
 return expression;
 }
 
-public void validateSequences(Sequence.Action action) throws SQLException {
+public void validateSequences(Sequence.ValueOp action) throws SQLException 
{
 if (sequenceMap == null || sequenceMap.isEmpty()) {
 return;
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c3067a75/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
index 46834cf..875bb0c 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
@@ -67,7 +67,6 @@ import org.apache.phoenix.util.ServerUtil;
  * @since 3.0.0
  */
 public class SequenceRegionObserver extends BaseRegionObserver {
-public enum Op {CREATE_SEQUENCE, DROP_SEQUENCE, RETURN_SEQUENCE};
 public static final String OPERATION_ATTRIB = SEQUENCE_OPERATION;
 public static final String MAX_TIMERANGE_ATTRIB = MAX_TIMERANGE;
 public static final String CURRENT_VALUE_ATTRIB = CURRENT_VALUE;
@@ -114,7 +113,7 @@ public class SequenceRegionObserver extends 
BaseRegionObserver {
 byte[] cf = entry.getKey();
 for (Map.Entrybyte[],Long kvEntry : 
entry.getValue().entrySet()) {
 get.addColumn(cf, kvEntry.getKey());
-validateOnly = (Sequence.Action.VALIDATE.ordinal() == 
kvEntry.getValue().intValue());
+validateOnly = 
(Sequence.ValueOp.VALIDATE_SEQUENCE.ordinal() == kvEntry.getValue().intValue());
 }
 }
 Result result = region.get(get);
@@ -167,7 +166,7 @@ public class SequenceRegionObserver extends 
BaseRegionObserver {
 if (opBuf == null) {
 return null;
 }
-Op op = Op.values()[opBuf[0]];
+Sequence.MetaOp op = Sequence.MetaOp.values()[opBuf[0]];
 KeyValue keyValue = 
append.getFamilyMap().values().iterator().next().iterator().next();
 
 long clientTimestamp = HConstants.LATEST_TIMESTAMP;
@@ -175,7 +174,7 @@ public class SequenceRegionObserver extends 
BaseRegionObserver {
 long maxGetTimestamp = HConstants.LATEST_TIMESTAMP;
 boolean hadClientTimestamp;
 byte[] clientTimestampBuf = null;
-if (op 

git commit: PHOENIX-1034 Move validate/reserve of sequences into query compile

2014-06-08 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/master 2ad434d6e -> ea9232a31


PHOENIX-1034 Move validate/reserve of sequences into query compile


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ea9232a3
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ea9232a3
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ea9232a3

Branch: refs/heads/master
Commit: ea9232a31e0bed23c6e746e91d3bb3250dc71ae9
Parents: 2ad434d
Author: James Taylor jtay...@salesforce.com
Authored: Sun Jun 8 13:01:55 2014 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Sun Jun 8 13:02:50 2014 -0700

--
 .../phoenix/coprocessor/SequenceRegionObserver.java |  2 +-
 .../apache/phoenix/jdbc/PhoenixPreparedStatement.java   |  4 ++--
 .../java/org/apache/phoenix/jdbc/PhoenixStatement.java  | 12 ++--
 .../phoenix/query/ConnectionQueryServicesImpl.java  |  2 +-
 .../main/java/org/apache/phoenix/schema/Sequence.java   |  4 ++--
 5 files changed, 12 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ea9232a3/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
index a21a61a..97a9a47 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
@@ -129,7 +129,7 @@ public class SequenceRegionObserver extends 
BaseRegionObserver {
long value = 
PDataType.LONG.getCodec().decodeLong(cq.getValueArray(), cq.getValueOffset(), 
SortOrder.getDefault());
 get.addColumn(cf, CellUtil.cloneQualifier(cq));
-validateOnly = 
(Sequence.ValueOp.VALIDATE_SEQUENCES.ordinal() == value);
+validateOnly = 
(Sequence.ValueOp.VALIDATE_SEQUENCE.ordinal() == value);
 }
 }
 Result result = region.get(get);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ea9232a3/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixPreparedStatement.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixPreparedStatement.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixPreparedStatement.java
index d75eb28..7eea568 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixPreparedStatement.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixPreparedStatement.java
@@ -189,7 +189,7 @@ public class PhoenixPreparedStatement extends 
PhoenixStatement implements Prepar
 }
 try {
 // Just compile top level query without optimizing to get 
ResultSetMetaData
-QueryPlan plan = statement.compilePlan(this, 
Sequence.ValueOp.VALIDATE_SEQUENCES);
+QueryPlan plan = statement.compilePlan(this, 
Sequence.ValueOp.VALIDATE_SEQUENCE);
 return new PhoenixResultSetMetaData(this.getConnection(), 
plan.getProjector());
 } finally {
 int lastSetBit = 0;
@@ -212,7 +212,7 @@ public class PhoenixPreparedStatement extends 
PhoenixStatement implements Prepar
 }
 }
 try {
-StatementPlan plan = statement.compilePlan(this, 
Sequence.ValueOp.VALIDATE_SEQUENCES);
+StatementPlan plan = statement.compilePlan(this, 
Sequence.ValueOp.VALIDATE_SEQUENCE);
 return plan.getParameterMetaData();
 } finally {
 int lastSetBit = 0;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ea9232a3/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java
index 529a40a..d4c677b 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java
@@ -195,7 +195,7 @@ public class PhoenixStatement implements Statement, 
SQLCloseable, org.apache.pho
 }
 
 protected QueryPlan optimizeQuery(CompilableStatement stmt) throws 
SQLException {
-QueryPlan plan = stmt.compilePlan(this, 
Sequence.ValueOp.RESERVE_SEQUENCES);
+QueryPlan plan = stmt.compilePlan(this, 

Build failed in Jenkins: Phoenix | Master | Hadoop1 #248

2014-06-08 Thread Apache Jenkins Server
See https://builds.apache.org/job/Phoenix-master-hadoop1/248/changes

Changes:

[jtaylor] PHOENIX-1034 Move validate/reserve of sequences into query compile

[jtaylor] PHOENIX-1034 Move validate/reserve of sequences into query compile

--
[...truncated 1366 lines...]
at 
org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:200)
at 
org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:153)
at 
org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:103)

testPointInTimeSequence[CREATE INDEX ATABLE_IDX ON aTable (a_integer) INCLUDE ( 
   A_STRING, B_STRING, A_DATE)](org.apache.phoenix.end2end.NotQueryIT)  
Time elapsed: 0.554 sec   ERROR!
java.lang.NullPointerException: null
at 
org.apache.phoenix.compile.SequenceManager$SequenceTuple.init(SequenceManager.java:92)
at 
org.apache.phoenix.compile.SequenceManager.newSequenceTuple(SequenceManager.java:80)
at 
org.apache.phoenix.iterate.SequenceResultIterator.next(SequenceResultIterator.java:47)
at 
org.apache.phoenix.jdbc.PhoenixResultSet.next(PhoenixResultSet.java:732)
at 
org.apache.phoenix.end2end.QueryIT.testPointInTimeSequence(QueryIT.java:433)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at 
org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
at 
org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at 
org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
at 
org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at 
org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
at 
org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
at 
org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
at 
org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
at org.junit.runners.Suite.runChild(Suite.java:127)
at org.junit.runners.Suite.runChild(Suite.java:26)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
at 
org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
at 
org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
at org.junit.runners.Suite.runChild(Suite.java:127)
at org.junit.runners.Suite.runChild(Suite.java:26)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
at org.junit.runner.JUnitCore.run(JUnitCore.java:160)
at org.junit.runner.JUnitCore.run(JUnitCore.java:138)
at 
org.apache.maven.surefire.junitcore.JUnitCoreWrapper.createRequestAndRun(JUnitCoreWrapper.java:113)
at 
org.apache.maven.surefire.junitcore.JUnitCoreWrapper.executeLazy(JUnitCoreWrapper.java:94)
at 
org.apache.maven.surefire.junitcore.JUnitCoreWrapper.execute(JUnitCoreWrapper.java:58)
at 
org.apache.maven.surefire.junitcore.JUnitCoreProvider.invoke(JUnitCoreProvider.java:134)
at 
org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:200)
at 
org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:153)
at 

git commit: PHOENIX-1034 Move validate/reserve of sequences into query compile

2014-06-08 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.0 6e9d02c86 -> 0dfa31c35


PHOENIX-1034 Move validate/reserve of sequences into query compile


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/0dfa31c3
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/0dfa31c3
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/0dfa31c3

Branch: refs/heads/4.0
Commit: 0dfa31c35f595d66405c7121c9ce865b7d889805
Parents: 6e9d02c
Author: James Taylor jtay...@salesforce.com
Authored: Sun Jun 8 14:02:35 2014 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Sun Jun 8 14:08:29 2014 -0700

--
 .../DefaultParallelIteratorsRegionSplitterIT.java |  4 +++-
 ...SkipRangeParallelIteratorRegionSplitterIT.java |  4 +++-
 .../phoenix/compile/CreateIndexCompiler.java  |  2 +-
 .../phoenix/compile/CreateTableCompiler.java  |  2 +-
 .../org/apache/phoenix/compile/JoinCompiler.java  |  2 +-
 .../apache/phoenix/compile/PostDDLCompiler.java   |  3 ++-
 .../org/apache/phoenix/compile/QueryCompiler.java | 14 --
 .../apache/phoenix/compile/StatementContext.java  | 18 --
 .../apache/phoenix/compile/UpsertCompiler.java|  2 +-
 .../apache/phoenix/optimize/QueryOptimizer.java   |  5 +++--
 .../iterate/AggregateResultScannerTest.java   |  4 +++-
 11 files changed, 38 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/0dfa31c3/phoenix-core/src/it/java/org/apache/phoenix/end2end/DefaultParallelIteratorsRegionSplitterIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/DefaultParallelIteratorsRegionSplitterIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/DefaultParallelIteratorsRegionSplitterIT.java
index 152b955..3ebbc8b 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/DefaultParallelIteratorsRegionSplitterIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/DefaultParallelIteratorsRegionSplitterIT.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.compile.SequenceManager;
 import org.apache.phoenix.compile.StatementContext;
 import org.apache.phoenix.iterate.DefaultParallelIteratorRegionSplitter;
 import org.apache.phoenix.jdbc.PhoenixConnection;
@@ -61,7 +62,8 @@ public class DefaultParallelIteratorsRegionSplitterIT extends 
BaseParallelIterat
 TableRef tableRef = getTableRef(conn, ts);
 PhoenixConnection pconn = conn.unwrap(PhoenixConnection.class);
 final List<HRegionLocation> regions =  
pconn.getQueryServices().getAllTableRegions(tableRef.getTable().getPhysicalName().getBytes());
-StatementContext context = new StatementContext(new 
PhoenixStatement(pconn), null, scan);
+PhoenixStatement statement = new PhoenixStatement(pconn);
+StatementContext context = new StatementContext(statement, null, scan, 
new SequenceManager(statement));
 DefaultParallelIteratorRegionSplitter splitter = new 
DefaultParallelIteratorRegionSplitter(context, tableRef, 
HintNode.EMPTY_HINT_NODE) {
 @Override
 protected List<HRegionLocation> getAllRegions() throws 
SQLException {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/0dfa31c3/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipRangeParallelIteratorRegionSplitterIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipRangeParallelIteratorRegionSplitterIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipRangeParallelIteratorRegionSplitterIT.java
index 20ce768..d4a40f0 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipRangeParallelIteratorRegionSplitterIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipRangeParallelIteratorRegionSplitterIT.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.compile.ColumnResolver;
 import org.apache.phoenix.compile.ScanRanges;
+import org.apache.phoenix.compile.SequenceManager;
 import org.apache.phoenix.compile.StatementContext;
 import org.apache.phoenix.filter.SkipScanFilter;
 import org.apache.phoenix.iterate.SkipRangeParallelIteratorRegionSplitter;
@@ -356,7 +357,8 @@ public class SkipRangeParallelIteratorRegionSplitterIT 
extends BaseClientManaged
 
 };
 PhoenixConnection connection = DriverManager.getConnection(getUrl(), 
TEST_PROPERTIES).unwrap(PhoenixConnection.class);
-StatementContext context = new 

git commit: Update readme files and remove disclaimer

2014-06-08 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/3.0 9729c171c -> 41ed8388a


Update readme files and remove disclaimer


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/41ed8388
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/41ed8388
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/41ed8388

Branch: refs/heads/3.0
Commit: 41ed8388ac89d05ab9571d14bdd36e9b3ee91185
Parents: 9729c17
Author: James Taylor jtay...@salesforce.com
Authored: Sun Jun 8 15:02:21 2014 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Sun Jun 8 15:03:10 2014 -0700

--
 BUILDING   |  2 +-
 DISCLAIMER |  8 
 README | 10 +-
 README.md  |  4 ++--
 4 files changed, 8 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/41ed8388/BUILDING
--
diff --git a/BUILDING b/BUILDING
index 645f805..aac954c 100644
--- a/BUILDING
+++ b/BUILDING
@@ -60,5 +60,5 @@ Findbugs report is generated in /target/site
 
 ## Generate Apache Web Site
 ===
-checkout https://svn.apache.org/repos/asf/incubator/phoenix
+checkout https://svn.apache.org/repos/asf/phoenix
 $ build.sh

http://git-wip-us.apache.org/repos/asf/phoenix/blob/41ed8388/DISCLAIMER
--
diff --git a/DISCLAIMER b/DISCLAIMER
deleted file mode 100644
index 07eef20..000
--- a/DISCLAIMER
+++ /dev/null
@@ -1,8 +0,0 @@
-Apache Phoenix is an effort undergoing incubation at The Apache
-Software Foundation (ASF), sponsored by the Apache Incubator PMC.
-Incubation is required of all newly accepted projects until a further
-review indicates that the infrastructure, communications, and decision
-making process have stabilized in a manner consistent with other
-successful ASF projects. While incubation status is not necessarily a
-reflection of the completeness or stability of the code, it does
-indicate that the project has yet to be fully endorsed by the ASF.

http://git-wip-us.apache.org/repos/asf/phoenix/blob/41ed8388/README
--
diff --git a/README b/README
index 7f1873e..f98519a 100644
--- a/README
+++ b/README
@@ -1,4 +1,4 @@
-Apache Phoenix [1] Incubator project is a SQL skin over HBase delivered as a 
client-embedded 
+Apache Phoenix [1] project is a SQL skin over HBase delivered as a 
client-embedded 
 JDBC driver targeting low latency queries over HBase data. Apache Phoenix 
takes your SQL query, 
 compiles it into a series of HBase scans, and orchestrates the running of 
those scans to produce 
 regular JDBC result sets.
@@ -13,11 +13,11 @@ Apache Phoenix is made available under the Apache License, 
version 2 [4]
 
 The Phoenix mailing lists and archives are listed here [5]
 
-1. http://phoenix.incubator.apache.org/
-2. http://phoenix.incubator.apache.org/source.html
-3. http://phoenix.incubator.apache.org/issues.html
+1. http://phoenix.apache.org/
+2. http://phoenix.apache.org/source.html
+3. http://phoenix.apache.org/issues.html
 4. http://www.apache.org/licenses/
-5. http://phoenix.incubator.apache.org/mailing_list.html
+5. http://phoenix.apache.org/mailing_list.html
 
 Upgrading from Phoenix 2.2.x to Apache Phoenix 3.0/4.0
 --

http://git-wip-us.apache.org/repos/asf/phoenix/blob/41ed8388/README.md
--
diff --git a/README.md b/README.md
index d08508d..41b562b 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,5 @@
-![logo](http://phoenix.incubator.apache.org/images/logo.png)
+![logo](http://phoenix.apache.org/images/logo.png)
 
-<b>[Apache Phoenix](http://phoenix.incubator.apache.org/)</b> is a SQL skin 
over HBase delivered as a client-embedded JDBC driver targeting low latency 
queries over HBase data. Visit the Apache Phoenix Incubator website 
<b>[here](http://phoenix.incubator.apache.org/)</b>.
+<b>[Apache Phoenix](http://phoenix.apache.org/)</b> is a SQL skin over HBase 
delivered as a client-embedded JDBC driver targeting low latency queries over 
HBase data. Visit the Apache Phoenix website 
<b>[here](http://phoenix.apache.org/)</b>.
 
 Copyright ©2014 [Apache Software Foundation](http://www.apache.org/). All 
Rights Reserved.



git commit: PHOENIX-1028 Prevent declaration of non PK columns as NOT NULL (Ravi)

2014-06-08 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/master dbda9b702 -> a56f78ba6


PHOENIX-1028 Prevent declaration of non PK columns as NOT NULL (Ravi)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a56f78ba
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a56f78ba
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a56f78ba

Branch: refs/heads/master
Commit: a56f78ba68a257b91a42ba0d9b24fcfe1d204c2b
Parents: dbda9b7
Author: James Taylor jtay...@salesforce.com
Authored: Sun Jun 8 21:37:35 2014 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Sun Jun 8 21:37:35 2014 -0700

--
 .../apache/phoenix/end2end/CreateTableIT.java   | 31 
 .../phoenix/exception/SQLExceptionCode.java |  1 +
 .../apache/phoenix/query/QueryConstants.java|  8 ++---
 .../apache/phoenix/schema/MetaDataClient.java   |  8 +
 .../phoenix/compile/QueryCompilerTest.java  |  2 +-
 .../phoenix/index/IndexMaintainerTest.java  |  4 +--
 6 files changed, 47 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a56f78ba/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java
index 96b4a8e..e28273e 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CreateTableIT.java
@@ -32,6 +32,7 @@ import java.util.Properties;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.query.KeyRange;
 import org.apache.phoenix.schema.TableAlreadyExistsException;
@@ -318,5 +319,35 @@ public class CreateTableIT extends BaseClientManagedTimeIT 
{
 assertEquals("a", columnFamilies[0].getNameAsString());
assertEquals(1, columnFamilies[0].getTimeToLive());
 }
+
+
+/**
+ * Test to ensure that NOT NULL constraint isn't added to a non primary 
key column.
+ * @throws Exception
+ */
+@Test
+public void testNotNullConstraintForNonPKColumn() throws Exception {
+
+String ddl = CREATE TABLE IF NOT EXISTS EVENT.APEX_LIMIT (  +
+ ORGANIZATION_ID CHAR(15) NOT NULL,  +
+ EVENT_TIME DATE NOT NULL, USER_ID CHAR(15) NOT NULL,  +
+ ENTRY_POINT_ID CHAR(15) NOT NULL, ENTRY_POINT_TYPE CHAR(2) 
NOT NULL ,  +
+ APEX_LIMIT_ID CHAR(15) NOT NULL,  USERNAME CHAR(80),   +
+ NAMESPACE_PREFIX VARCHAR, ENTRY_POINT_NAME VARCHAR  NOT NULL 
,  +
+ EXECUTION_UNIT_NO VARCHAR, LIMIT_TYPE VARCHAR,  +
+ LIMIT_VALUE DOUBLE   +
+ CONSTRAINT PK PRIMARY KEY ( + 
+ ORGANIZATION_ID, EVENT_TIME,USER_ID,ENTRY_POINT_ID, 
ENTRY_POINT_TYPE, APEX_LIMIT_ID  +
+ ) ) VERSIONS=1;
+
+Properties props = new Properties();
+Connection conn = DriverManager.getConnection(getUrl(), props);
+try {
+conn.createStatement().execute(ddl);
+fail("Non pk column ENTRY_POINT_NAME has a NOT NULL constraint");
+} catch( SQLException sqle) {
+
assertEquals(SQLExceptionCode.INVALID_NOT_NULL_CONSTRAINT.getErrorCode(),sqle.getErrorCode());
+}
+   }
 
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a56f78ba/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java 
b/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
index d9e23f5..39b951d 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
@@ -138,6 +138,7 @@ public enum SQLExceptionCode {
 }),
 ORDER_BY_ARRAY_NOT_SUPPORTED(515, "42893", "ORDER BY of an array type is 
not allowed"),
 NON_EQUALITY_ARRAY_COMPARISON(516, "42894", "Array types may only be 
compared using = or !="),
+INVALID_NOT_NULL_CONSTRAINT(517, "42895", "Invalid not null constraint on 
non primary key column"),
 
 /** 
  * HBase and Phoenix specific implementation defined sub-classes.


Build failed in Jenkins: Phoenix | 3.0 | Hadoop1 #116

2014-06-08 Thread Apache Jenkins Server
See https://builds.apache.org/job/Phoenix-3.0-hadoop1/116/changes

Changes:

[jtaylor] PHOENIX-1028 Prevent declaration of non PK columns as NOT NULL

--
[...truncated 357 lines...]
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.75 sec - in 
org.apache.phoenix.end2end.MD5FunctionIT
Running org.apache.phoenix.end2end.index.MutableIndexIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 28.224 sec - in 
org.apache.phoenix.end2end.TenantSpecificViewIndexIT
Running org.apache.phoenix.end2end.index.ImmutableIndexIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 32.845 sec - in 
org.apache.phoenix.end2end.index.ImmutableIndexIT
Running org.apache.phoenix.end2end.index.DropViewIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.48 sec - in 
org.apache.phoenix.end2end.index.DropViewIT
Running org.apache.phoenix.end2end.index.IndexMetadataIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 58.831 sec - in 
org.apache.phoenix.end2end.index.SaltedIndexIT
Running org.apache.phoenix.end2end.TimezoneOffsetFunctionIT
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.99 sec - in 
org.apache.phoenix.end2end.TimezoneOffsetFunctionIT
Running org.apache.phoenix.end2end.UpsertSelectAutoCommitIT
Tests run: 60, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 109.6 sec - in 
org.apache.phoenix.end2end.HashJoinIT
Running org.apache.phoenix.end2end.BinaryRowKeyIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.919 sec - in 
org.apache.phoenix.end2end.BinaryRowKeyIT
Running org.apache.phoenix.end2end.ReverseFunctionIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 24.984 sec - in 
org.apache.phoenix.end2end.UpsertSelectAutoCommitIT
Running org.apache.phoenix.end2end.salted.SaltedTableUpsertSelectIT
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.996 sec - in 
org.apache.phoenix.end2end.ReverseFunctionIT
Running org.apache.phoenix.end2end.SpillableGroupByIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 44.45 sec - in 
org.apache.phoenix.end2end.index.IndexMetadataIT
Running org.apache.phoenix.end2end.SkipScanQueryIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.867 sec - in 
org.apache.phoenix.end2end.SpillableGroupByIT
Running org.apache.phoenix.end2end.DecodeFunctionIT
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.147 sec - in 
org.apache.phoenix.end2end.DecodeFunctionIT
Running org.apache.phoenix.end2end.TenantSpecificViewIndexSaltedIT
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 19.583 sec - in 
org.apache.phoenix.end2end.salted.SaltedTableUpsertSelectIT
Running org.apache.phoenix.end2end.AutoCommitIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.248 sec - in 
org.apache.phoenix.end2end.AutoCommitIT
Running org.apache.phoenix.end2end.SaltedViewIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 28.671 sec - in 
org.apache.phoenix.end2end.SkipScanQueryIT
Running org.apache.phoenix.end2end.ServerExceptionIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.429 sec - in 
org.apache.phoenix.end2end.TenantSpecificViewIndexSaltedIT
Running org.apache.phoenix.end2end.ViewIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.815 sec - in 
org.apache.phoenix.end2end.ServerExceptionIT
Running org.apache.phoenix.end2end.CSVCommonsLoaderIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 21.937 sec - in 
org.apache.phoenix.end2end.SaltedViewIT
Running org.apache.phoenix.end2end.QueryPlanIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 24.912 sec - in 
org.apache.phoenix.end2end.ViewIT
Running org.apache.phoenix.end2end.AlterTableIT
Tests run: 13, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 30.755 sec - 
in org.apache.phoenix.end2end.CSVCommonsLoaderIT
Running org.apache.phoenix.end2end.ArithmeticQueryIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 22.137 sec - in 
org.apache.phoenix.end2end.QueryPlanIT
Running org.apache.phoenix.end2end.UpsertBigValuesIT
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 24.719 sec - in 
org.apache.phoenix.end2end.ArithmeticQueryIT
Running org.apache.phoenix.end2end.QueryExecWithoutSCNIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 28.824 sec - in 
org.apache.phoenix.end2end.UpsertBigValuesIT
Running org.apache.phoenix.end2end.DeleteIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.927 sec - in 
org.apache.phoenix.end2end.QueryExecWithoutSCNIT
Tests run: 14, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 206.669 sec - 
in org.apache.phoenix.end2end.index.MutableIndexIT
Tests run: 13, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 78.915 sec - 
in org.apache.phoenix.end2end.AlterTableIT
Tests run: 15, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 39.28 sec -