Apache-Phoenix | 4.x-HBase-1.0 | Build Successful

2015-07-20 Thread Apache Jenkins Server
4.x-HBase-1.0 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf?p=phoenix.git;a=shortlog;h=refs/heads/4.x-HBase-1.0

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.x-HBase-1.0/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.x-HBase-1.0/lastCompletedBuild/testReport/

Changes
[gabrielr] PHOENIX-2131 Closing paren in CastParseNode SQL



Build times for last couple of runs. Latest build time is the right most | Legend blue: normal, red: test failure, gray: timeout


Git Push Summary

2015-07-20 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.1 [deleted] 954a4a1b6


phoenix git commit: Add ExpressionFactoryValuesTest

2015-07-20 Thread maryannxue
Repository: phoenix
Updated Branches:
  refs/heads/calcite c8c7f0df6 -> 0641043a4


Add ExpressionFactoryValuesTest


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/0641043a
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/0641043a
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/0641043a

Branch: refs/heads/calcite
Commit: 0641043a4dda12b399f914c7281846fa7fbd407d
Parents: c8c7f0d
Author: maryannxue wei@intel.com
Authored: Mon Jul 20 09:34:34 2015 -0400
Committer: maryannxue wei@intel.com
Committed: Mon Jul 20 09:34:34 2015 -0400

--
 phoenix-core/pom.xml|  5 ++
 .../calcite/jdbc/PhoenixPrepareImpl.java|  2 -
 .../phoenix/calcite/rel/PhoenixValues.java  |  7 +++
 .../calcite/rules/PhoenixConverterRules.java| 59 +++-
 .../calcite/ExpressionFactoryValuesTest.java| 23 
 pom.xml |  6 ++
 6 files changed, 60 insertions(+), 42 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/0641043a/phoenix-core/pom.xml
--
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
index 2f8f006..b5d3cbf 100644
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@ -351,6 +351,11 @@
 /dependency
 dependency
   groupIdorg.apache.calcite/groupId
+  artifactIdcalcite-core/artifactId
+  typetest-jar/type
+/dependency
+dependency
+  groupIdorg.apache.calcite/groupId
   artifactIdcalcite-linq4j/artifactId
 /dependency
 dependency

http://git-wip-us.apache.org/repos/asf/phoenix/blob/0641043a/phoenix-core/src/main/java/org/apache/phoenix/calcite/jdbc/PhoenixPrepareImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/calcite/jdbc/PhoenixPrepareImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/calcite/jdbc/PhoenixPrepareImpl.java
index 5cc911d..828497f 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/calcite/jdbc/PhoenixPrepareImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/calcite/jdbc/PhoenixPrepareImpl.java
@@ -1,7 +1,5 @@
 package org.apache.phoenix.calcite.jdbc;
 
-import java.util.Map;
-
 import org.apache.calcite.jdbc.CalcitePrepare;
 import org.apache.calcite.jdbc.CalciteSchema;
 import org.apache.calcite.plan.RelOptCostFactory;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/0641043a/phoenix-core/src/main/java/org/apache/phoenix/calcite/rel/PhoenixValues.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/calcite/rel/PhoenixValues.java 
b/phoenix-core/src/main/java/org/apache/phoenix/calcite/rel/PhoenixValues.java
index c982a28..6cb9f1a 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/calcite/rel/PhoenixValues.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/calcite/rel/PhoenixValues.java
@@ -3,6 +3,8 @@ package org.apache.phoenix.calcite.rel;
 import java.util.List;
 
 import org.apache.calcite.plan.RelOptCluster;
+import org.apache.calcite.plan.RelOptCost;
+import org.apache.calcite.plan.RelOptPlanner;
 import org.apache.calcite.plan.RelTraitSet;
 import org.apache.calcite.rel.RelNode;
 import org.apache.calcite.rel.core.Values;
@@ -34,6 +36,11 @@ public class PhoenixValues extends Values implements 
PhoenixRel {
 }
 
 @Override
+public RelOptCost computeSelfCost(RelOptPlanner planner) {
+return super.computeSelfCost(planner).multiplyBy(PHOENIX_FACTOR);
+}
+
+@Override
 public QueryPlan implement(Implementor implementor) {
 throw new UnsupportedOperationException();
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/0641043a/phoenix-core/src/main/java/org/apache/phoenix/calcite/rules/PhoenixConverterRules.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/calcite/rules/PhoenixConverterRules.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/calcite/rules/PhoenixConverterRules.java
index ea34a1e..44ac971 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/calcite/rules/PhoenixConverterRules.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/calcite/rules/PhoenixConverterRules.java
@@ -25,6 +25,7 @@ import org.apache.calcite.rel.core.JoinRelType;
 import org.apache.calcite.rel.core.Project;
 import org.apache.calcite.rel.core.Sort;
 import org.apache.calcite.rel.core.Union;
+import org.apache.calcite.rel.core.Values;
 import org.apache.calcite.rel.logical.LogicalAggregate;
 import org.apache.calcite.rel.logical.LogicalFilter;
 import org.apache.calcite.rel.logical.LogicalJoin;

phoenix git commit: PHOENIX-2131 Closing paren in CastParseNode SQL

2015-07-20 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.4-HBase-1.1 eeefd6d12 -> b07f68a60


PHOENIX-2131 Closing paren in CastParseNode SQL

Add a missing closing parenthesis in CastParseNode.toSQL.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b07f68a6
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b07f68a6
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b07f68a6

Branch: refs/heads/4.4-HBase-1.1
Commit: b07f68a603dca48a53828f8c20c5f92478bf69e3
Parents: eeefd6d
Author: Gabriel Reid gr...@apache.org
Authored: Sun Jul 19 17:46:48 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jul 20 15:24:26 2015 +0200

--
 .../org/apache/phoenix/parse/CastParseNode.java |  2 +-
 .../apache/phoenix/parse/CastParseNodeTest.java | 57 
 2 files changed, 58 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b07f68a6/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java 
b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
index 78be616..3e03613 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
@@ -133,7 +133,7 @@ public class CastParseNode extends UnaryParseNode {
 if (isArray) {
 buf.append(' ');
 buf.append(PDataType.ARRAY_TYPE_SUFFIX);
-buf.append(' ');
 }
+buf.append());
 }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b07f68a6/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
new file mode 100644
index 000..b62d9a9
--- /dev/null
+++ b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.phoenix.parse;
+
+import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.schema.types.PDecimal;
+import org.apache.phoenix.schema.types.PDouble;
+import org.apache.phoenix.schema.types.PLong;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class CastParseNodeTest {
+
+@Test
+public void testToSQL() {
+ColumnParseNode columnParseNode = new 
ColumnParseNode(TableName.create(SCHEMA1, TABLE1), V);
+CastParseNode castParseNode = new CastParseNode(columnParseNode, 
PLong.INSTANCE, null, null, false);
+StringBuilder stringBuilder = new StringBuilder();
+castParseNode.toSQL(null, stringBuilder);
+assertEquals( CAST(TABLE1.V AS BIGINT), stringBuilder.toString());
+}
+
+@Test
+public void testToSQL_WithLengthAndScale() {
+ColumnParseNode columnParseNode = new 
ColumnParseNode(TableName.create(SCHEMA1, TABLE1), V);
+CastParseNode castParseNode = new CastParseNode(columnParseNode, 
PDecimal.INSTANCE, 5, 3, false);
+StringBuilder stringBuilder = new StringBuilder();
+castParseNode.toSQL(null, stringBuilder);
+assertEquals( CAST(TABLE1.V AS DECIMAL(5,3)), 
stringBuilder.toString());
+}
+
+@Test
+public void testToSQL_ArrayType() {
+ColumnParseNode columnParseNode = new 
ColumnParseNode(TableName.create(SCHEMA1, TABLE1), V);
+CastParseNode castParseNode = new CastParseNode(columnParseNode, 
PLong.INSTANCE, null, null, true);
+StringBuilder stringBuilder = new StringBuilder();
+castParseNode.toSQL(null, stringBuilder);
+assertEquals( CAST(TABLE1.V AS BIGINT ARRAY), 
stringBuilder.toString());
+}
+}
\ No newline at end of file



phoenix git commit: PHOENIX-2131 Closing paren in CastParseNode SQL

2015-07-20 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.0 6d322103e -> 04e9d58d1


PHOENIX-2131 Closing paren in CastParseNode SQL

Add a missing closing parenthesis in CastParseNode.toSQL.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/04e9d58d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/04e9d58d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/04e9d58d

Branch: refs/heads/4.x-HBase-1.0
Commit: 04e9d58d173e5bed05ab893237a60214e1756520
Parents: 6d32210
Author: Gabriel Reid gr...@apache.org
Authored: Sun Jul 19 17:46:48 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jul 20 15:24:46 2015 +0200

--
 .../org/apache/phoenix/parse/CastParseNode.java |  2 +-
 .../apache/phoenix/parse/CastParseNodeTest.java | 57 
 2 files changed, 58 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/04e9d58d/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java 
b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
index 78be616..3e03613 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
@@ -133,7 +133,7 @@ public class CastParseNode extends UnaryParseNode {
 if (isArray) {
 buf.append(' ');
 buf.append(PDataType.ARRAY_TYPE_SUFFIX);
-buf.append(' ');
 }
+buf.append());
 }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/04e9d58d/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
new file mode 100644
index 000..b62d9a9
--- /dev/null
+++ b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.phoenix.parse;
+
+import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.schema.types.PDecimal;
+import org.apache.phoenix.schema.types.PDouble;
+import org.apache.phoenix.schema.types.PLong;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class CastParseNodeTest {
+
+@Test
+public void testToSQL() {
+ColumnParseNode columnParseNode = new 
ColumnParseNode(TableName.create(SCHEMA1, TABLE1), V);
+CastParseNode castParseNode = new CastParseNode(columnParseNode, 
PLong.INSTANCE, null, null, false);
+StringBuilder stringBuilder = new StringBuilder();
+castParseNode.toSQL(null, stringBuilder);
+assertEquals( CAST(TABLE1.V AS BIGINT), stringBuilder.toString());
+}
+
+@Test
+public void testToSQL_WithLengthAndScale() {
+ColumnParseNode columnParseNode = new 
ColumnParseNode(TableName.create(SCHEMA1, TABLE1), V);
+CastParseNode castParseNode = new CastParseNode(columnParseNode, 
PDecimal.INSTANCE, 5, 3, false);
+StringBuilder stringBuilder = new StringBuilder();
+castParseNode.toSQL(null, stringBuilder);
+assertEquals( CAST(TABLE1.V AS DECIMAL(5,3)), 
stringBuilder.toString());
+}
+
+@Test
+public void testToSQL_ArrayType() {
+ColumnParseNode columnParseNode = new 
ColumnParseNode(TableName.create(SCHEMA1, TABLE1), V);
+CastParseNode castParseNode = new CastParseNode(columnParseNode, 
PLong.INSTANCE, null, null, true);
+StringBuilder stringBuilder = new StringBuilder();
+castParseNode.toSQL(null, stringBuilder);
+assertEquals( CAST(TABLE1.V AS BIGINT ARRAY), 
stringBuilder.toString());
+}
+}
\ No newline at end of file



[28/34] phoenix git commit: PHOENIX-2029 Queries are making two rpc calls for getTable

2015-07-20 Thread greid
PHOENIX-2029 Queries are making two rpc calls for getTable


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/52f5b046
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/52f5b046
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/52f5b046

Branch: refs/heads/4.x-HBase-1.1
Commit: 52f5b04643914f33c2d00a1157ca767a32f1adb8
Parents: 43c722c
Author: Thomas D'Silva twdsi...@gmail.com
Authored: Mon Jun 8 15:30:40 2015 -0700
Committer: Thomas D'Silva tdsi...@salesforce.com
Committed: Wed Jun 17 11:16:48 2015 -0700

--
 .../org/apache/phoenix/rpc/UpdateCacheIT.java   | 139 +++
 .../apache/phoenix/compile/QueryCompiler.java   |   2 +-
 .../coprocessor/MetaDataEndpointImpl.java   |   6 +-
 .../apache/phoenix/schema/MetaDataClient.java   |  26 ++--
 4 files changed, 156 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/52f5b046/phoenix-core/src/it/java/org/apache/phoenix/rpc/UpdateCacheIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/rpc/UpdateCacheIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/rpc/UpdateCacheIT.java
new file mode 100644
index 000..c657e41
--- /dev/null
+++ b/phoenix-core/src/it/java/org/apache/phoenix/rpc/UpdateCacheIT.java
@@ -0,0 +1,139 @@
+package org.apache.phoenix.rpc;
+
+import static org.apache.phoenix.util.TestUtil.INDEX_DATA_SCHEMA;
+import static org.apache.phoenix.util.TestUtil.MUTABLE_INDEX_DATA_TABLE;
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.anyLong;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Matchers.isNull;
+import static org.mockito.Mockito.reset;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+import java.math.BigDecimal;
+import java.sql.Connection;
+import java.sql.Date;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT;
+import org.apache.phoenix.end2end.Shadower;
+import org.apache.phoenix.jdbc.PhoenixEmbeddedDriver;
+import org.apache.phoenix.query.ConnectionQueryServices;
+import org.apache.phoenix.query.QueryConstants;
+import org.apache.phoenix.schema.MetaDataClient;
+import org.apache.phoenix.schema.PName;
+import org.apache.phoenix.schema.types.PVarchar;
+import org.apache.phoenix.util.DateUtil;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.ReadOnlyProps;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import com.google.common.collect.Maps;
+
+/**
+ * Verifies the number of rpcs calls from {@link MetaDataClient} updateCache() 
+ * for transactional and non-transactional tables.
+ */
+public class UpdateCacheIT extends BaseHBaseManagedTimeIT {
+   
+   public static final int NUM_MILLIS_IN_DAY = 8640;
+
+@Before
+public void setUp() throws SQLException {
+ensureTableCreated(getUrl(), MUTABLE_INDEX_DATA_TABLE);
+}
+
+   @BeforeClass
+@Shadower(classBeingShadowed = BaseHBaseManagedTimeIT.class)
+public static void doSetup() throws Exception {
+MapString,String props = Maps.newHashMapWithExpectedSize(3);
+setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
+}
+   
+   public static void validateRowKeyColumns(ResultSet rs, int i) throws 
SQLException {
+   assertTrue(rs.next());
+   assertEquals(rs.getString(1), varchar + String.valueOf(i));
+   assertEquals(rs.getString(2), char + String.valueOf(i));
+   assertEquals(rs.getInt(3), i);
+   assertEquals(rs.getInt(4), i);
+   assertEquals(rs.getBigDecimal(5), new BigDecimal(i*0.5d));
+   Date date = new Date(DateUtil.parseDate(2015-01-01 
00:00:00).getTime() + (i - 1) * NUM_MILLIS_IN_DAY);
+   assertEquals(rs.getDate(6), date);
+   }
+   
+   public static void setRowKeyColumns(PreparedStatement stmt, int i) 
throws SQLException {
+// insert row
+stmt.setString(1, varchar + String.valueOf(i));
+stmt.setString(2, char + String.valueOf(i));
+stmt.setInt(3, i);
+stmt.setLong(4, i);
+stmt.setBigDecimal(5, new BigDecimal(i*0.5d));
+Date date = new Date(DateUtil.parseDate(2015-01-01 
00:00:00).getTime() + (i - 1) * NUM_MILLIS_IN_DAY);
+stmt.setDate(6, date);
+}
+   
+   @Test
+   public 

phoenix git commit: PHOENIX-2131 Closing paren in CastParseNode SQL

2015-07-20 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.4-HBase-1.0 d226c6a63 -> 2501ecf74


PHOENIX-2131 Closing paren in CastParseNode SQL

Add a missing closing parenthesis in CastParseNode.toSQL.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/2501ecf7
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/2501ecf7
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/2501ecf7

Branch: refs/heads/4.4-HBase-1.0
Commit: 2501ecf744a5b7046997317a6a2f360abffece32
Parents: d226c6a
Author: Gabriel Reid gr...@apache.org
Authored: Sun Jul 19 17:46:48 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jul 20 15:24:14 2015 +0200

--
 .../org/apache/phoenix/parse/CastParseNode.java |  2 +-
 .../apache/phoenix/parse/CastParseNodeTest.java | 57 
 2 files changed, 58 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/2501ecf7/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java 
b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
index 78be616..3e03613 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
@@ -133,7 +133,7 @@ public class CastParseNode extends UnaryParseNode {
 if (isArray) {
 buf.append(' ');
 buf.append(PDataType.ARRAY_TYPE_SUFFIX);
-buf.append(' ');
 }
+buf.append());
 }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/2501ecf7/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
new file mode 100644
index 000..b62d9a9
--- /dev/null
+++ b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.phoenix.parse;
+
+import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.schema.types.PDecimal;
+import org.apache.phoenix.schema.types.PDouble;
+import org.apache.phoenix.schema.types.PLong;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class CastParseNodeTest {
+
+@Test
+public void testToSQL() {
+ColumnParseNode columnParseNode = new 
ColumnParseNode(TableName.create(SCHEMA1, TABLE1), V);
+CastParseNode castParseNode = new CastParseNode(columnParseNode, 
PLong.INSTANCE, null, null, false);
+StringBuilder stringBuilder = new StringBuilder();
+castParseNode.toSQL(null, stringBuilder);
+assertEquals( CAST(TABLE1.V AS BIGINT), stringBuilder.toString());
+}
+
+@Test
+public void testToSQL_WithLengthAndScale() {
+ColumnParseNode columnParseNode = new 
ColumnParseNode(TableName.create(SCHEMA1, TABLE1), V);
+CastParseNode castParseNode = new CastParseNode(columnParseNode, 
PDecimal.INSTANCE, 5, 3, false);
+StringBuilder stringBuilder = new StringBuilder();
+castParseNode.toSQL(null, stringBuilder);
+assertEquals( CAST(TABLE1.V AS DECIMAL(5,3)), 
stringBuilder.toString());
+}
+
+@Test
+public void testToSQL_ArrayType() {
+ColumnParseNode columnParseNode = new 
ColumnParseNode(TableName.create(SCHEMA1, TABLE1), V);
+CastParseNode castParseNode = new CastParseNode(columnParseNode, 
PLong.INSTANCE, null, null, true);
+StringBuilder stringBuilder = new StringBuilder();
+castParseNode.toSQL(null, stringBuilder);
+assertEquals( CAST(TABLE1.V AS BIGINT ARRAY), 
stringBuilder.toString());
+}
+}
\ No newline at end of file



[01/34] phoenix git commit: PHOENIX-1681 Use the new Region Interface (Andrew Purtell)

2015-07-20 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.1 [created] 954a4a1b6


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ea622d5f/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsCollector.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsCollector.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsCollector.java
index 272cac6..e7e1dd7 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsCollector.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsCollector.java
@@ -31,8 +31,8 @@ import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
+import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.phoenix.coprocessor.MetaDataProtocol;
@@ -111,7 +111,7 @@ public class StatisticsCollector {
 this.statsTable.close();
 }
 
-public void updateStatistic(HRegion region) {
+public void updateStatistic(Region region) {
 try {
 ArrayListMutation mutations = new ArrayListMutation();
 writeStatsToStatsTable(region, true, mutations, 
TimeKeeper.SYSTEM.getCurrentTime());
@@ -126,7 +126,7 @@ public class StatisticsCollector {
 }
 }
 
-private void writeStatsToStatsTable(final HRegion region,
+private void writeStatsToStatsTable(final Region region,
 boolean delete, ListMutation mutations, long currentTime) throws 
IOException {
 try {
 // update the statistics table
@@ -215,7 +215,7 @@ public class StatisticsCollector {
 }
 }
 
-public InternalScanner createCompactionScanner(HRegion region, Store 
store, InternalScanner s) throws IOException {
+public InternalScanner createCompactionScanner(Region region, Store store, 
InternalScanner s) throws IOException {
 // See if this is for Major compaction
 if (logger.isDebugEnabled()) {
 logger.debug(Compaction scanner created for stats);
@@ -224,13 +224,13 @@ public class StatisticsCollector {
 return getInternalScanner(region, store, s, cfKey);
 }
 
-public void splitStats(HRegion parent, HRegion left, HRegion right) {
+public void splitStats(Region parent, Region left, Region right) {
 try {
 if (logger.isDebugEnabled()) {
 logger.debug(Collecting stats for split of  + 
parent.getRegionInfo() +  into  + left.getRegionInfo() +  and  + 
right.getRegionInfo());
 }
 ListMutation mutations = Lists.newArrayListWithExpectedSize(3);
-for (byte[] fam : parent.getStores().keySet()) {
+for (byte[] fam : parent.getTableDesc().getFamiliesKeys()) {
statsTable.splitStats(parent, left, right, this, new 
ImmutableBytesPtr(fam), mutations);
 }
 if (logger.isDebugEnabled()) {
@@ -243,7 +243,7 @@ public class StatisticsCollector {
 }
 }
 
-protected InternalScanner getInternalScanner(HRegion region, Store store,
+protected InternalScanner getInternalScanner(Region region, Store store,
 InternalScanner internalScan, ImmutableBytesPtr family) {
 return new StatisticsScanner(this, statsTable, region, internalScan, 
family);
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ea622d5f/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsScanner.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsScanner.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsScanner.java
index 0e50923..582c4de 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsScanner.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsScanner.java
@@ -26,9 +26,9 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Mutation;
-import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.ScannerContext;
+import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 
 /**
@@ -38,11 +38,11 @@ public class StatisticsScanner implements InternalScanner {
 private static final Log LOG = 

[07/34] phoenix git commit: PHOENIX-2005 Connection utilities omit zk client port, parent znode

2015-07-20 Thread greid
PHOENIX-2005 Connection utilities omit zk client port, parent znode


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c6b37b97
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c6b37b97
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c6b37b97

Branch: refs/heads/4.x-HBase-1.1
Commit: c6b37b979da1b514bcb9257c7e095e39b0c2c215
Parents: 3cdc323
Author: Nick Dimiduk ndimi...@apache.org
Authored: Tue May 26 11:11:48 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Tue May 26 13:27:03 2015 -0700

--
 .../phoenix/jdbc/PhoenixEmbeddedDriver.java | 28 --
 .../phoenix/mapreduce/CsvBulkLoadTool.java  | 93 ++--
 .../phoenix/mapreduce/CsvToKeyValueMapper.java  | 26 +-
 .../query/ConnectionQueryServicesImpl.java  |  4 +-
 .../java/org/apache/phoenix/util/QueryUtil.java | 45 --
 .../phoenix/jdbc/PhoenixEmbeddedDriverTest.java | 14 ++-
 .../phoenix/mapreduce/CsvBulkLoadToolTest.java  | 11 ---
 .../mapreduce/CsvToKeyValueMapperTest.java  | 15 
 .../org/apache/phoenix/util/QueryUtilTest.java  | 33 ---
 9 files changed, 139 insertions(+), 130 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c6b37b97/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
index 9e95667..2451603 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
@@ -31,6 +31,7 @@ import java.util.logging.Logger;
 
 import javax.annotation.concurrent.Immutable;
 
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.phoenix.coprocessor.MetaDataProtocol;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.exception.SQLExceptionInfo;
@@ -174,10 +175,10 @@ public abstract class PhoenixEmbeddedDriver implements 
Driver, org.apache.phoeni
 }
 
 /**
- * 
+ *
  * Class to encapsulate connection info for HBase
  *
- * 
+ *
  * @since 0.1.1
  */
 public static class ConnectionInfo {
@@ -204,12 +205,18 @@ public abstract class PhoenixEmbeddedDriver implements 
Driver, org.apache.phoeni
 return false;
 }
 
-protected static ConnectionInfo create(String url) throws SQLException 
{
-StringTokenizer tokenizer = new StringTokenizer(url == null ?  : 
url.substring(PhoenixRuntime.JDBC_PROTOCOL.length()),DELIMITERS, true);
+public static ConnectionInfo create(String url) throws SQLException {
+url = url == null ?  : url;
+url = url.startsWith(PhoenixRuntime.JDBC_PROTOCOL)
+? url.substring(PhoenixRuntime.JDBC_PROTOCOL.length())
+: url;
+StringTokenizer tokenizer = new StringTokenizer(url, DELIMITERS, 
true);
 int nTokens = 0;
 String[] tokens = new String[5];
 String token = null;
-while (tokenizer.hasMoreTokens()  
!(token=tokenizer.nextToken()).equals(TERMINATOR)  tokenizer.hasMoreTokens() 
 nTokens  tokens.length) {
+while (tokenizer.hasMoreTokens() 
+!(token=tokenizer.nextToken()).equals(TERMINATOR) 
+tokenizer.hasMoreTokens()  nTokens  tokens.length) {
 token = tokenizer.nextToken();
 // This would mean we have an empty string for a token which 
is illegal
 if (DELIMITERS.contains(token)) {
@@ -316,8 +323,7 @@ public abstract class PhoenixEmbeddedDriver implements 
Driver, org.apache.phoeni
 private final String principal;
 private final String keytab;
 
-// used for testing
-ConnectionInfo(String zookeeperQuorum, Integer port, String rootNode, 
String principal, String keytab) {
+public ConnectionInfo(String zookeeperQuorum, Integer port, String 
rootNode, String principal, String keytab) {
 this.zookeeperQuorum = zookeeperQuorum;
 this.port = port;
 this.rootNode = rootNode;
@@ -326,8 +332,7 @@ public abstract class PhoenixEmbeddedDriver implements 
Driver, org.apache.phoeni
 this.keytab = keytab;
 }
 
-// used for testing
-ConnectionInfo(String zookeeperQuorum, Integer port, String rootNode) {
+public ConnectionInfo(String zookeeperQuorum, Integer port, String 
rootNode) {
this(zookeeperQuorum, port, rootNode, null, null);
 }
 
@@ -417,6 +422,11 @@ 

[29/34] phoenix git commit: PHOENIX-1941 Phoenix tests are failing in linux env with missing class: StaticMapping (Alicia Ying Shu)

2015-07-20 Thread greid
PHOENIX-1941 Phoenix tests are failing in linux env with missing class: 
StaticMapping (Alicia Ying Shu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/329d7494
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/329d7494
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/329d7494

Branch: refs/heads/4.x-HBase-1.1
Commit: 329d74948521ed974593e455369a27d9cd705249
Parents: 52f5b04
Author: Nick Dimiduk ndimi...@apache.org
Authored: Wed Jun 17 12:17:33 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Wed Jun 17 12:23:47 2015 -0700

--
 .../phoenix/end2end/End2EndTestDriver.java   | 19 +++
 1 file changed, 15 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/329d7494/phoenix-core/src/it/java/org/apache/phoenix/end2end/End2EndTestDriver.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/End2EndTestDriver.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/End2EndTestDriver.java
index 26d18cf..743f729 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/End2EndTestDriver.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/End2EndTestDriver.java
@@ -21,6 +21,7 @@ package org.apache.phoenix.end2end;
 
 import java.io.IOException;
 import java.io.PrintStream;
+import java.lang.annotation.Annotation;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
@@ -79,10 +80,20 @@ public class End2EndTestDriver extends AbstractHBaseTool {
 
   @Override
  public boolean isCandidateClass(Class<?> c) {
-return testFilterRe.matcher(c.getName()).find() 
-  // Our pattern will match the below NON-IntegrationTest. Rather than
-  // do exotic regex, just filter it out here
-  super.isCandidateClass(c);
+  Annotation[] annotations = c.getAnnotations();
+  for (Annotation curAnnotation : annotations) {
+  if 
(curAnnotation.toString().contains(NeedsOwnMiniClusterTest)) {
+  /* Skip tests that aren't designed to run against a live 
cluster.
+   * For a live cluster, we cannot bring it up and down as 
required
+   * for these tests to run.
+   */
+  return false;
+  }
+  }
+      return testFilterRe.matcher(c.getName()).find() &&
+          // Our pattern will match the below NON-IntegrationTest. Rather than
+          // do exotic regex, just filter it out here
+          super.isCandidateClass(c);
   }
 }
 



[12/34] phoenix git commit: PHOENIX-1939 Test are failing with DoNotRetryIOException: ATABLE: null (Alicia Ying Shu)

2015-07-20 Thread greid
PHOENIX-1939 Test are failing with DoNotRetryIOException: ATABLE: null (Alicia 
Ying Shu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a600cc4d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a600cc4d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a600cc4d

Branch: refs/heads/4.x-HBase-1.1
Commit: a600cc4d7acc2c828ae7782e59d094f99e5631f0
Parents: c95e28d
Author: Nick Dimiduk ndimi...@apache.org
Authored: Fri May 29 17:12:25 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Fri May 29 17:13:08 2015 -0700

--
 .../src/main/java/org/apache/phoenix/schema/PTableImpl.java  | 4 ++--
 .../src/test/java/org/apache/phoenix/query/BaseTest.java | 3 ++-
 2 files changed, 4 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a600cc4d/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
index bf4420c..bdc95b8 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
@@ -88,8 +88,8 @@ public class PTableImpl implements PTable {
 
 private PTableKey key;
 private PName name;
-private PName schemaName;
-private PName tableName;
+private PName schemaName = PName.EMPTY_NAME;
+private PName tableName = PName.EMPTY_NAME;
 private PName tenantId;
 private PTableType type;
 private PIndexState state;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a600cc4d/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
--
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
index 54ae670..b0574c3 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
@@ -121,7 +121,6 @@ import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.ipc.PhoenixRpcSchedulerFactory;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
 import org.apache.hadoop.hbase.ipc.controller.ServerRpcControllerFactory;
-import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.LocalIndexMerger;
 import org.apache.hadoop.hbase.regionserver.RSRpcServices;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -829,6 +828,7 @@ public abstract class BaseTest {
                logger.info("Table " + fullTableName + " is already deleted.");
            }
        }
 }
+rs.close();
 if (lastTenantId != null) {
 conn.close();
 }
@@ -860,6 +860,7 @@ public abstract class BaseTest {
            logger.info("DROP SEQUENCE STATEMENT: DROP SEQUENCE " +
                    SchemaUtil.getEscapedTableName(rs.getString(2), rs.getString(3)));
            conn.createStatement().execute("DROP SEQUENCE " +
                    SchemaUtil.getEscapedTableName(rs.getString(2), rs.getString(3)));
 }
+rs.close();
 }
 
 protected static void initSumDoubleValues(byte[][] splits, String url) 
throws Exception {



[19/34] phoenix git commit: PHOENIX-2012 RowKeyComparisonFilter logs unencoded data at DEBUG level

2015-07-20 Thread greid
PHOENIX-2012 RowKeyComparisonFilter logs unencoded data at DEBUG level


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9c5f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9c5f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9c5f

Branch: refs/heads/4.x-HBase-1.1
Commit: 9c5fae456f3a0934e43e02af0ef5188b9337
Parents: 29ea503
Author: Nick Dimiduk ndimi...@apache.org
Authored: Wed May 27 15:58:32 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Mon Jun 1 15:57:15 2015 -0700

--
 .../java/org/apache/phoenix/filter/RowKeyComparisonFilter.java  | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/9c5f/phoenix-core/src/main/java/org/apache/phoenix/filter/RowKeyComparisonFilter.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/filter/RowKeyComparisonFilter.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/filter/RowKeyComparisonFilter.java
index 2e2037b..b7de7ac 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/filter/RowKeyComparisonFilter.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/filter/RowKeyComparisonFilter.java
@@ -73,8 +73,9 @@ public class RowKeyComparisonFilter extends 
BooleanExpressionFilter {
 if (evaluate) {
 inputTuple.setKey(v.getRowArray(), v.getRowOffset(), 
v.getRowLength());
 this.keepRow = Boolean.TRUE.equals(evaluate(inputTuple));
-if (logger.isDebugEnabled()) {
-    logger.debug("RowKeyComparisonFilter: " + (this.keepRow ?
"KEEP" : "FILTER") + " row " + inputTuple);
+if (logger.isTraceEnabled()) {
+    logger.trace("RowKeyComparisonFilter: " + (this.keepRow ?
"KEEP" : "FILTER")
+            + " row " + inputTuple);
 }
 evaluate = false;
 }



[02/34] phoenix git commit: PHOENIX-1681 Use the new Region Interface (Andrew Purtell)

2015-07-20 Thread greid
PHOENIX-1681 Use the new Region Interface (Andrew Purtell)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ea622d5f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ea622d5f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ea622d5f

Branch: refs/heads/4.x-HBase-1.1
Commit: ea622d5f7ab5c37d2ecf8be6054e5ed42f36a035
Parents: 98271b8
Author: Enis Soztutar e...@apache.org
Authored: Thu May 21 23:22:54 2015 -0700
Committer: Enis Soztutar e...@apache.org
Committed: Fri May 22 00:30:56 2015 -0700

--
 ...ReplayWithIndexWritesAndCompressedWALIT.java |  4 +-
 .../EndToEndCoveredColumnsIndexBuilderIT.java   |  4 +-
 .../IndexHalfStoreFileReaderGenerator.java  |  9 +-
 .../regionserver/IndexSplitTransaction.java | 65 +-
 .../hbase/regionserver/LocalIndexMerger.java| 16 ++--
 .../hbase/regionserver/LocalIndexSplitter.java  | 11 +--
 .../coprocessor/BaseScannerRegionObserver.java  | 26 +++---
 .../GroupedAggregateRegionObserver.java | 13 +--
 .../coprocessor/MetaDataEndpointImpl.java   | 94 ++--
 .../phoenix/coprocessor/ScanRegionObserver.java | 17 ++--
 .../coprocessor/SequenceRegionObserver.java | 16 ++--
 .../UngroupedAggregateRegionObserver.java   | 29 +++---
 .../hbase/index/covered/data/LocalTable.java|  5 +-
 .../write/ParallelWriterIndexCommitter.java |  8 +-
 .../recovery/PerRegionIndexWriteCache.java  | 10 +--
 .../recovery/StoreFailuresInCachePolicy.java|  4 +-
 .../TrackingParallelWriterIndexCommitter.java   |  8 +-
 .../phoenix/index/PhoenixIndexBuilder.java  |  4 +-
 .../apache/phoenix/index/PhoenixIndexCodec.java | 14 ++-
 .../schema/stats/StatisticsCollector.java   | 14 +--
 .../phoenix/schema/stats/StatisticsScanner.java | 16 ++--
 .../phoenix/schema/stats/StatisticsWriter.java  | 16 ++--
 .../java/org/apache/phoenix/util/IndexUtil.java | 38 
 .../index/covered/TestLocalTableState.java  |  8 +-
 .../index/write/TestWALRecoveryCaching.java | 17 ++--
 .../recovery/TestPerRegionIndexWriteCache.java  |  6 +-
 26 files changed, 230 insertions(+), 242 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ea622d5f/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
 
b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
index 3b8ff29..611ba68 100644
--- 
a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
@@ -159,7 +159,7 @@ public class WALReplayWithIndexWritesAndCompressedWALIT {
   }
 
   /**
-   * Test writing edits into an HRegion, closing it, splitting logs, opening 
Region again. Verify
+   * Test writing edits into an region, closing it, splitting logs, opening 
Region again. Verify
* seqids.
* @throws Exception on failure
*/
@@ -183,7 +183,7 @@ public class WALReplayWithIndexWritesAndCompressedWALIT {
 builder.build(htd);
 
 // create the region + its WAL
-HRegion region0 = HRegion.createHRegion(hri, hbaseRootDir, this.conf, htd);
+HRegion region0 = HRegion.createHRegion(hri, hbaseRootDir, this.conf, 
htd); // FIXME: Uses private type
 region0.close();
 region0.getWAL().close();
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ea622d5f/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
index d90733f..6b2309e 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.util.EnvironmentEdge;
 import 

[31/34] phoenix git commit: PHOENIX-1975 Detect and use HBASE_HOME when set

2015-07-20 Thread greid
PHOENIX-1975 Detect and use HBASE_HOME when set


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/980d29c5
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/980d29c5
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/980d29c5

Branch: refs/heads/4.x-HBase-1.1
Commit: 980d29c5acf785dc90ece1a7f047711e8d522a2e
Parents: 05b1b8b
Author: Nick Dimiduk ndimi...@apache.org
Authored: Wed Jun 24 13:59:00 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Wed Jun 24 13:59:28 2015 -0700

--
 bin/phoenix_utils.py | 10 +-
 1 file changed, 9 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/980d29c5/bin/phoenix_utils.py
--
diff --git a/bin/phoenix_utils.py b/bin/phoenix_utils.py
index 383e0e1..bfb4737 100755
--- a/bin/phoenix_utils.py
+++ b/bin/phoenix_utils.py
@@ -65,7 +65,15 @@ def setPath():
 phoenix_class_path = os.getenv('PHOENIX_CLASS_PATH','')
 
 global hbase_conf_dir
-hbase_conf_dir = os.getenv('HBASE_CONF_DIR', os.getenv('HBASE_CONF_PATH', 
'.'))
+# if HBASE_CONF_DIR set explicitly, use that
+hbase_conf_dir = os.getenv('HBASE_CONF_DIR', os.getenv('HBASE_CONF_PATH'))
+if not hbase_conf_dir:
+# else fall back to HBASE_HOME
+if os.getenv('HBASE_HOME'):
+hbase_conf_dir = os.path.join(os.getenv('HBASE_HOME'), conf)
+else:
+# default to pwd
+hbase_conf_dir = '.'
 global hbase_conf_path # keep conf_path around for backward compatibility
 hbase_conf_path = hbase_conf_dir
 



[14/34] phoenix git commit: PHOENIX-2022 Make BaseRegionScanner.next abstract

2015-07-20 Thread greid
PHOENIX-2022 Make BaseRegionScanner.next abstract

Avoid infinite recursion by removing a recursive call within
BaseRegionScanner.next, which was already being used as an
abstract method.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c1882ee2
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c1882ee2
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c1882ee2

Branch: refs/heads/4.x-HBase-1.1
Commit: c1882ee279293b560fda9beb10ac50b8d3ead589
Parents: b2c0cb9
Author: Gabriel Reid gabri...@ngdata.com
Authored: Mon Jun 1 08:57:22 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jun 1 17:22:49 2015 +0200

--
 .../java/org/apache/phoenix/coprocessor/BaseRegionScanner.java   | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c1882ee2/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
index 828f776..3f73048 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
@@ -33,9 +33,7 @@ public abstract class BaseRegionScanner implements 
RegionScanner {
 }
 
 @Override
-public boolean next(List<Cell> results) throws IOException {
-    return next(results);
-}
+public abstract boolean next(List<Cell> results) throws IOException;
 
 @Override
 public boolean next(List<Cell> result, ScannerContext scannerContext)
throws IOException {



[16/34] phoenix git commit: PHOENIX-2016 Some Phoenix tests failed with NPE(Alicia Ying Shu)

2015-07-20 Thread greid
PHOENIX-2016 Some Phoenix tests failed with NPE(Alicia Ying Shu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/dc46b144
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/dc46b144
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/dc46b144

Branch: refs/heads/4.x-HBase-1.1
Commit: dc46b144aa9eaf315c3969669dab7f0a50d94281
Parents: eb9452d
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Mon Jun 1 21:34:16 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Mon Jun 1 21:34:16 2015 +0530

--
 phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/dc46b144/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
--
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
index b0574c3..fa78656 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
@@ -1627,6 +1627,7 @@ public abstract class BaseTest {
  * Disable and drop all the tables except SYSTEM.CATALOG and 
SYSTEM.SEQUENCE
  */
 private static void disableAndDropNonSystemTables() throws Exception {
+if (driver == null) return;
 HBaseAdmin admin = driver.getConnectionQueryServices(null, 
null).getAdmin();
 try {
 HTableDescriptor[] tables = admin.listTables();



[30/34] phoenix git commit: PHOENIX-1935 org.apache.phoenix.end2end.ArithmeticQueryIT tests are failing (Alicia Ying Shu)

2015-07-20 Thread greid
PHOENIX-1935 org.apache.phoenix.end2end.ArithmeticQueryIT tests are failing 
(Alicia Ying Shu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/05b1b8b1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/05b1b8b1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/05b1b8b1

Branch: refs/heads/4.x-HBase-1.1
Commit: 05b1b8b13f4137602567f67642946c883646d4d8
Parents: 329d749
Author: Nick Dimiduk ndimi...@apache.org
Authored: Wed Jun 17 12:28:35 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Wed Jun 17 12:31:28 2015 -0700

--
 .../src/it/java/org/apache/phoenix/end2end/BaseViewIT.java  | 2 ++
 phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java | 3 +++
 .../src/test/java/org/apache/phoenix/query/BaseTest.java| 5 -
 3 files changed, 9 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/05b1b8b1/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
index b9d7180..3140077 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
@@ -98,6 +98,7 @@ public abstract class BaseViewIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 assertEquals(1, rs.getInt(1));
 assertEquals(121, rs.getInt(2));
 assertFalse(rs.next());
+conn.close();
 }
 
 protected void testUpdatableViewIndex(Integer saltBuckets) throws 
Exception {
@@ -179,6 +180,7 @@ public abstract class BaseViewIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 + CLIENT MERGE SORT,
 QueryUtil.getExplainPlan(rs));
 }
+conn.close();
 }
 
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/05b1b8b1/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
index 266438d..fb58a8f 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
@@ -92,8 +92,11 @@ public class ViewIT extends BaseViewIT {
 fail();
 } catch (ReadOnlyTableException e) {
 
+} finally {
+conn.close();
 }
 
+conn = DriverManager.getConnection(getUrl());
 int count = 0;
 ResultSet rs = conn.createStatement().executeQuery(SELECT k FROM v2);
 while (rs.next()) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/05b1b8b1/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
--
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
index fa78656..3f09518 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
@@ -115,6 +115,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.IntegrationTestingUtility;
+import org.apache.hadoop.hbase.TableNotEnabledException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
@@ -1634,7 +1635,9 @@ public abstract class BaseTest {
 for (HTableDescriptor table : tables) {
 String schemaName = 
SchemaUtil.getSchemaNameFromFullName(table.getName());
 if (!QueryConstants.SYSTEM_SCHEMA_NAME.equals(schemaName)) {
-admin.disableTable(table.getName());
+try{
+admin.disableTable(table.getName());
+} catch (TableNotEnabledException ignored){}
 admin.deleteTable(table.getName());
 }
 }



[04/34] phoenix git commit: PHOENIX-1763 Support building with HBase-1.1.0

2015-07-20 Thread greid
PHOENIX-1763 Support building with HBase-1.1.0


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/98271b88
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/98271b88
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/98271b88

Branch: refs/heads/4.x-HBase-1.1
Commit: 98271b888c113f10e174205434e05d3b36b7eb67
Parents: bf01eb2
Author: Enis Soztutar e...@apache.org
Authored: Thu May 21 23:08:26 2015 -0700
Committer: Enis Soztutar e...@apache.org
Committed: Fri May 22 00:30:56 2015 -0700

--
 phoenix-core/pom.xml| 17 +++--
 .../regionserver/IndexHalfStoreFileReader.java  | 31 ++--
 .../regionserver/IndexSplitTransaction.java | 39 --
 .../hbase/regionserver/LocalIndexMerger.java|  3 +-
 .../cache/aggcache/SpillableGroupByCache.java   | 13 +++-
 .../phoenix/coprocessor/BaseRegionScanner.java  | 12 +--
 .../coprocessor/BaseScannerRegionObserver.java  | 77 +++-
 .../coprocessor/DelegateRegionScanner.java  | 23 --
 .../GroupedAggregateRegionObserver.java | 53 --
 .../coprocessor/HashJoinRegionScanner.java  | 60 ---
 .../coprocessor/MetaDataRegionObserver.java | 23 +++---
 .../phoenix/coprocessor/ScanRegionObserver.java | 11 ++-
 .../UngroupedAggregateRegionObserver.java   | 55 +++---
 .../hbase/index/covered/data/LocalTable.java|  2 +-
 .../index/covered/filter/FamilyOnlyFilter.java  |  6 +-
 .../index/scanner/FilteredKeyValueScanner.java  |  2 +-
 .../phoenix/index/PhoenixIndexBuilder.java  |  6 +-
 .../iterate/RegionScannerResultIterator.java|  9 ++-
 .../phoenix/schema/stats/StatisticsScanner.java | 10 ++-
 .../hbase/ipc/PhoenixIndexRpcSchedulerTest.java |  6 +-
 .../index/covered/TestLocalTableState.java  |  1 -
 .../covered/filter/TestFamilyOnlyFilter.java| 12 +--
 .../index/write/TestWALRecoveryCaching.java |  4 +-
 phoenix-flume/pom.xml   |  9 ---
 phoenix-pig/pom.xml | 31 +---
 phoenix-spark/pom.xml   |  7 ++
 pom.xml | 41 ++-
 27 files changed, 361 insertions(+), 202 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/98271b88/phoenix-core/pom.xml
--
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
index 45b8d73..22e6b60 100644
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@ -350,16 +350,25 @@
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-it/artifactId
-  version${hbase.version}/version
   typetest-jar/type
   scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
+  artifactIdhbase-annotations/artifactId
+/dependency
+dependency
+  groupIdorg.apache.hbase/groupId
   artifactIdhbase-common/artifactId
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
+  artifactIdhbase-common/artifactId
+  scopetest/scope
+  typetest-jar/type
+/dependency
+dependency
+  groupIdorg.apache.hbase/groupId
   artifactIdhbase-protocol/artifactId
 /dependency
 dependency
@@ -369,18 +378,16 @@
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-server/artifactId
-  version${hbase.version}/version
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-server/artifactId
-  version${hbase.version}/version
   typetest-jar/type
+  scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-hadoop-compat/artifactId
-  scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
@@ -391,13 +398,11 @@
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-hadoop2-compat/artifactId
-  version${hbase.version}/version
   scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-hadoop2-compat/artifactId
-  version${hbase.version}/version
   typetest-jar/type
   scopetest/scope
 /dependency

http://git-wip-us.apache.org/repos/asf/phoenix/blob/98271b88/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexHalfStoreFileReader.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexHalfStoreFileReader.java
 
b/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexHalfStoreFileReader.java
index 49e2022..9befc8c 100644
--- 

[05/34] phoenix git commit: PHOENIX-2008 Integration tests are failing with HBase-1.1.0 because HBASE-13756(Rajeshbabu)

2015-07-20 Thread greid
PHOENIX-2008 Integration tests are failing with HBase-1.1.0 because 
HBASE-13756(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/56e1c0a1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/56e1c0a1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/56e1c0a1

Branch: refs/heads/4.x-HBase-1.1
Commit: 56e1c0a1f348572fb73e9d0b8bbfb053df7f8710
Parents: ea622d5
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Sat May 23 23:29:31 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Sat May 23 23:29:31 2015 +0530

--
 phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/56e1c0a1/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
--
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
index 54ae670..4aa28c4 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
@@ -620,6 +620,8 @@ public abstract class BaseTest {
 }
 //no point doing sanity checks when running tests.
    conf.setBoolean("hbase.table.sanity.checks", false);
+// Remove this configuration once hbase has HBASE-13756 fix.
+conf.set("hbase.regionserver.msginterval", "30");
 // set the server rpc controller and rpc scheduler factory, used to 
configure the cluster
 conf.set(RpcControllerFactory.CUSTOM_CONTROLLER_CONF_KEY, 
DEFAULT_SERVER_RPC_CONTROLLER_FACTORY);
 conf.set(RSRpcServices.REGION_SERVER_RPC_SCHEDULER_FACTORY_CLASS, 
DEFAULT_RPC_SCHEDULER_FACTORY);



[23/34] phoenix git commit: PHOENIX-2027 Subqueries with no data are raising IllegalStateException(Alicia Ying Shu)

2015-07-20 Thread greid
PHOENIX-2027 Subqueries with no data are raising IllegalStateException(Alicia 
Ying Shu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/bfd860ff
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/bfd860ff
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/bfd860ff

Branch: refs/heads/4.x-HBase-1.1
Commit: bfd860ffec62a784f1229997cf98892ea3c0592d
Parents: 18b9e72
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Wed Jun 10 01:01:29 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Wed Jun 10 01:01:29 2015 +0530

--
 .../apache/phoenix/end2end/SortMergeJoinIT.java | 54 
 .../phoenix/execute/SortMergeJoinPlan.java  |  4 +-
 2 files changed, 56 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/bfd860ff/phoenix-core/src/it/java/org/apache/phoenix/end2end/SortMergeJoinIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SortMergeJoinIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SortMergeJoinIT.java
index 6f14a45..8b65ab3 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SortMergeJoinIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SortMergeJoinIT.java
@@ -2658,5 +2658,59 @@ public class SortMergeJoinIT extends 
BaseHBaseManagedTimeIT {
 }
 }
 
+@Test
+public void testSubqueryWithoutData() throws Exception {
+Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+Connection conn = DriverManager.getConnection(getUrl(), props);
+conn.setAutoCommit(false);
+
+try {
+String GRAMMAR_TABLE = CREATE TABLE IF NOT EXISTS GRAMMAR_TABLE 
(ID INTEGER PRIMARY KEY,  +
+unsig_id UNSIGNED_INT, big_id BIGINT, unsig_long_id 
UNSIGNED_LONG, tiny_id TINYINT, +
+unsig_tiny_id UNSIGNED_TINYINT, small_id SMALLINT, 
unsig_small_id UNSIGNED_SMALLINT, + 
+float_id FLOAT, unsig_float_id UNSIGNED_FLOAT, double_id 
DOUBLE, unsig_double_id UNSIGNED_DOUBLE, + 
+decimal_id DECIMAL, boolean_id BOOLEAN, time_id TIME, 
date_id DATE, timestamp_id TIMESTAMP, + 
+unsig_time_id TIME, unsig_date_id DATE, 
unsig_timestamp_id TIMESTAMP, varchar_id VARCHAR (30), + 
+char_id CHAR (30), binary_id BINARY (100), varbinary_id 
VARBINARY (100));
+
+String LARGE_TABLE = CREATE TABLE IF NOT EXISTS LARGE_TABLE (ID 
INTEGER PRIMARY KEY,  +
+unsig_id UNSIGNED_INT, big_id BIGINT, unsig_long_id 
UNSIGNED_LONG, tiny_id TINYINT, +
+unsig_tiny_id UNSIGNED_TINYINT, small_id SMALLINT, 
unsig_small_id UNSIGNED_SMALLINT, + 
+float_id FLOAT, unsig_float_id UNSIGNED_FLOAT, double_id 
DOUBLE, unsig_double_id UNSIGNED_DOUBLE, + 
+decimal_id DECIMAL, boolean_id BOOLEAN, time_id TIME, 
date_id DATE, timestamp_id TIMESTAMP, + 
+unsig_time_id TIME, unsig_date_id DATE, 
unsig_timestamp_id TIMESTAMP, varchar_id VARCHAR (30), + 
+char_id CHAR (30), binary_id BINARY (100), varbinary_id 
VARBINARY (100));
+
+String SECONDARY_LARGE_TABLE = CREATE TABLE IF NOT EXISTS 
SECONDARY_LARGE_TABLE (SEC_ID INTEGER PRIMARY KEY, +
+sec_unsig_id UNSIGNED_INT, sec_big_id BIGINT, 
sec_usnig_long_id UNSIGNED_LONG, sec_tiny_id TINYINT, + 
+sec_unsig_tiny_id UNSIGNED_TINYINT, sec_small_id 
SMALLINT, sec_unsig_small_id UNSIGNED_SMALLINT, + 
+sec_float_id FLOAT, sec_unsig_float_id UNSIGNED_FLOAT, 
sec_double_id DOUBLE, sec_unsig_double_id UNSIGNED_DOUBLE, +
+sec_decimal_id DECIMAL, sec_boolean_id BOOLEAN, 
sec_time_id TIME, sec_date_id DATE, +
+sec_timestamp_id TIMESTAMP, sec_unsig_time_id TIME, 
sec_unsig_date_id DATE, sec_unsig_timestamp_id TIMESTAMP, +
+sec_varchar_id VARCHAR (30), sec_char_id CHAR (30), 
sec_binary_id BINARY (100), sec_varbinary_id VARBINARY (100));
+createTestTable(getUrl(), GRAMMAR_TABLE);
+createTestTable(getUrl(), LARGE_TABLE);
+createTestTable(getUrl(), SECONDARY_LARGE_TABLE);
+
+String ddl = SELECT /*+USE_SORT_MERGE_JOIN*/ * FROM (SELECT ID, 
BIG_ID, DATE_ID FROM LARGE_TABLE AS A WHERE (A.ID % 5) = 0) AS A  +
+INNER JOIN (SELECT SEC_ID, SEC_TINY_ID, 
SEC_UNSIG_FLOAT_ID FROM SECONDARY_LARGE_TABLE AS B WHERE (B.SEC_ID % 5) = 0) AS 
B  + 
+ON A.ID=B.SEC_ID WHERE A.DATE_ID  ALL (SELECT 
SEC_DATE_ID FROM SECONDARY_LARGE_TABLE LIMIT 100)  +  
+AND 

[25/34] phoenix git commit: PHOENIX-2033 PQS log environment details on launch

2015-07-20 Thread greid
PHOENIX-2033 PQS log environment details on launch


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/e64f61ba
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/e64f61ba
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/e64f61ba

Branch: refs/heads/4.x-HBase-1.1
Commit: e64f61ba431b8db938bf60992bbde56f4c540946
Parents: f7d7349
Author: Nick Dimiduk ndimi...@apache.org
Authored: Tue Jun 9 17:12:21 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Fri Jun 12 09:38:42 2015 -0700

--
 .../apache/phoenix/queryserver/server/Main.java | 69 
 1 file changed, 69 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/e64f61ba/phoenix-server/src/main/java/org/apache/phoenix/queryserver/server/Main.java
--
diff --git 
a/phoenix-server/src/main/java/org/apache/phoenix/queryserver/server/Main.java 
b/phoenix-server/src/main/java/org/apache/phoenix/queryserver/server/Main.java
index 55febc5..9f9bfc7 100644
--- 
a/phoenix-server/src/main/java/org/apache/phoenix/queryserver/server/Main.java
+++ 
b/phoenix-server/src/main/java/org/apache/phoenix/queryserver/server/Main.java
@@ -34,7 +34,12 @@ import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
+import java.lang.management.ManagementFactory;
+import java.lang.management.RuntimeMXBean;
 import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 
@@ -50,6 +55,11 @@ public final class Main extends Configured implements Tool, 
Runnable {
   phoenix.queryserver.http.port;
   public static final int DEFAULT_HTTP_PORT = 8765;
 
+  public static final String QUERY_SERVER_ENV_LOGGING_KEY =
+  "phoenix.queryserver.envvars.logging.disabled";
+  public static final String QUERY_SERVER_ENV_LOGGING_SKIPWORDS_KEY =
+  "phoenix.queryserver.envvars.logging.skipwords";
+
   public static final String KEYTAB_FILENAME_KEY = 
phoenix.queryserver.keytab.file;
   public static final String KERBEROS_PRINCIPAL_KEY = 
phoenix.queryserver.kerberos.principal;
   public static final String DNS_NAMESERVER_KEY = 
phoenix.queryserver.dns.nameserver;
@@ -58,12 +68,70 @@ public final class Main extends Configured implements Tool, 
Runnable {
 
   protected static final Log LOG = LogFactory.getLog(Main.class);
 
+  @SuppressWarnings("serial")
+  private static final Set<String> DEFAULT_SKIP_WORDS = new HashSet<String>() {
+    {
+      add("secret");
+      add("passwd");
+      add("password");
+      add("credential");
+    }
+  };
+
   private final String[] argv;
   private final CountDownLatch runningLatch = new CountDownLatch(1);
   private HttpServer server = null;
   private int retCode = 0;
   private Throwable t = null;
 
+  /**
+   * Log information about the currently running JVM.
+   */
+  public static void logJVMInfo() {
+// Print out vm stats before starting up.
+RuntimeMXBean runtime = ManagementFactory.getRuntimeMXBean();
+if (runtime != null) {
+  LOG.info("vmName=" + runtime.getVmName() + ", vmVendor=" +
+  runtime.getVmVendor() + ", vmVersion=" + runtime.getVmVersion());
+  LOG.info("vmInputArguments=" + runtime.getInputArguments());
+}
+  }
+
+  /**
+   * Logs information about the currently running JVM process including
+   * the environment variables. Logging of env vars can be disabled by
+   * setting {@code phoenix.envvars.logging.disabled} to {@code true}.
+   * pIf enabled, you can also exclude environment variables containing
+   * certain substrings by setting {@code phoenix.envvars.logging.skipwords}
+   * to comma separated list of such substrings.
+   */
+  public static void logProcessInfo(Configuration conf) {
+// log environment variables unless asked not to
+if (conf == null || !conf.getBoolean(QUERY_SERVER_ENV_LOGGING_KEY, false)) 
{
+  Set<String> skipWords = new HashSet<String>(DEFAULT_SKIP_WORDS);
+  if (conf != null) {
+String[] confSkipWords = 
conf.getStrings(QUERY_SERVER_ENV_LOGGING_SKIPWORDS_KEY);
+if (confSkipWords != null) {
+  skipWords.addAll(Arrays.asList(confSkipWords));
+}
+  }
+
+  nextEnv:
+  for (Map.Entry<String, String> entry : System.getenv().entrySet()) {
+String key = entry.getKey().toLowerCase();
+String value = entry.getValue().toLowerCase();
+// exclude variables which may contain skip words
+for(String skipWord : skipWords) {
+  if (key.contains(skipWord) || value.contains(skipWord))
+continue nextEnv;
+}
+LOG.info("env:"+entry);
+  }
+}
+// and 

phoenix git commit: PHOENIX-2131 Closing paren in CastParseNode SQL

2015-07-20 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/master b329e85b6 -> b38a62431


PHOENIX-2131 Closing paren in CastParseNode SQL

Add a missing closing parenthesis in CastParseNode.toSQL.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b38a6243
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b38a6243
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b38a6243

Branch: refs/heads/master
Commit: b38a62431ee44df171c913097d18e2433c951466
Parents: b329e85
Author: Gabriel Reid gr...@apache.org
Authored: Sun Jul 19 17:46:48 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jul 20 15:25:01 2015 +0200

--
 .../org/apache/phoenix/parse/CastParseNode.java |  2 +-
 .../apache/phoenix/parse/CastParseNodeTest.java | 57 
 2 files changed, 58 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b38a6243/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java 
b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
index 78be616..3e03613 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
@@ -133,7 +133,7 @@ public class CastParseNode extends UnaryParseNode {
 if (isArray) {
 buf.append(' ');
 buf.append(PDataType.ARRAY_TYPE_SUFFIX);
-buf.append(' ');
 }
+buf.append(")");
 }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b38a6243/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
new file mode 100644
index 000..b62d9a9
--- /dev/null
+++ b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.phoenix.parse;
+
+import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.schema.types.PDecimal;
+import org.apache.phoenix.schema.types.PDouble;
+import org.apache.phoenix.schema.types.PLong;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class CastParseNodeTest {
+
+@Test
+public void testToSQL() {
+ColumnParseNode columnParseNode = new 
ColumnParseNode(TableName.create("SCHEMA1", "TABLE1"), "V");
+CastParseNode castParseNode = new CastParseNode(columnParseNode, 
PLong.INSTANCE, null, null, false);
+StringBuilder stringBuilder = new StringBuilder();
+castParseNode.toSQL(null, stringBuilder);
+assertEquals(" CAST(TABLE1.V AS BIGINT)", stringBuilder.toString());
+}
+
+@Test
+public void testToSQL_WithLengthAndScale() {
+ColumnParseNode columnParseNode = new 
ColumnParseNode(TableName.create("SCHEMA1", "TABLE1"), "V");
+CastParseNode castParseNode = new CastParseNode(columnParseNode, 
PDecimal.INSTANCE, 5, 3, false);
+StringBuilder stringBuilder = new StringBuilder();
+castParseNode.toSQL(null, stringBuilder);
+assertEquals(" CAST(TABLE1.V AS DECIMAL(5,3))", 
stringBuilder.toString());
+}
+
+@Test
+public void testToSQL_ArrayType() {
+ColumnParseNode columnParseNode = new 
ColumnParseNode(TableName.create("SCHEMA1", "TABLE1"), "V");
+CastParseNode castParseNode = new CastParseNode(columnParseNode, 
PLong.INSTANCE, null, null, true);
+StringBuilder stringBuilder = new StringBuilder();
+castParseNode.toSQL(null, stringBuilder);
+assertEquals(" CAST(TABLE1.V AS BIGINT ARRAY)", 
stringBuilder.toString());
+}
+}
\ No newline at end of file



[09/34] phoenix git commit: Revert PHOENIX-2008 Integration tests are failing with HBase-1.1.0 because HBASE-13756(Rajeshbabu)

2015-07-20 Thread greid
Revert PHOENIX-2008 Integration tests are failing with HBase-1.1.0 because 
HBASE-13756(Rajeshbabu)

This reverts commit 56e1c0a1f348572fb73e9d0b8bbfb053df7f8710.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/1a2f2dc1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/1a2f2dc1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/1a2f2dc1

Branch: refs/heads/4.x-HBase-1.1
Commit: 1a2f2dc1ea6dd75c224b78a0dc2b312b0e6f5bce
Parents: 5546a42
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Wed May 27 14:52:25 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Wed May 27 14:52:25 2015 +0530

--
 phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java | 2 --
 1 file changed, 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/1a2f2dc1/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
--
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
index 4aa28c4..54ae670 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
@@ -620,8 +620,6 @@ public abstract class BaseTest {
 }
 //no point doing sanity checks when running tests.
 conf.setBoolean("hbase.table.sanity.checks", false);
-// Remove this configuration once hbase has HBASE-13756 fix.
-conf.set("hbase.regionserver.msginterval", "30");
 // set the server rpc controller and rpc scheduler factory, used to 
configure the cluster
 conf.set(RpcControllerFactory.CUSTOM_CONTROLLER_CONF_KEY, 
DEFAULT_SERVER_RPC_CONTROLLER_FACTORY);
 conf.set(RSRpcServices.REGION_SERVER_RPC_SCHEDULER_FACTORY_CLASS, 
DEFAULT_RPC_SCHEDULER_FACTORY);



[34/34] phoenix git commit: PHOENIX-2131 Closing paren in CastParseNode SQL

2015-07-20 Thread greid
PHOENIX-2131 Closing paren in CastParseNode SQL

Add a missing closing parenthesis in CastParseNode.toSQL.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/954a4a1b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/954a4a1b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/954a4a1b

Branch: refs/heads/4.x-HBase-1.1
Commit: 954a4a1b615e6c42dfaecbf2ff6b8174261ccea6
Parents: 89ab41c
Author: Gabriel Reid gr...@apache.org
Authored: Sun Jul 19 17:46:48 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jul 20 15:24:57 2015 +0200

--
 .../org/apache/phoenix/parse/CastParseNode.java |  2 +-
 .../apache/phoenix/parse/CastParseNodeTest.java | 57 
 2 files changed, 58 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/954a4a1b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java 
b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
index 78be616..3e03613 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
@@ -133,7 +133,7 @@ public class CastParseNode extends UnaryParseNode {
 if (isArray) {
 buf.append(' ');
 buf.append(PDataType.ARRAY_TYPE_SUFFIX);
-buf.append(' ');
 }
+buf.append(")");
 }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/954a4a1b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
new file mode 100644
index 000..b62d9a9
--- /dev/null
+++ b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.phoenix.parse;
+
+import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.schema.types.PDecimal;
+import org.apache.phoenix.schema.types.PDouble;
+import org.apache.phoenix.schema.types.PLong;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class CastParseNodeTest {
+
+@Test
+public void testToSQL() {
+ColumnParseNode columnParseNode = new 
ColumnParseNode(TableName.create(SCHEMA1, TABLE1), V);
+CastParseNode castParseNode = new CastParseNode(columnParseNode, 
PLong.INSTANCE, null, null, false);
+StringBuilder stringBuilder = new StringBuilder();
+castParseNode.toSQL(null, stringBuilder);
+assertEquals( CAST(TABLE1.V AS BIGINT), stringBuilder.toString());
+}
+
+@Test
+public void testToSQL_WithLengthAndScale() {
+ColumnParseNode columnParseNode = new 
ColumnParseNode(TableName.create(SCHEMA1, TABLE1), V);
+CastParseNode castParseNode = new CastParseNode(columnParseNode, 
PDecimal.INSTANCE, 5, 3, false);
+StringBuilder stringBuilder = new StringBuilder();
+castParseNode.toSQL(null, stringBuilder);
+assertEquals( CAST(TABLE1.V AS DECIMAL(5,3)), 
stringBuilder.toString());
+}
+
+@Test
+public void testToSQL_ArrayType() {
+ColumnParseNode columnParseNode = new 
ColumnParseNode(TableName.create(SCHEMA1, TABLE1), V);
+CastParseNode castParseNode = new CastParseNode(columnParseNode, 
PLong.INSTANCE, null, null, true);
+StringBuilder stringBuilder = new StringBuilder();
+castParseNode.toSQL(null, stringBuilder);
+assertEquals( CAST(TABLE1.V AS BIGINT ARRAY), 
stringBuilder.toString());
+}
+}
\ No newline at end of file



[11/34] phoenix git commit: PHOENIX-1964 - porting from master

2015-07-20 Thread greid
PHOENIX-1964 - porting from master


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c95e28df
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c95e28df
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c95e28df

Branch: refs/heads/4.x-HBase-1.1
Commit: c95e28df94241f47d5cfe9a1515b21960c93adf2
Parents: 0e0b4dd
Author: cmarcel cmar...@salesforce.com
Authored: Wed May 27 13:58:45 2015 -0700
Committer: cmarcel cmar...@salesforce.com
Committed: Wed May 27 13:58:45 2015 -0700

--
 phoenix-pherf/config/pherf.properties   |  3 ++
 .../org/apache/phoenix/pherf/DataIngestIT.java  |  3 +-
 .../apache/phoenix/pherf/ResultBaseTestIT.java  | 45 ++
 .../java/org/apache/phoenix/pherf/Pherf.java|  7 +--
 .../apache/phoenix/pherf/PherfConstants.java| 50 +++-
 .../phoenix/pherf/loaddata/DataLoader.java  |  2 +-
 .../apache/phoenix/pherf/result/ResultUtil.java |  4 +-
 .../pherf/result/impl/CSVResultHandler.java |  5 +-
 .../pherf/result/impl/ImageResultHandler.java   |  5 +-
 .../pherf/result/impl/XMLResultHandler.java |  6 ++-
 .../apache/phoenix/pherf/util/ResourceList.java | 26 --
 .../pherf/workload/WorkloadExecutor.java|  2 +-
 .../phoenix/pherf/ConfigurationParserTest.java  |  2 +-
 .../org/apache/phoenix/pherf/ResourceTest.java  |  8 ++--
 .../apache/phoenix/pherf/ResultBaseTest.java| 44 +
 .../org/apache/phoenix/pherf/ResultTest.java|  5 +-
 16 files changed, 168 insertions(+), 49 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c95e28df/phoenix-pherf/config/pherf.properties
--
diff --git a/phoenix-pherf/config/pherf.properties 
b/phoenix-pherf/config/pherf.properties
index 354707a..1142f9b5 100644
--- a/phoenix-pherf/config/pherf.properties
+++ b/phoenix-pherf/config/pherf.properties
@@ -29,3 +29,6 @@ pherf.default.dataloader.threadpool=0
 # When upserting, this is the max # of rows that will be inserted in a single 
commit
 pherf.default.dataloader.batchsize=1000
 
+# Directory where results from a scenario run will be written
+pherf.default.results.dir=RESULTS
+

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c95e28df/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
--
diff --git 
a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java 
b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
index b29656d..2b56f43 100644
--- a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
+++ b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
@@ -18,7 +18,6 @@
 
 package org.apache.phoenix.pherf;
 
-import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT;
 import org.apache.phoenix.pherf.configuration.Column;
 import org.apache.phoenix.pherf.configuration.DataTypeMapping;
 import org.apache.phoenix.pherf.configuration.Scenario;
@@ -39,7 +38,7 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-public class DataIngestIT extends BaseHBaseManagedTimeIT {
+public class DataIngestIT extends ResultBaseTestIT {
 protected static PhoenixUtil util = new PhoenixUtil(true);
 static final String matcherScenario = .*scenario/.*test.*xml;
 static final String matcherSchema = .*datamodel/.*test.*sql;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c95e28df/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java
--
diff --git 
a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java 
b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java
new file mode 100644
index 000..6e103b8
--- /dev/null
+++ b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *   or more contributor license agreements.  See the NOTICE file
+ *   distributed with this work for additional information
+ *   regarding copyright ownership.  The ASF licenses this file
+ *   to you under the Apache License, Version 2.0 (the
+ *   License); you may not use this file except in compliance
+ *   with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   Unless required by applicable law or agreed to in writing, software
+ *   distributed under the License is distributed on an AS IS BASIS,
+ *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *   See the License for the specific language governing 

[22/34] phoenix git commit: PHOENIX-1978 UDF ArgumentTypeMismatchException(Rajeshbabu)

2015-07-20 Thread greid
PHOENIX-1978 UDF ArgumentTypeMismatchException(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/18b9e727
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/18b9e727
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/18b9e727

Branch: refs/heads/4.x-HBase-1.1
Commit: 18b9e72756642e127b2e227ea46a4f70401e6187
Parents: 58ee706
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Fri Jun 5 09:04:17 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Fri Jun 5 09:04:17 2015 +0530

--
 .../phoenix/end2end/UserDefinedFunctionsIT.java | 58 ++--
 phoenix-core/src/main/antlr3/PhoenixSQL.g   | 17 +++---
 2 files changed, 61 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/18b9e727/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
index 868e19d..c6bd62f 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
@@ -58,6 +58,8 @@ import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.schema.FunctionAlreadyExistsException;
 import org.apache.phoenix.schema.FunctionNotFoundException;
 import org.apache.phoenix.schema.ValueRangeExcpetion;
+import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.schema.types.PArrayDataType;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.QueryUtil;
 import org.apache.phoenix.util.ReadOnlyProps;
@@ -121,11 +123,31 @@ public class UserDefinedFunctionsIT extends 
BaseOwnClusterIT{
 .append(
ptr.set(PInteger.INSTANCE.toBytes((Integer)sum));\n)
 .append(return true;\n)
 .append(}\n).toString();
-
+private static String ARRAY_INDEX_EVALUATE_METHOD =
+new StringBuffer()
+.append(public boolean evaluate(Tuple tuple, 
ImmutableBytesWritable ptr) {\n)
+.append(Expression indexExpr = 
children.get(1);\n)
+.append(if (!indexExpr.evaluate(tuple, ptr)) {\n)
+.append(   return false;\n)
+.append(} else if (ptr.getLength() == 0) {\n)
+.append(   return true;\n)
+.append(}\n)
+.append(// Use Codec to prevent Integer object 
allocation\n)
+.append(int index = 
PInteger.INSTANCE.getCodec().decodeInt(ptr, indexExpr.getSortOrder());\n)
+.append(if(index  0) {\n)
+.append(   throw new ParseException(\Index 
cannot be negative :\ + index);\n)
+.append(}\n)
+.append(Expression arrayExpr = 
children.get(0);\n)
+.append(return 
PArrayDataType.positionAtArrayElement(tuple, ptr, index, arrayExpr, 
getDataType(),getMaxLength());\n)
+.append(}\n).toString();
+
+
 private static String MY_REVERSE_CLASS_NAME = MyReverse;
 private static String MY_SUM_CLASS_NAME = MySum;
-private static String MY_REVERSE_PROGRAM = 
getProgram(MY_REVERSE_CLASS_NAME, STRING_REVERSE_EVALUATE_METHOD, PVarchar);
-private static String MY_SUM_PROGRAM = getProgram(MY_SUM_CLASS_NAME, 
SUM_COLUMN_VALUES_EVALUATE_METHOD, PInteger);
+private static String MY_ARRAY_INDEX_CLASS_NAME = MyArrayIndex;
+private static String MY_REVERSE_PROGRAM = 
getProgram(MY_REVERSE_CLASS_NAME, STRING_REVERSE_EVALUATE_METHOD, return 
PVarchar.INSTANCE;);
+private static String MY_SUM_PROGRAM = getProgram(MY_SUM_CLASS_NAME, 
SUM_COLUMN_VALUES_EVALUATE_METHOD, return PInteger.INSTANCE;);
+private static String MY_ARRAY_INDEX_PROGRAM = 
getProgram(MY_ARRAY_INDEX_CLASS_NAME, ARRAY_INDEX_EVALUATE_METHOD, return 
PDataType.fromTypeId(children.get(0).getDataType().getSqlType()- 
PDataType.ARRAY_TYPE_BASE););
 private static Properties EMPTY_PROPS = new Properties();
 
 
@@ -144,6 +166,8 @@ public class UserDefinedFunctionsIT extends 
BaseOwnClusterIT{
 .append(import org.apache.phoenix.schema.types.PInteger;\n)
 .append(import org.apache.phoenix.schema.types.PVarchar;\n)
 .append(import org.apache.phoenix.util.StringUtil;\n)
+.append(import 

[17/34] phoenix git commit: PHOENIX-1976 Exit gracefully if addShutdownHook fails.

2015-07-20 Thread greid
PHOENIX-1976 Exit gracefully if addShutdownHook fails.

If the JVM is already in the process of shutting down,
we don't need to add the shutdown hook for the PhoenixDriver
instance. Additionally, we shouldn't advertise this instance
either since we're going down.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f2be9138
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f2be9138
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f2be9138

Branch: refs/heads/4.x-HBase-1.1
Commit: f2be9138359b078fd3e285f3fd441de711789962
Parents: dc46b14
Author: Josh Elser josh.el...@gmail.com
Authored: Thu May 14 17:40:46 2015 -0400
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Mon Jun 1 12:02:28 2015 -0700

--
 .../org/apache/phoenix/jdbc/PhoenixDriver.java  | 46 ++--
 1 file changed, 32 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f2be9138/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java
index 6360d06..cfabe82 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java
@@ -60,25 +60,43 @@ public final class PhoenixDriver extends 
PhoenixEmbeddedDriver {
 private static volatile String driverShutdownMsg;
 static {
 try {
-DriverManager.registerDriver( INSTANCE = new PhoenixDriver() );
-// Add shutdown hook to release any resources that were never 
closed
-// In theory not necessary, but it won't hurt anything
-Runtime.getRuntime().addShutdownHook(new Thread() {
-@Override
-public void run() {
-try {
-INSTANCE.close();
-} catch (SQLException e) {
-logger.warn(Unable to close PhoenixDriver on 
shutdown, e);
-} finally {
-driverShutdownMsg = Phoenix driver closed because 
server is shutting down;
+INSTANCE = new PhoenixDriver();
+try {
+// Add shutdown hook to release any resources that were never 
closed
+// In theory not necessary, but it won't hurt anything
+Runtime.getRuntime().addShutdownHook(new Thread() {
+@Override
+public void run() {
+closeInstance(INSTANCE);
 }
-}
-});
+});
+
+// Only register the driver when we successfully register the 
shutdown hook
+// Don't want to register it if we're already in the process 
of going down.
+DriverManager.registerDriver( INSTANCE );
+} catch (IllegalStateException e) {
+logger.warn(Failed to register PhoenixDriver shutdown hook as 
the JVM is already shutting down);
+
+// Close the instance now because we don't have the shutdown 
hook
+closeInstance(INSTANCE);
+
+throw e;
+}
 } catch (SQLException e) {
 throw new IllegalStateException(Unable to register  + 
PhoenixDriver.class.getName() + : + e.getMessage());
 }
 }
+
+private static void closeInstance(PhoenixDriver instance) {
+try {
+instance.close();
+} catch (SQLException e) {
+logger.warn(Unable to close PhoenixDriver on shutdown, e);
+} finally {
+driverShutdownMsg = Phoenix driver closed because server is 
shutting down;
+}
+}
+
 // One entry per cluster here
 private final ConcurrentMapConnectionInfo,ConnectionQueryServices 
connectionQueryServicesMap = new 
ConcurrentHashMapConnectionInfo,ConnectionQueryServices(3);
 



[18/34] phoenix git commit: PHOENIX-1962 Apply check style to the build

2015-07-20 Thread greid
PHOENIX-1962 Apply check style to the build


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/29ea5035
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/29ea5035
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/29ea5035

Branch: refs/heads/4.x-HBase-1.1
Commit: 29ea503546265a619ce501c477a109b69f940a00
Parents: f2be913
Author: Nick Dimiduk ndimi...@apache.org
Authored: Sat May 9 11:10:54 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Mon Jun 1 12:21:48 2015 -0700

--
 phoenix-assembly/pom.xml|   4 +
 phoenix-core/pom.xml|   4 +
 phoenix-flume/pom.xml   |   4 +
 phoenix-pherf/pom.xml   |   1 +
 phoenix-pig/pom.xml |   4 +
 phoenix-server-client/pom.xml   |   4 +
 phoenix-server/pom.xml  |   4 +
 phoenix-spark/pom.xml   |   1 +
 pom.xml |  23 ++
 src/main/config/checkstyle/checker.xml  | 281 +++
 src/main/config/checkstyle/header.txt   |  16 ++
 src/main/config/checkstyle/suppressions.xml |  46 
 12 files changed, 392 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/29ea5035/phoenix-assembly/pom.xml
--
diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml
index 04d9335..d275d03 100644
--- a/phoenix-assembly/pom.xml
+++ b/phoenix-assembly/pom.xml
@@ -33,6 +33,10 @@
   descriptionAssemble Phoenix artifacts/description
   packagingpom/packaging
 
+  properties
+top.dir${project.basedir}/../top.dir
+  /properties
+
   build
 plugins
   plugin

http://git-wip-us.apache.org/repos/asf/phoenix/blob/29ea5035/phoenix-core/pom.xml
--
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
index 951e969..6302441 100644
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@ -24,6 +24,10 @@
   urlhttp://www.apache.org/url
   /organization
 
+  properties
+top.dir${project.basedir}/../top.dir
+  /properties
+
   build
 resources
   resource

http://git-wip-us.apache.org/repos/asf/phoenix/blob/29ea5035/phoenix-flume/pom.xml
--
diff --git a/phoenix-flume/pom.xml b/phoenix-flume/pom.xml
index ea87ab0..c7f0650 100644
--- a/phoenix-flume/pom.xml
+++ b/phoenix-flume/pom.xml
@@ -31,6 +31,10 @@
   artifactIdphoenix-flume/artifactId
   namePhoenix - Flume/name
 
+  properties
+top.dir${project.basedir}/../top.dir
+  /properties
+
   dependencies
dependency
   groupIdorg.apache.phoenix/groupId

http://git-wip-us.apache.org/repos/asf/phoenix/blob/29ea5035/phoenix-pherf/pom.xml
--
diff --git a/phoenix-pherf/pom.xml b/phoenix-pherf/pom.xml
index e751d73..dd45075 100644
--- a/phoenix-pherf/pom.xml
+++ b/phoenix-pherf/pom.xml
@@ -30,6 +30,7 @@
 namePhoenix - Pherf/name
 
 properties
+  top.dir${project.basedir}/../top.dir
 /properties
 
 profiles

http://git-wip-us.apache.org/repos/asf/phoenix/blob/29ea5035/phoenix-pig/pom.xml
--
diff --git a/phoenix-pig/pom.xml b/phoenix-pig/pom.xml
index 957c06f..55b34d3 100644
--- a/phoenix-pig/pom.xml
+++ b/phoenix-pig/pom.xml
@@ -31,6 +31,10 @@
   artifactIdphoenix-pig/artifactId
   namePhoenix - Pig/name
 
+  properties
+top.dir${project.basedir}/../top.dir
+  /properties
+
   dependencies
 dependency
   groupIdorg.apache.phoenix/groupId

http://git-wip-us.apache.org/repos/asf/phoenix/blob/29ea5035/phoenix-server-client/pom.xml
--
diff --git a/phoenix-server-client/pom.xml b/phoenix-server-client/pom.xml
index 748e57c..3e54a07 100644
--- a/phoenix-server-client/pom.xml
+++ b/phoenix-server-client/pom.xml
@@ -24,6 +24,10 @@
 urlhttp://www.apache.org/url
   /organization
 
+  properties
+top.dir${project.basedir}/../top.dir
+  /properties
+
   build
 plugins
   plugin

http://git-wip-us.apache.org/repos/asf/phoenix/blob/29ea5035/phoenix-server/pom.xml
--
diff --git a/phoenix-server/pom.xml b/phoenix-server/pom.xml
index ab9a472..86b2525 100644
--- a/phoenix-server/pom.xml
+++ b/phoenix-server/pom.xml
@@ -24,6 +24,10 @@
 urlhttp://www.apache.org/url
   /organization
 
+  properties
+top.dir${project.basedir}/../top.dir
+  /properties
+
   build
 plugins
   plugin


[21/34] phoenix git commit: PHOENIX-777 - Support null value for fixed length ARRAY - Addendum (Ram)

2015-07-20 Thread greid
PHOENIX-777 - Support null value for fixed length ARRAY - Addendum (Ram)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/58ee7062
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/58ee7062
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/58ee7062

Branch: refs/heads/4.x-HBase-1.1
Commit: 58ee7062c624dd72a5cdaa41ec5b107a1e7b14c2
Parents: 6f890ad
Author: ramkrishna ramkrishna.s.vasude...@gmail.com
Authored: Tue Jun 2 14:32:02 2015 +0530
Committer: ramkrishna ramkrishna.s.vasude...@gmail.com
Committed: Tue Jun 2 14:36:05 2015 +0530

--
 .../main/java/org/apache/phoenix/schema/types/PTimestamp.java   | 5 +
 1 file changed, 5 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/58ee7062/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PTimestamp.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PTimestamp.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PTimestamp.java
index d396adc..16b110e 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PTimestamp.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PTimestamp.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.schema.SortOrder;
+import org.apache.phoenix.util.ByteUtil;
 import org.apache.phoenix.util.DateUtil;
 
 public class PTimestamp extends PDataTypeTimestamp {
@@ -47,6 +48,10 @@ public class PTimestamp extends PDataTypeTimestamp {
   @Override
   public int toBytes(Object object, byte[] bytes, int offset) {
 if (object == null) {
+  // Create the byte[] of size MAX_TIMESTAMP_BYTES
+  if(bytes.length != getByteSize()) {
+  bytes = Bytes.padTail(bytes, (getByteSize() - bytes.length));
+  }
   PDate.INSTANCE.getCodec().encodeLong(0l, bytes, offset);
   Bytes.putInt(bytes, offset + Bytes.SIZEOF_LONG, 0);
   return getByteSize();



[13/34] phoenix git commit: PHOENIX-2010 Properly validate number of arguments passed to the functions in FunctionParseNode#validate(Rajeshbabu)

2015-07-20 Thread greid
PHOENIX-2010 Properly validate number of arguments passed to the functions in 
FunctionParseNode#validate(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b2c0cb90
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b2c0cb90
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b2c0cb90

Branch: refs/heads/4.x-HBase-1.1
Commit: b2c0cb9002ee881f21d968817c386a98d39074ca
Parents: a600cc4
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Sun May 31 07:40:39 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Sun May 31 07:40:39 2015 +0530

--
 .../phoenix/end2end/UserDefinedFunctionsIT.java   | 14 ++
 .../org/apache/phoenix/parse/FunctionParseNode.java   |  4 
 .../main/java/org/apache/phoenix/parse/PFunction.java |  4 +---
 3 files changed, 19 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b2c0cb90/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
index 7dbde3c..868e19d 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
@@ -442,6 +442,20 @@ public class UserDefinedFunctionsIT extends 
BaseOwnClusterIT{
 rs = stmt.executeQuery(select k from t9 where mysum9(k)=11);
 assertTrue(rs.next());
 assertEquals(1, rs.getInt(1));
+try {
+rs = stmt.executeQuery(select k from t9 where 
mysum9(k,10,'x')=11);
+fail(FunctionNotFoundException should be thrown);
+} catch(FunctionNotFoundException e) {
+} catch(Exception e) {
+fail(FunctionNotFoundException should be thrown);
+}
+try {
+rs = stmt.executeQuery(select mysum9() from t9);
+fail(FunctionNotFoundException should be thrown);
+} catch(FunctionNotFoundException e) {
+} catch(Exception e) {
+fail(FunctionNotFoundException should be thrown);
+}
 stmt.execute(drop function mysum9);
 try {
 rs = stmt.executeQuery(select k from t9 where mysum9(k)=11);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b2c0cb90/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java 
b/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java
index d1001ee..be52d89 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java
@@ -41,6 +41,7 @@ import 
org.apache.phoenix.expression.function.FunctionExpression;
 import org.apache.phoenix.expression.function.UDFExpression;
 import org.apache.phoenix.parse.PFunction.FunctionArgument;
 import org.apache.phoenix.schema.ArgumentTypeMismatchException;
+import org.apache.phoenix.schema.FunctionNotFoundException;
 import org.apache.phoenix.schema.ValueRangeExcpetion;
 import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.schema.types.PDataTypeFactory;
@@ -133,6 +134,9 @@ public class FunctionParseNode extends CompoundParseNode {
 public ListExpression validate(ListExpression children, 
StatementContext context) throws SQLException {
 BuiltInFunctionInfo info = this.getInfo();
 BuiltInFunctionArgInfo[] args = info.getArgs();
+if (args.length  children.size() || info.getRequiredArgCount()  
children.size()) {
+throw new FunctionNotFoundException(this.name);
+}
 if (args.length  children.size()) {
 ListExpression moreChildren = new 
ArrayListExpression(children);
 for (int i = children.size(); i  info.getArgs().length; i++) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b2c0cb90/phoenix-core/src/main/java/org/apache/phoenix/parse/PFunction.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/PFunction.java 
b/phoenix-core/src/main/java/org/apache/phoenix/parse/PFunction.java
index 351bec7..aeed3ac 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/PFunction.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/PFunction.java
@@ -96,9 +96,7 @@ public class PFunction implements PMetaDataEntity {
 }
 
 public 

[10/34] phoenix git commit: PHOENIX-2013 Apply PHOENIX-1995 to runnable uberjar as well

2015-07-20 Thread greid
PHOENIX-2013 Apply PHOENIX-1995 to runnable uberjar as well


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/0e0b4ddb
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/0e0b4ddb
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/0e0b4ddb

Branch: refs/heads/4.x-HBase-1.1
Commit: 0e0b4ddb4d130b38c7aa28d2e31b0a9552087256
Parents: 1a2f2dc
Author: Nick Dimiduk ndimi...@apache.org
Authored: Wed May 27 11:27:04 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Wed May 27 13:20:32 2015 -0700

--
 phoenix-server/src/build/query-server-runnable.xml | 9 +
 1 file changed, 9 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/0e0b4ddb/phoenix-server/src/build/query-server-runnable.xml
--
diff --git a/phoenix-server/src/build/query-server-runnable.xml 
b/phoenix-server/src/build/query-server-runnable.xml
index e2a3dc4..ef22b14 100644
--- a/phoenix-server/src/build/query-server-runnable.xml
+++ b/phoenix-server/src/build/query-server-runnable.xml
@@ -28,6 +28,15 @@
 formatjar/format
   /formats
   includeBaseDirectoryfalse/includeBaseDirectory
+  containerDescriptorHandlers
+containerDescriptorHandler
+  !--
+  aggregate SPI's so that things like HDFS FileSystem works in uberjar
+  http://docs.oracle.com/javase/tutorial/sound/SPI-intro.html
+  --
+  handlerNamemetaInf-services/handlerName
+/containerDescriptorHandler
+  /containerDescriptorHandlers
   dependencySets
 dependencySet
   outputDirectory//outputDirectory



[27/34] phoenix git commit: PHOENIX-2040 Mark spark/scala dependencies as 'provided' (Josh Mahonin)

2015-07-20 Thread greid
PHOENIX-2040 Mark spark/scala dependencies as 'provided' (Josh Mahonin)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/43c722ca
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/43c722ca
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/43c722ca

Branch: refs/heads/4.x-HBase-1.1
Commit: 43c722ca6d2d55347d1f2caf7641ce03339e1e1e
Parents: d0bcb7b
Author: Nick Dimiduk ndimi...@apache.org
Authored: Mon Jun 15 16:16:03 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Mon Jun 15 16:16:30 2015 -0700

--
 phoenix-assembly/pom.xml |  4 
 phoenix-spark/pom.xml| 51 ---
 2 files changed, 32 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/43c722ca/phoenix-assembly/pom.xml
--
diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml
index ebc5d71..d275d03 100644
--- a/phoenix-assembly/pom.xml
+++ b/phoenix-assembly/pom.xml
@@ -152,6 +152,10 @@
 /dependency
 dependency
   groupIdorg.apache.phoenix/groupId
+  artifactIdphoenix-spark/artifactId
+/dependency
+dependency
+  groupIdorg.apache.phoenix/groupId
   artifactIdphoenix-server/artifactId
 /dependency
 dependency

http://git-wip-us.apache.org/repos/asf/phoenix/blob/43c722ca/phoenix-spark/pom.xml
--
diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml
index 1747573..aea5c7e 100644
--- a/phoenix-spark/pom.xml
+++ b/phoenix-spark/pom.xml
@@ -45,12 +45,7 @@
   groupIdorg.apache.phoenix/groupId
   artifactIdphoenix-core/artifactId
 /dependency
-dependency
-  groupIdorg.apache.phoenix/groupId
-  artifactIdphoenix-core/artifactId
-  classifiertests/classifier
-  scopetest/scope
-/dependency
+
 !-- Force import of Spark's servlet API for unit tests --
 dependency
   groupIdjavax.servlet/groupId
@@ -59,16 +54,38 @@
   scopetest/scope
 /dependency
 
+!-- Mark Spark / Scala as provided --
 dependency
-  groupIdjunit/groupId
-  artifactIdjunit/artifactId
+  groupIdorg.scala-lang/groupId
+  artifactIdscala-library/artifactId
+  version${scala.version}/version
+  scopeprovided/scope
+/dependency
+dependency
+  groupIdorg.apache.spark/groupId
+  artifactIdspark-core_${scala.binary.version}/artifactId
+  version${spark.version}/version
+  scopeprovided/scope
+/dependency
+dependency
+  groupIdorg.apache.spark/groupId
+  artifactIdspark-sql_${scala.binary.version}/artifactId
+  version${spark.version}/version
+  scopeprovided/scope
+/dependency
+
+!-- Test dependencies --
+dependency
+  groupIdorg.apache.phoenix/groupId
+  artifactIdphoenix-core/artifactId
+  classifiertests/classifier
   scopetest/scope
 /dependency
 
 dependency
-  groupIdorg.scala-lang/groupId
-  artifactIdscala-library/artifactId
-  version${scala.version}/version
+  groupIdjunit/groupId
+  artifactIdjunit/artifactId
+  scopetest/scope
 /dependency
 
 dependency
@@ -86,18 +103,6 @@
 /dependency
 
 dependency
-  groupIdorg.apache.spark/groupId
-  artifactIdspark-core_${scala.binary.version}/artifactId
-  version${spark.version}/version
-/dependency
-
-dependency
-  groupIdorg.apache.spark/groupId
-  artifactIdspark-sql_${scala.binary.version}/artifactId
-  version${spark.version}/version
-/dependency
-
-dependency
   groupIdorg.apache.hadoop/groupId
   artifactIdhadoop-client/artifactId
   version${hadoop-two.version}/version



[20/34] phoenix git commit: PHOENIX-777 - Support null value for fixed length ARRAY (Dumindu Buddhika)

2015-07-20 Thread greid
PHOENIX-777 - Support null value for fixed length ARRAY (Dumindu Buddhika)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6f890ade
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6f890ade
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6f890ade

Branch: refs/heads/4.x-HBase-1.1
Commit: 6f890ade0691d03469ff8fce81c2fa9edd6941af
Parents: 9c5f111
Author: ramkrishna ramkrishna.s.vasude...@gmail.com
Authored: Tue Jun 2 11:18:51 2015 +0530
Committer: ramkrishna ramkrishna.s.vasude...@gmail.com
Committed: Tue Jun 2 11:18:51 2015 +0530

--
 .../phoenix/end2end/ArraysWithNullsIT.java  | 300 +++
 .../phoenix/compile/ExpressionCompiler.java |   9 +-
 .../apache/phoenix/schema/types/PBinary.java|   2 +-
 .../org/apache/phoenix/schema/types/PChar.java  |   5 +-
 .../org/apache/phoenix/schema/types/PDate.java  |   6 +-
 .../apache/phoenix/schema/types/PDecimal.java   |   3 +
 .../apache/phoenix/schema/types/PTimestamp.java |  17 +-
 .../phoenix/schema/types/PhoenixArray.java  |  51 ++--
 8 files changed, 358 insertions(+), 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6f890ade/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java
new file mode 100644
index 000..b034193
--- /dev/null
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java
@@ -0,0 +1,300 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.phoenix.end2end;
+
+import static org.junit.Assert.assertEquals;
+
+import java.sql.*;
+
+import org.apache.phoenix.schema.types.PTimestamp;
+import org.apache.phoenix.schema.types.PhoenixArray;
+import org.junit.Test;
+
+public class ArraysWithNullsIT extends BaseClientManagedTimeIT {
+
+@Test
+public void testArrayUpsertIntWithNulls() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+conn.createStatement().execute(CREATE TABLE t1 ( k VARCHAR PRIMARY 
KEY, a INTEGER[]));
+
+PreparedStatement stmt = conn.prepareStatement(UPSERT INTO t1 
VALUES('a',ARRAY[null,3,null]));
+stmt.execute();
+conn.commit();
+
+ResultSet rs = conn.createStatement().executeQuery(Select a from t1 
where k = 'a');
+rs.next();
+Array array = conn.createArrayOf(INTEGER,new Object[]{null,3,null});
+
+assertEquals(rs.getArray(1),array);
+conn.close();
+
+}
+
+
+
+@Test
+public void testArrayUpsertVarcharWithNulls() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+conn.createStatement().execute(CREATE TABLE t2 ( k VARCHAR PRIMARY 
KEY, a VARCHAR[]));
+
+PreparedStatement stmt = conn.prepareStatement(UPSERT INTO t2 
VALUES('a',ARRAY['10',null]));
+stmt.execute();
+conn.commit();
+
+ResultSet rs = conn.createStatement().executeQuery(Select a from t2 
where k = 'a');
+rs.next();
+Array array = conn.createArrayOf(VARCHAR,new Object[]{10,null});
+
+assertEquals(rs.getArray(1),array);
+conn.close();
+
+}
+
+@Test
+public void testArrayUpsertBigIntWithNulls() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+conn.createStatement().execute(CREATE TABLE t3 ( k VARCHAR PRIMARY 
KEY, a BIGINT[]));
+
+PreparedStatement stmt = conn.prepareStatement(UPSERT INTO t3 
VALUES('a',ARRAY[2,null,32335,4]));
+stmt.execute();
+conn.commit();
+
+ResultSet rs = conn.createStatement().executeQuery(Select a from t3 
where k = 'a');
+rs.next();
+Array array = conn.createArrayOf(BIGINT,new 
Object[]{(long)2,null,(long)32335,(long)4});
+
+assertEquals(rs.getArray(1),array);
+conn.close();

[08/34] phoenix git commit: PHOENIX-2005 Connection utilities omit zk client port, parent znode (addendum)

2015-07-20 Thread greid
PHOENIX-2005 Connection utilities omit zk client port, parent znode (addendum)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/5546a422
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/5546a422
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/5546a422

Branch: refs/heads/4.x-HBase-1.1
Commit: 5546a42226e3f0fdf0cc89f1c175ff3da7a75d8c
Parents: c6b37b9
Author: Nick Dimiduk ndimi...@apache.org
Authored: Tue May 26 17:41:04 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Tue May 26 17:52:24 2015 -0700

--
 .../phoenix/jdbc/PhoenixEmbeddedDriver.java |  2 +-
 .../java/org/apache/phoenix/util/QueryUtil.java |  2 +-
 .../phoenix/jdbc/PhoenixEmbeddedDriverTest.java | 20 
 3 files changed, 22 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/5546a422/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
index 2451603..3cfaacc 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
@@ -209,7 +209,7 @@ public abstract class PhoenixEmbeddedDriver implements 
Driver, org.apache.phoeni
 url = url == null ?  : url;
 url = url.startsWith(PhoenixRuntime.JDBC_PROTOCOL)
 ? url.substring(PhoenixRuntime.JDBC_PROTOCOL.length())
-: url;
+: PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR + url;
 StringTokenizer tokenizer = new StringTokenizer(url, DELIMITERS, 
true);
 int nTokens = 0;
 String[] tokens = new String[5];

http://git-wip-us.apache.org/repos/asf/phoenix/blob/5546a422/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
index bd38983..a2d4a91 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
@@ -290,7 +290,7 @@ public final class QueryUtil {
 throws ClassNotFoundException,
 SQLException {
 String url = getConnectionUrl(props, conf);
-LOG.info(Creating connection with the jdbc url: + url);
+LOG.info(Creating connection with the jdbc url:  + url);
 PropertiesUtil.extractProperties(props, conf);
 return DriverManager.getConnection(url, props);
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/5546a422/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriverTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriverTest.java
 
b/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriverTest.java
index 083b205..4eda825 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriverTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriverTest.java
@@ -34,23 +34,33 @@ public class PhoenixEmbeddedDriverTest {
 @Test
 public void testGetConnectionInfo() throws SQLException {
 String[] urls = new String[] {
+null,
+,
 jdbc:phoenix,
 jdbc:phoenix;test=true,
 jdbc:phoenix:localhost,
+localhost,
+localhost;,
 jdbc:phoenix:localhost:123,
 jdbc:phoenix:localhost:123;foo=bar,
+localhost:123,
 jdbc:phoenix:localhost:123:/hbase,
 jdbc:phoenix:localhost:123:/foo-bar,
 jdbc:phoenix:localhost:123:/foo-bar;foo=bas,
+localhost:123:/foo-bar,
 jdbc:phoenix:localhost:/hbase,
 jdbc:phoenix:localhost:/foo-bar,
 jdbc:phoenix:localhost:/foo-bar;test=true,
+localhost:/foo-bar,
 jdbc:phoenix:v1,v2,v3,
 jdbc:phoenix:v1,v2,v3;,
 jdbc:phoenix:v1,v2,v3;test=true,
+v1,v2,v3,
 jdbc:phoenix:v1,v2,v3:/hbase,
 jdbc:phoenix:v1,v2,v3:/hbase;test=true,
+v1,v2,v3:/foo-bar,
 jdbc:phoenix:v1,v2,v3:123:/hbase,
+v1,v2,v3:123:/hbase,
 jdbc:phoenix:v1,v2,v3:123:/hbase;test=false,
 

phoenix git commit: PHOENIX-2131 Closing paren in CastParseNode SQL

2015-07-20 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 73da0fb0d - 8eb9afeb6


PHOENIX-2131 Closing paren in CastParseNode SQL

Add a missing closing parenthesis in CastParseNode.toSQL.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8eb9afeb
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8eb9afeb
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8eb9afeb

Branch: refs/heads/4.x-HBase-0.98
Commit: 8eb9afeb6d0024265c8a8526218ac1c35076ec80
Parents: 73da0fb
Author: Gabriel Reid gr...@apache.org
Authored: Sun Jul 19 17:46:48 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jul 20 15:24:35 2015 +0200

--
 .../org/apache/phoenix/parse/CastParseNode.java |  2 +-
 .../apache/phoenix/parse/CastParseNodeTest.java | 57 
 2 files changed, 58 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/8eb9afeb/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java 
b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
index 78be616..3e03613 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
@@ -133,7 +133,7 @@ public class CastParseNode extends UnaryParseNode {
 if (isArray) {
 buf.append(' ');
 buf.append(PDataType.ARRAY_TYPE_SUFFIX);
-buf.append(' ');
 }
+buf.append());
 }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8eb9afeb/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
new file mode 100644
index 000..b62d9a9
--- /dev/null
+++ b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.phoenix.parse;
+
+import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.schema.types.PDecimal;
+import org.apache.phoenix.schema.types.PDouble;
+import org.apache.phoenix.schema.types.PLong;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class CastParseNodeTest {
+
+@Test
+public void testToSQL() {
+ColumnParseNode columnParseNode = new 
ColumnParseNode(TableName.create(SCHEMA1, TABLE1), V);
+CastParseNode castParseNode = new CastParseNode(columnParseNode, 
PLong.INSTANCE, null, null, false);
+StringBuilder stringBuilder = new StringBuilder();
+castParseNode.toSQL(null, stringBuilder);
+assertEquals( CAST(TABLE1.V AS BIGINT), stringBuilder.toString());
+}
+
+@Test
+public void testToSQL_WithLengthAndScale() {
+ColumnParseNode columnParseNode = new 
ColumnParseNode(TableName.create(SCHEMA1, TABLE1), V);
+CastParseNode castParseNode = new CastParseNode(columnParseNode, 
PDecimal.INSTANCE, 5, 3, false);
+StringBuilder stringBuilder = new StringBuilder();
+castParseNode.toSQL(null, stringBuilder);
+assertEquals( CAST(TABLE1.V AS DECIMAL(5,3)), 
stringBuilder.toString());
+}
+
+@Test
+public void testToSQL_ArrayType() {
+ColumnParseNode columnParseNode = new 
ColumnParseNode(TableName.create(SCHEMA1, TABLE1), V);
+CastParseNode castParseNode = new CastParseNode(columnParseNode, 
PLong.INSTANCE, null, null, true);
+StringBuilder stringBuilder = new StringBuilder();
+castParseNode.toSQL(null, stringBuilder);
+assertEquals( CAST(TABLE1.V AS BIGINT ARRAY), 
stringBuilder.toString());
+}
+}
\ No newline at end of file



[03/34] phoenix git commit: PHOENIX-1763 Support building with HBase-1.1.0

2015-07-20 Thread greid
http://git-wip-us.apache.org/repos/asf/phoenix/blob/98271b88/phoenix-pig/pom.xml
--
diff --git a/phoenix-pig/pom.xml b/phoenix-pig/pom.xml
index 2db1af6..015a660 100644
--- a/phoenix-pig/pom.xml
+++ b/phoenix-pig/pom.xml
@@ -54,7 +54,6 @@
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-testing-util/artifactId
-  version${hbase.version}/version
   scopetest/scope
   optionaltrue/optional
   exclusions
@@ -67,7 +66,6 @@
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-it/artifactId
-  version${hbase.version}/version
   typetest-jar/type
   scopetest/scope
   exclusions
@@ -80,41 +78,56 @@
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-common/artifactId
-  version${hbase.version}/version
+/dependency
+dependency
+  groupIdorg.apache.hbase/groupId
+  artifactIdhbase-common/artifactId
+  scopetest/scope
+  typetest-jar/type
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-protocol/artifactId
-  version${hbase.version}/version
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-client/artifactId
-  version${hbase.version}/version
+/dependency
+   dependency
+  groupIdorg.apache.hbase/groupId
+  artifactIdhbase-server/artifactId
+/dependency
+dependency
+  groupIdorg.apache.hbase/groupId
+  artifactIdhbase-server/artifactId
+  typetest-jar/type
+  scopetest/scope
+/dependency
+dependency
+  groupIdorg.apache.hbase/groupId
+  artifactIdhbase-client/artifactId
+  typetest-jar/type
+  scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-hadoop-compat/artifactId
-  version${hbase.version}/version
   scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-hadoop-compat/artifactId
-  version${hbase.version}/version
   typetest-jar/type
   scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-hadoop2-compat/artifactId
-  version${hbase.version}/version
   scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-hadoop2-compat/artifactId
-  version${hbase.version}/version
   typetest-jar/type
   scopetest/scope
 /dependency

http://git-wip-us.apache.org/repos/asf/phoenix/blob/98271b88/phoenix-spark/pom.xml
--
diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml
index adeed88..a232cf4 100644
--- a/phoenix-spark/pom.xml
+++ b/phoenix-spark/pom.xml
@@ -460,6 +460,13 @@
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
+  artifactIdhbase-server/artifactId
+  version${hbase.version}/version
+  scopetest/scope
+  typetest-jar/type
+/dependency
+dependency
+  groupIdorg.apache.hbase/groupId
   artifactIdhbase-it/artifactId
   version${hbase.version}/version
   typetest-jar/type

http://git-wip-us.apache.org/repos/asf/phoenix/blob/98271b88/pom.xml
--
diff --git a/pom.xml b/pom.xml
index d310c37..4361e54 100644
--- a/pom.xml
+++ b/pom.xml
@@ -78,7 +78,7 @@
 test.output.tofiletrue/test.output.tofile
 
 !-- Hadoop Versions --
-hbase.version1.0.1/hbase.version
+hbase.version1.1.0/hbase.version
 hadoop-two.version2.5.1/hadoop-two.version
 
 !-- Dependency versions --
@@ -452,6 +452,11 @@
   !-- HBase dependencies --
   dependency
 groupIdorg.apache.hbase/groupId
+artifactIdhbase-annotations/artifactId
+version${hbase.version}/version
+  /dependency
+  dependency
+groupIdorg.apache.hbase/groupId
 artifactIdhbase-testing-util/artifactId
 version${hbase.version}/version
 scopetest/scope
@@ -488,13 +493,34 @@
   /dependency
   dependency
 groupIdorg.apache.hbase/groupId
+artifactIdhbase-common/artifactId
+version${hbase.version}/version
+typetest-jar/type
+scopetest/scope
+  /dependency
+  dependency
+groupIdorg.apache.hbase/groupId
 artifactIdhbase-client/artifactId
 version${hbase.version}/version
   /dependency
   dependency
 groupIdorg.apache.hbase/groupId
+artifactIdhbase-client/artifactId
+version${hbase.version}/version
+typetest-jar/type
+scopetest/scope
+  /dependency
+  dependency
+groupIdorg.apache.hbase/groupId
+artifactIdhbase-server/artifactId
+version${hbase.version}/version
+  /dependency
+  dependency
+

[24/34] phoenix git commit: PHOENIX-1968: Should support saving arrays

2015-07-20 Thread greid
PHOENIX-1968: Should support saving arrays


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f7d73496
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f7d73496
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f7d73496

Branch: refs/heads/4.x-HBase-1.1
Commit: f7d734966f7172c3bc4a6f0ba31594ba74ee91a1
Parents: bfd860f
Author: ravimagham ravimag...@apache.org
Authored: Thu Jun 11 12:59:48 2015 -0700
Committer: ravimagham ravimag...@apache.org
Committed: Thu Jun 11 12:59:48 2015 -0700

--
 .../apache/phoenix/spark/PhoenixSparkIT.scala   | 21 
 .../phoenix/spark/PhoenixRecordWritable.scala   | 25 
 2 files changed, 41 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f7d73496/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
--
diff --git 
a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala 
b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
index 42e8676..5f256e6 100644
--- a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
+++ b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
@@ -415,4 +415,25 @@ class PhoenixSparkIT extends FunSuite with Matchers with 
BeforeAndAfterAll {
 
 results.toList shouldEqual checkResults
   }
+
+  test(Can save arrays back to phoenix) {
+val dataSet = List((2L, Array(String1, String2, String3)))
+
+sc
+  .parallelize(dataSet)
+  .saveToPhoenix(
+ARRAY_TEST_TABLE,
+Seq(ID,VCARRAY),
+zkUrl = Some(quorumAddress)
+  )
+
+// Load the results back
+val stmt = conn.createStatement()
+val rs = stmt.executeQuery(SELECT VCARRAY FROM ARRAY_TEST_TABLE WHERE ID 
= 2)
+rs.next()
+val sqlArray = rs.getArray(1).getArray().asInstanceOf[Array[String]]
+
+// Verify the arrays are equal
+sqlArray shouldEqual dataSet(0)._2
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f7d73496/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRecordWritable.scala
--
diff --git 
a/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRecordWritable.scala
 
b/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRecordWritable.scala
index 67e0bd2..3977657 100644
--- 
a/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRecordWritable.scala
+++ 
b/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRecordWritable.scala
@@ -16,11 +16,12 @@ package org.apache.phoenix.spark
 import java.sql.{PreparedStatement, ResultSet}
 import org.apache.hadoop.mapreduce.lib.db.DBWritable
 import org.apache.phoenix.mapreduce.util.ColumnInfoToStringEncoderDecoder
-import org.apache.phoenix.schema.types.{PDate, PhoenixArray}
+import org.apache.phoenix.schema.types.{PDataType, PDate, PhoenixArray}
 import org.joda.time.DateTime
 import scala.collection.{immutable, mutable}
 import scala.collection.JavaConversions._
 
+
 class PhoenixRecordWritable(var encodedColumns: String) extends DBWritable {
   val upsertValues = mutable.ArrayBuffer[Any]()
   val resultMap = mutable.Map[String, AnyRef]()
@@ -44,13 +45,27 @@ class PhoenixRecordWritable(var encodedColumns: String) 
extends DBWritable {
 upsertValues.zip(columns).zipWithIndex.foreach {
   case ((v, c), i) = {
 if (v != null) {
+
   // Both Java and Joda dates used to work in 4.2.3, but now they must 
be java.sql.Date
+  // Can override any other types here as needed
   val (finalObj, finalType) = v match {
-case dt: DateTime = (new java.sql.Date(dt.getMillis), 
PDate.INSTANCE.getSqlType)
-case d: java.util.Date = (new java.sql.Date(d.getTime), 
PDate.INSTANCE.getSqlType)
-case _ = (v, c.getSqlType)
+case dt: DateTime = (new java.sql.Date(dt.getMillis), 
PDate.INSTANCE)
+case d: java.util.Date = (new java.sql.Date(d.getTime), 
PDate.INSTANCE)
+case _ = (v, c.getPDataType)
+  }
+
+  // Save as array or object
+  finalObj match {
+case obj: Array[AnyRef] = {
+  // Create a java.sql.Array, need to lookup the base sql type name
+  val sqlArray = statement.getConnection.createArrayOf(
+PDataType.arrayBaseType(finalType).getSqlTypeName,
+obj
+  )
+  statement.setArray(i + 1, sqlArray)
+}
+case _ = statement.setObject(i + 1, finalObj)
   }
-  statement.setObject(i + 1, finalObj, finalType)
 } else {
   

[06/34] phoenix git commit: Changing version to 4.5.0-HBase-1.1-SNAPSHOT

2015-07-20 Thread greid
Changing version to 4.5.0-HBase-1.1-SNAPSHOT


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/3cdc3230
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/3cdc3230
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/3cdc3230

Branch: refs/heads/4.x-HBase-1.1
Commit: 3cdc3230c570ee8c22bb6c1bab975699fd02e94c
Parents: 56e1c0a
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Mon May 25 17:46:18 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Mon May 25 17:46:18 2015 +0530

--
 phoenix-assembly/pom.xml  | 2 +-
 phoenix-core/pom.xml  | 2 +-
 phoenix-flume/pom.xml | 2 +-
 phoenix-pherf/pom.xml | 2 +-
 phoenix-pig/pom.xml   | 2 +-
 phoenix-server-client/pom.xml | 2 +-
 phoenix-server/pom.xml| 2 +-
 phoenix-spark/pom.xml | 2 +-
 pom.xml   | 2 +-
 9 files changed, 9 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cdc3230/phoenix-assembly/pom.xml
--
diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml
index 8d9a965..04d9335 100644
--- a/phoenix-assembly/pom.xml
+++ b/phoenix-assembly/pom.xml
@@ -26,7 +26,7 @@
   parent
 groupIdorg.apache.phoenix/groupId
 artifactIdphoenix/artifactId
-version4.4.0-SNAPSHOT/version
+version4.5.0-HBase-1.1-SNAPSHOT/version
   /parent
   artifactIdphoenix-assembly/artifactId
   namePhoenix Assembly/name

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cdc3230/phoenix-core/pom.xml
--
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
index 22e6b60..951e969 100644
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@ -4,7 +4,7 @@
   parent
 groupIdorg.apache.phoenix/groupId
 artifactIdphoenix/artifactId
-version4.4.0-SNAPSHOT/version
+version4.5.0-HBase-1.1-SNAPSHOT/version
   /parent
   artifactIdphoenix-core/artifactId
   namePhoenix Core/name

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cdc3230/phoenix-flume/pom.xml
--
diff --git a/phoenix-flume/pom.xml b/phoenix-flume/pom.xml
index b2b9a47..ea87ab0 100644
--- a/phoenix-flume/pom.xml
+++ b/phoenix-flume/pom.xml
@@ -26,7 +26,7 @@
   parent
 groupIdorg.apache.phoenix/groupId
 artifactIdphoenix/artifactId
-version4.4.0-SNAPSHOT/version
+version4.5.0-HBase-1.1-SNAPSHOT/version
   /parent
   artifactIdphoenix-flume/artifactId
   namePhoenix - Flume/name

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cdc3230/phoenix-pherf/pom.xml
--
diff --git a/phoenix-pherf/pom.xml b/phoenix-pherf/pom.xml
index 0901f71..e751d73 100644
--- a/phoenix-pherf/pom.xml
+++ b/phoenix-pherf/pom.xml
@@ -22,7 +22,7 @@
 parent
 groupIdorg.apache.phoenix/groupId
 artifactIdphoenix/artifactId
-version4.4.0-SNAPSHOT/version
+version4.5.0-HBase-1.1-SNAPSHOT/version
 /parent
 
 artifactIdphoenix-pherf/artifactId

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cdc3230/phoenix-pig/pom.xml
--
diff --git a/phoenix-pig/pom.xml b/phoenix-pig/pom.xml
index 015a660..957c06f 100644
--- a/phoenix-pig/pom.xml
+++ b/phoenix-pig/pom.xml
@@ -26,7 +26,7 @@
   parent
 groupIdorg.apache.phoenix/groupId
 artifactIdphoenix/artifactId
-version4.4.0-SNAPSHOT/version
+version4.5.0-HBase-1.1-SNAPSHOT/version
   /parent
   artifactIdphoenix-pig/artifactId
   namePhoenix - Pig/name

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cdc3230/phoenix-server-client/pom.xml
--
diff --git a/phoenix-server-client/pom.xml b/phoenix-server-client/pom.xml
index 4d6fd45..748e57c 100644
--- a/phoenix-server-client/pom.xml
+++ b/phoenix-server-client/pom.xml
@@ -4,7 +4,7 @@
   parent
 groupIdorg.apache.phoenix/groupId
 artifactIdphoenix/artifactId
-version4.4.0-SNAPSHOT/version
+version4.5.0-HBase-1.1-SNAPSHOT/version
   /parent
   artifactIdphoenix-server-client/artifactId
   namePhoenix Query Server Client/name

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cdc3230/phoenix-server/pom.xml
--
diff --git a/phoenix-server/pom.xml b/phoenix-server/pom.xml
index 9f6289f..ab9a472 100644
--- a/phoenix-server/pom.xml
+++ b/phoenix-server/pom.xml
@@ -4,7 +4,7 @@
   parent
 groupIdorg.apache.phoenix/groupId
 artifactIdphoenix/artifactId
-version4.4.0-SNAPSHOT/version
+

phoenix git commit: PHOENIX-2129 Fix ArrayToStringFunctionTest#testDate (Dumindu Buddhika)

2015-07-20 Thread ramkrishna
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.0 04e9d58d1 - 7e808368e


PHOENIX-2129 Fix ArrayToStringFunctionTest#testDate (Dumindu Buddhika)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/7e808368
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/7e808368
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/7e808368

Branch: refs/heads/4.x-HBase-1.0
Commit: 7e808368e5ba09c0463ee8737160b9a994a5fd79
Parents: 04e9d58
Author: ramkrishna ramkrishna.s.vasude...@gmail.com
Authored: Mon Jul 20 22:52:39 2015 +0530
Committer: ramkrishna ramkrishna.s.vasude...@gmail.com
Committed: Mon Jul 20 22:56:17 2015 +0530

--
 .../apache/phoenix/expression/ArrayToStringFunctionTest.java   | 6 +-
 1 file changed, 5 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/7e808368/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayToStringFunctionTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayToStringFunctionTest.java
 
b/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayToStringFunctionTest.java
index 92eb6b5..d219320 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayToStringFunctionTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayToStringFunctionTest.java
@@ -141,7 +141,11 @@ public class ArrayToStringFunctionTest {
 PhoenixArray arr = new PhoenixArray(base, o1);
 String delimiter = , ;
 String nullString = *;
-String expected = 1970-01-01, 1970-01-01, 1970-01-01;
+String expected = ;
+for (int i = 0; i  o1.length - 1; i++) {
+expected += o1[i].toString() + , ;
+}
+expected += o1[o1.length - 1];
 test(arr, type, null, null, delimiter, nullString, expected, 
SortOrder.ASC, SortOrder.ASC, SortOrder.ASC);
 test(arr, type, null, null, delimiter, nullString, expected, 
SortOrder.DESC, SortOrder.ASC, SortOrder.ASC);
 }



Apache-Phoenix | 4.x-HBase-1.0 | Build Successful

2015-07-20 Thread Apache Jenkins Server
4.x-HBase-1.0 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf?p=phoenix.git;a=shortlog;h=refs/heads/4.x-HBase-1.0

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.x-HBase-1.0/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.x-HBase-1.0/lastCompletedBuild/testReport/

Changes
[ramkrishna] PHOENIX-2129 Fix ArrayToStringFunctionTest#testDate (Dumindu Buddhika)



Build times for last couple of runsLatest build time is the right most | Legend blue: normal, red: test failure, gray: timeout


Apache-Phoenix | Master | Build Successful

2015-07-20 Thread Apache Jenkins Server
Master branch build status Successful
Source repository https://git-wip-us.apache.org/repos/asf?p=phoenix.git;a=shortlog;h=refs/heads/master

Last Successful Compiled Artifacts https://builds.apache.org/job/Phoenix-master/lastSuccessfulBuild/artifact/

Last Complete Test Report https://builds.apache.org/job/Phoenix-master/lastCompletedBuild/testReport/

Changes
[ramkrishna] PHOENIX-2129 Fix ArrayToStringFunctionTest#testDate (Dumindu Buddhika)



Build times for last couple of runsLatest build time is the right most | Legend blue: normal, red: test failure, gray: timeout


Apache-Phoenix | Master | Build Successful

2015-07-20 Thread Apache Jenkins Server
Master branch build status Successful
Source repository https://git-wip-us.apache.org/repos/asf?p=phoenix.git;a=shortlog;h=refs/heads/master

Last Successful Compiled Artifacts https://builds.apache.org/job/Phoenix-master/lastSuccessfulBuild/artifact/

Last Complete Test Report https://builds.apache.org/job/Phoenix-master/lastCompletedBuild/testReport/

Changes
[gabrielr] PHOENIX-2131 Closing paren in CastParseNode SQL



Build times for last couple of runsLatest build time is the right most | Legend blue: normal, red: test failure, gray: timeout


phoenix git commit: PHOENIX-2129 Fix ArrayToStringFunctionTest#testDate (Dumindu Buddhika)

2015-07-20 Thread ramkrishna
Repository: phoenix
Updated Branches:
  refs/heads/master b38a62431 - 936de8815


PHOENIX-2129 Fix ArrayToStringFunctionTest#testDate (Dumindu Buddhika)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/936de881
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/936de881
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/936de881

Branch: refs/heads/master
Commit: 936de881575423c9242d083e5852e82017905977
Parents: b38a624
Author: ramkrishna ramkrishna.s.vasude...@gmail.com
Authored: Mon Jul 20 22:52:39 2015 +0530
Committer: ramkrishna ramkrishna.s.vasude...@gmail.com
Committed: Mon Jul 20 22:52:39 2015 +0530

--
 .../apache/phoenix/expression/ArrayToStringFunctionTest.java  | 7 +--
 1 file changed, 5 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/936de881/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayToStringFunctionTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayToStringFunctionTest.java
 
b/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayToStringFunctionTest.java
index d97f117..5ca266d 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayToStringFunctionTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayToStringFunctionTest.java
@@ -135,7 +135,6 @@ public class ArrayToStringFunctionTest {
 }
 
 @Test
-@Ignore
 public void testDate() throws SQLException {
 PDataType type = PDateArray.INSTANCE;
 PDataType base = PDate.INSTANCE;
@@ -143,7 +142,11 @@ public class ArrayToStringFunctionTest {
 PhoenixArray arr = new PhoenixArray(base, o1);
 String delimiter = , ;
 String nullString = *;
-String expected = 1970-01-01, 1970-01-01, 1970-01-01;
+String expected = ;
+for (int i = 0; i  o1.length - 1; i++) {
+expected += o1[i].toString() + , ;
+}
+expected += o1[o1.length - 1];
 test(arr, type, null, null, delimiter, nullString, expected, 
SortOrder.ASC, SortOrder.ASC, SortOrder.ASC);
 test(arr, type, null, null, delimiter, nullString, expected, 
SortOrder.DESC, SortOrder.ASC, SortOrder.ASC);
 }



phoenix git commit: PHOENIX-2129 Fix ArrayToStringFunctionTest#testDate (Dumindu Buddhika)

2015-07-20 Thread ramkrishna
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 8eb9afeb6 - b08ce6c98


PHOENIX-2129 Fix ArrayToStringFunctionTest#testDate (Dumindu Buddhika)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b08ce6c9
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b08ce6c9
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b08ce6c9

Branch: refs/heads/4.x-HBase-0.98
Commit: b08ce6c984de7a3ffa0f510abcef1daf9e417f8e
Parents: 8eb9afe
Author: ramkrishna ramkrishna.s.vasude...@gmail.com
Authored: Mon Jul 20 22:52:39 2015 +0530
Committer: ramkrishna ramkrishna.s.vasude...@gmail.com
Committed: Mon Jul 20 22:56:53 2015 +0530

--
 .../apache/phoenix/expression/ArrayToStringFunctionTest.java  | 7 +--
 1 file changed, 5 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b08ce6c9/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayToStringFunctionTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayToStringFunctionTest.java
 
b/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayToStringFunctionTest.java
index d97f117..5ca266d 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayToStringFunctionTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayToStringFunctionTest.java
@@ -135,7 +135,6 @@ public class ArrayToStringFunctionTest {
 }
 
 @Test
-@Ignore
 public void testDate() throws SQLException {
 PDataType type = PDateArray.INSTANCE;
 PDataType base = PDate.INSTANCE;
@@ -143,7 +142,11 @@ public class ArrayToStringFunctionTest {
 PhoenixArray arr = new PhoenixArray(base, o1);
 String delimiter = , ;
 String nullString = *;
-String expected = 1970-01-01, 1970-01-01, 1970-01-01;
+String expected = ;
+for (int i = 0; i  o1.length - 1; i++) {
+expected += o1[i].toString() + , ;
+}
+expected += o1[o1.length - 1];
 test(arr, type, null, null, delimiter, nullString, expected, 
SortOrder.ASC, SortOrder.ASC, SortOrder.ASC);
 test(arr, type, null, null, delimiter, nullString, expected, 
SortOrder.DESC, SortOrder.ASC, SortOrder.ASC);
 }



[31/50] [abbrv] phoenix git commit: PHOENIX-2067 Sort order incorrect for variable length DESC columns

2015-07-20 Thread maryannxue
http://git-wip-us.apache.org/repos/asf/phoenix/blob/2620a80c/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
index 60d2020..2c91dc5 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDataType.java
@@ -47,1060 +47,1036 @@ import com.google.common.primitives.Longs;
  */
 public abstract class PDataTypeT implements DataTypeT, 
ComparablePDataType? {
 
-  private final String sqlTypeName;
-  private final int sqlType;
-  private final Class clazz;
-  private final byte[] clazzNameBytes;
-  private final byte[] sqlTypeNameBytes;
-  private final PDataCodec codec;
-  private final int ordinal;
-
-  protected PDataType(String sqlTypeName, int sqlType, Class clazz, PDataCodec 
codec, int ordinal) {
-this.sqlTypeName = sqlTypeName;
-this.sqlType = sqlType;
-this.clazz = clazz;
-this.clazzNameBytes = Bytes.toBytes(clazz.getName());
-this.sqlTypeNameBytes = Bytes.toBytes(sqlTypeName);
-this.codec = codec;
-this.ordinal = ordinal;
-  }
-
-  @Deprecated
-  public static PDataType[] values() {
-return PDataTypeFactory.getInstance().getOrderedTypes();
-  }
-
-  @Deprecated
-  public int ordinal() {
-return ordinal;
-  }
-
-  @Override
-  public ClassT encodedClass() {
-return getJavaClass();
-  }
-
-  public boolean isCastableTo(PDataType targetType) {
-return isComparableTo(targetType);
-  }
-
-  public final PDataCodec getCodec() {
-return codec;
-  }
-
-  public boolean isBytesComparableWith(PDataType otherType) {
-return this == otherType
-|| this.getClass() == PVarbinary.class
-|| otherType == PVarbinary.INSTANCE
-|| this.getClass() == PBinary.class
-|| otherType == PBinary.INSTANCE;
-  }
-
-  public int estimateByteSize(Object o) {
-if (isFixedWidth()) {
-  return getByteSize();
-}
-if (isArrayType()) {
-  PhoenixArray array = (PhoenixArray) o;
-  int noOfElements = array.numElements;
-  int totalVarSize = 0;
-  for (int i = 0; i  noOfElements; i++) {
-totalVarSize += array.estimateByteSize(i);
-  }
-  return totalVarSize;
-}
-// Non fixed width types must override this
-throw new UnsupportedOperationException();
-  }
-
-  public Integer getMaxLength(Object o) {
-return null;
-  }
-
-  public Integer getScale(Object o) {
-return null;
-  }
-
-  /**
-   * Estimate the byte size from the type length. For example, for char, byte 
size would be the
-   * same as length. For decimal, byte size would have no correlation with the 
length.
-   */
-  public Integer estimateByteSizeFromLength(Integer length) {
-if (isFixedWidth()) {
-  return getByteSize();
-}
-if (isArrayType()) {
-  return null;
-}
-// If not fixed width, default to say the byte size is the same as length.
-return length;
-  }
-
-  public final String getSqlTypeName() {
-return sqlTypeName;
-  }
-
-  public final int getSqlType() {
-return sqlType;
-  }
-
-  public final Class getJavaClass() {
-return clazz;
-  }
-
-  public boolean isArrayType() {
-return false;
-  }
-
-  public final int compareTo(byte[] lhs, int lhsOffset, int lhsLength, 
SortOrder lhsSortOrder,
-  byte[] rhs, int rhsOffset, int rhsLength, SortOrder rhsSortOrder,
-  PDataType rhsType) {
-Preconditions.checkNotNull(lhsSortOrder);
-Preconditions.checkNotNull(rhsSortOrder);
-if (this.isBytesComparableWith(rhsType)) { // directly compare the bytes
-  return compareTo(lhs, lhsOffset, lhsLength, lhsSortOrder, rhs, 
rhsOffset, rhsLength,
-  rhsSortOrder);
-}
-PDataCodec lhsCodec = this.getCodec();
-if (lhsCodec
-== null) { // no lhs native type representation, so convert rhsType to 
bytes representation of lhsType
-  byte[] rhsConverted =
-  this.toBytes(this.toObject(rhs, rhsOffset, rhsLength, rhsType, 
rhsSortOrder));
-  if (rhsSortOrder == SortOrder.DESC) {
-rhsSortOrder = SortOrder.ASC;
-  }
-  if (lhsSortOrder == SortOrder.DESC) {
-lhs = SortOrder.invert(lhs, lhsOffset, new byte[lhsLength], 0, 
lhsLength);
-  }
-  return Bytes.compareTo(lhs, lhsOffset, lhsLength, rhsConverted, 0, 
rhsConverted.length);
-}
-PDataCodec rhsCodec = rhsType.getCodec();
-if (rhsCodec == null) {
-  byte[] lhsConverted =
-  rhsType.toBytes(rhsType.toObject(lhs, lhsOffset, lhsLength, this, 
lhsSortOrder));
-  if (lhsSortOrder == SortOrder.DESC) {
-lhsSortOrder = SortOrder.ASC;
-  }
-  if (rhsSortOrder == SortOrder.DESC) {
-rhs = SortOrder.invert(rhs, rhsOffset, new byte[rhsLength], 0, 
rhsLength);
-  }
-

[29/50] [abbrv] phoenix git commit: PHOENIX-2067 Sort order incorrect for variable length DESC columns

2015-07-20 Thread maryannxue
http://git-wip-us.apache.org/repos/asf/phoenix/blob/2620a80c/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PUnsignedTimestampArray.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PUnsignedTimestampArray.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PUnsignedTimestampArray.java
index 1159b5c..3407310 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PUnsignedTimestampArray.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PUnsignedTimestampArray.java
@@ -17,94 +17,80 @@
  */
 package org.apache.phoenix.schema.types;
 
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.phoenix.schema.SortOrder;
+import java.sql.Timestamp;
 
-import java.sql.*;
+import org.apache.phoenix.schema.SortOrder;
 
 public class PUnsignedTimestampArray extends PArrayDataTypeTimestamp[] {
 
-  public static final PUnsignedTimestampArray INSTANCE = new 
PUnsignedTimestampArray();
-
-  private PUnsignedTimestampArray() {
-super(UNSIGNED_TIMESTAMP ARRAY,
-PDataType.ARRAY_TYPE_BASE + PUnsignedTimestamp.INSTANCE.getSqlType(), 
PhoenixArray.class,
-null, 37);
-  }
-
-  @Override
-  public boolean isArrayType() {
-return true;
-  }
-
-  @Override
-  public boolean isFixedWidth() {
-return false;
-  }
+public static final PUnsignedTimestampArray INSTANCE = new 
PUnsignedTimestampArray();
 
-  @Override
-  public int compareTo(Object lhs, Object rhs, PDataType rhsType) {
-return compareTo(lhs, rhs);
-  }
+private PUnsignedTimestampArray() {
+super(UNSIGNED_TIMESTAMP ARRAY,
+PDataType.ARRAY_TYPE_BASE + 
PUnsignedTimestamp.INSTANCE.getSqlType(), PhoenixArray.class,
+null, 37);
+}
 
-  @Override
-  public Integer getByteSize() {
-return null;
-  }
+@Override
+public boolean isArrayType() {
+return true;
+}
 
-  @Override
-  public byte[] toBytes(Object object) {
-return toBytes(object, SortOrder.ASC);
-  }
+@Override
+public boolean isFixedWidth() {
+return false;
+}
 
-  @Override
-  public byte[] toBytes(Object object, SortOrder sortOrder) {
-return toBytes(object, PUnsignedTimestamp.INSTANCE, sortOrder);
-  }
+@Override
+public int compareTo(Object lhs, Object rhs, PDataType rhsType) {
+return compareTo(lhs, rhs);
+}
 
-  @Override
-  public Object toObject(byte[] bytes, int offset, int length,
-  PDataType actualType, SortOrder sortOrder, Integer maxLength,
-  Integer scale) {
-return toObject(bytes, offset, length, PUnsignedTimestamp.INSTANCE, 
sortOrder,
-maxLength, scale, PUnsignedTimestamp.INSTANCE);
-  }
+@Override
+public Integer getByteSize() {
+return null;
+}
 
-  @Override
-  public boolean isCoercibleTo(PDataType targetType) {
-return isCoercibleTo(targetType, this);
-  }
+@Override
+public byte[] toBytes(Object object) {
+return toBytes(object, SortOrder.ASC);
+}
 
-  @Override
-  public boolean isCoercibleTo(PDataType targetType, Object value) {
-if (value == null) {
-  return true;
+@Override
+public byte[] toBytes(Object object, SortOrder sortOrder) {
+return toBytes(object, PUnsignedTimestamp.INSTANCE, sortOrder);
 }
-PhoenixArray pArr = (PhoenixArray) value;
-Object[] timeStampArr = (Object[]) pArr.array;
-for (Object i : timeStampArr) {
-  if (!super.isCoercibleTo(PUnsignedTimestamp.INSTANCE, i)) {
-return false;
-  }
+
+@Override
+public Object toObject(byte[] bytes, int offset, int length,
+PDataType actualType, SortOrder sortOrder, Integer maxLength,
+Integer scale) {
+return toObject(bytes, offset, length, PUnsignedTimestamp.INSTANCE, 
sortOrder,
+maxLength, scale, PUnsignedTimestamp.INSTANCE);
 }
-return true;
-  }
 
-  @Override
-  public void coerceBytes(ImmutableBytesWritable ptr, Object object, PDataType 
actualType,
-  Integer maxLength, Integer scale, SortOrder actualModifer, Integer 
desiredMaxLength,
-  Integer desiredScale, SortOrder desiredModifier) {
-coerceBytes(ptr, object, actualType, maxLength, scale, desiredMaxLength, 
desiredScale,
-this, actualModifer, desiredModifier);
-  }
+@Override
+public boolean isCoercibleTo(PDataType targetType) {
+return isCoercibleTo(targetType, this);
+}
 
-  @Override
-  public int getResultSetSqlType() {
-return Types.ARRAY;
-  }
+@Override
+public boolean isCoercibleTo(PDataType targetType, Object value) {
+if (value == null) {
+return true;
+}
+PhoenixArray pArr = (PhoenixArray) value;
+Object[] timeStampArr = (Object[]) pArr.array;
+for (Object i : timeStampArr) {
+if (!super.isCoercibleTo(PUnsignedTimestamp.INSTANCE, 

[28/50] [abbrv] phoenix git commit: PHOENIX-2058 Check for existence and compatibility of columns being added in view

2015-07-20 Thread maryannxue
PHOENIX-2058 Check for existence and compatibility of columns being added in 
view


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/01b4f605
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/01b4f605
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/01b4f605

Branch: refs/heads/calcite
Commit: 01b4f6055911aa5036aa1e3a79340f80eb08396a
Parents: 66705d5
Author: Thomas D'Silva tdsi...@salesforce.com
Authored: Fri Jul 10 11:55:55 2015 -0700
Committer: Thomas D'Silva tdsi...@salesforce.com
Committed: Mon Jul 13 17:37:58 2015 -0700

--
 .../apache/phoenix/end2end/AlterTableIT.java| 311 ---
 .../java/org/apache/phoenix/end2end/ViewIT.java |   6 +
 .../coprocessor/MetaDataEndpointImpl.java   |  64 ++--
 .../apache/phoenix/schema/MetaDataClient.java   |   9 +-
 4 files changed, 310 insertions(+), 80 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/01b4f605/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
index 0425933..607f52a 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
@@ -2009,7 +2009,7 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 }
 
 @Test
-public void testAddNewColumnToBaseTableWithViews() throws Exception {
+public void testAddNewColumnsToBaseTableWithViews() throws Exception {
 Connection conn = DriverManager.getConnection(getUrl());
 try {   
 conn.createStatement().execute(CREATE TABLE IF NOT EXISTS 
TABLEWITHVIEW (
@@ -2018,13 +2018,15 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 +  COL2 bigint NOT NULL,
 +  CONSTRAINT NAME_PK PRIMARY KEY (ID, COL1, COL2)
 +  ));
-assertTableDefinition(conn, TABLEWITHVIEW, PTableType.TABLE, 
null, 0, 3, -1, ID, COL1, COL2);
+assertTableDefinition(conn, TABLEWITHVIEW, PTableType.TABLE, 
null, 0, 3, QueryConstants.BASE_TABLE_BASE_COLUMN_COUNT, ID, COL1, COL2);
 
 conn.createStatement().execute(CREATE VIEW VIEWOFTABLE ( 
VIEW_COL1 DECIMAL(10,2), VIEW_COL2 VARCHAR ) AS SELECT * FROM TABLEWITHVIEW);
 assertTableDefinition(conn, VIEWOFTABLE, PTableType.VIEW, 
TABLEWITHVIEW, 0, 5, 3, ID, COL1, COL2, VIEW_COL1, VIEW_COL2);
 
-conn.createStatement().execute(ALTER TABLE TABLEWITHVIEW ADD COL3 
char(10));
-assertTableDefinition(conn, VIEWOFTABLE, PTableType.VIEW, 
TABLEWITHVIEW, 1, 6, 4, ID, COL1, COL2, COL3, VIEW_COL1, 
VIEW_COL2);
+// adding a new pk column and a new regular column
+conn.createStatement().execute(ALTER TABLE TABLEWITHVIEW ADD COL3 
varchar(10) PRIMARY KEY, COL4 integer);
+assertTableDefinition(conn, TABLEWITHVIEW, PTableType.TABLE, 
null, 1, 5, QueryConstants.BASE_TABLE_BASE_COLUMN_COUNT, ID, COL1, COL2, 
COL3, COL4);
+assertTableDefinition(conn, VIEWOFTABLE, PTableType.VIEW, 
TABLEWITHVIEW, 1, 7, 5, ID, COL1, COL2, COL3, COL4, VIEW_COL1, 
VIEW_COL2);
 } finally {
 conn.close();
 }
@@ -2040,13 +2042,13 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 +  COL2 bigint NOT NULL,
 +  CONSTRAINT NAME_PK PRIMARY KEY (ID, COL1, COL2)
 +  ));
-assertTableDefinition(conn, TABLEWITHVIEW, PTableType.TABLE, 
null, 0, 3, -1, ID, COL1, COL2);
+assertTableDefinition(conn, TABLEWITHVIEW, PTableType.TABLE, 
null, 0, 3, QueryConstants.BASE_TABLE_BASE_COLUMN_COUNT, ID, COL1, COL2);
 
-conn.createStatement().execute(CREATE VIEW VIEWOFTABLE ( 
VIEW_COL1 DECIMAL(10,2), VIEW_COL2 VARCHAR(256), VIEW_COL3 VARCHAR, VIEW_COL4 
DECIMAL ) AS SELECT * FROM TABLEWITHVIEW);
-assertTableDefinition(conn, VIEWOFTABLE, PTableType.VIEW, 
TABLEWITHVIEW, 0, 7, 3, ID, COL1, COL2, VIEW_COL1, VIEW_COL2, 
VIEW_COL3, VIEW_COL4);
+conn.createStatement().execute(CREATE VIEW VIEWOFTABLE ( 
VIEW_COL1 DECIMAL(10,2), VIEW_COL2 VARCHAR(256), VIEW_COL3 VARCHAR, VIEW_COL4 
DECIMAL, VIEW_COL5 DECIMAL(10,2), VIEW_COL6 VARCHAR, CONSTRAINT pk PRIMARY KEY 
(VIEW_COL5, VIEW_COL6) ) AS SELECT * FROM TABLEWITHVIEW);
+assertTableDefinition(conn, VIEWOFTABLE, PTableType.VIEW, 
TABLEWITHVIEW, 0, 9, 3, ID, COL1, COL2, VIEW_COL1, VIEW_COL2, 
VIEW_COL3, VIEW_COL4, VIEW_COL5, VIEW_COL6);
 
 

[34/50] [abbrv] phoenix git commit: PHOENIX-2067 Sort order incorrect for variable length DESC columns

2015-07-20 Thread maryannxue
http://git-wip-us.apache.org/repos/asf/phoenix/blob/2620a80c/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
index 7b39a28..e12f5a4 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
@@ -150,8 +150,10 @@ public class UpsertCompiler {
 
SQLExceptionCode.DATA_EXCEEDS_MAX_CAPACITY).setColumnName(column.getName().getString())
 .setMessage(value= + 
column.getDataType().toStringLiteral(ptr, null)).build()
 .buildException(); }
-column.getDataType().coerceBytes(ptr, value, 
column.getDataType(), precision, scale,
-SortOrder.getDefault(), column.getMaxLength(), 
column.getScale(), column.getSortOrder());
+column.getDataType().coerceBytes(ptr, value, 
column.getDataType(), 
+precision, scale, SortOrder.getDefault(), 
+column.getMaxLength(), column.getScale(), 
column.getSortOrder(),
+table.rowKeyOrderOptimizable());
 values[i] = ByteUtil.copyKeyBytesIfNecessary(ptr);
 }
 setValues(values, pkSlotIndexes, columnIndexes, table, 
mutation, statement);
@@ -772,6 +774,7 @@ public class UpsertCompiler {
 final SequenceManager sequenceManager = 
context.getSequenceManager();
 // Next evaluate all the expressions
 int nodeIndex = nodeIndexOffset;
+PTable table = tableRef.getTable();
 Tuple tuple = sequenceManager.getSequenceCount() == 0 ? null :
 sequenceManager.newSequenceTuple(null);
 for (Expression constantExpression : constantExpressions) {
@@ -793,9 +796,10 @@ public class UpsertCompiler {
 .setMessage(value= + 
constantExpression.toString()).build().buildException();
 }
 }
-column.getDataType().coerceBytes(ptr, value,
-constantExpression.getDataType(), 
constantExpression.getMaxLength(), constantExpression.getScale(), 
constantExpression.getSortOrder(),
-column.getMaxLength(), 
column.getScale(),column.getSortOrder());
+column.getDataType().coerceBytes(ptr, value, 
constantExpression.getDataType(), 
+constantExpression.getMaxLength(), 
constantExpression.getScale(), constantExpression.getSortOrder(),
+column.getMaxLength(), 
column.getScale(),column.getSortOrder(),
+table.rowKeyOrderOptimizable());
 if (overlapViewColumns.contains(column)  
Bytes.compareTo(ptr.get(), ptr.getOffset(), ptr.getLength(), 
column.getViewConstant(), 0, column.getViewConstant().length-1) != 0) {
 throw new SQLExceptionInfo.Builder(
 SQLExceptionCode.CANNOT_UPDATE_VIEW_COLUMN)
@@ -814,7 +818,7 @@ public class UpsertCompiler {
 }
 }
 MapImmutableBytesPtr, RowMutationState mutation = 
Maps.newHashMapWithExpectedSize(1);
-setValues(values, pkSlotIndexes, columnIndexes, 
tableRef.getTable(), mutation, statement);
+setValues(values, pkSlotIndexes, columnIndexes, table, 
mutation, statement);
 return new MutationState(tableRef, mutation, 0, maxSize, 
connection);
 }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/2620a80c/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java
index 0cbef11..332f293 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java
@@ -61,7 +61,9 @@ import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.RowKeySchema;
 import org.apache.phoenix.schema.SaltingUtil;
 import org.apache.phoenix.schema.SortOrder;
+import org.apache.phoenix.schema.ValueSchema.Field;
 import org.apache.phoenix.schema.tuple.Tuple;
+import org.apache.phoenix.schema.types.PArrayDataType;
 import org.apache.phoenix.schema.types.PChar;
 import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.schema.types.PVarbinary;
@@ -194,8 +196,9 @@ public 

[38/50] [abbrv] phoenix git commit: PHOENIX-2111 Race condition on creation of new view and adding of column to base table

2015-07-20 Thread maryannxue
http://git-wip-us.apache.org/repos/asf/phoenix/blob/9f09f1a5/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/generated/MetaDataProtos.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/generated/MetaDataProtos.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/generated/MetaDataProtos.java
index acb32d2..a121d28 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/generated/MetaDataProtos.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/generated/MetaDataProtos.java
@@ -1811,6 +1811,16 @@ public final class MetaDataProtos {
 * <code>required int64 clientTimestamp = 5;</code>
  */
 long getClientTimestamp();
+
+// optional int32 clientVersion = 6;
+/**
+ * <code>optional int32 clientVersion = 6;</code>
+ */
+boolean hasClientVersion();
+/**
+ * <code>optional int32 clientVersion = 6;</code>
+ */
+int getClientVersion();
   }
   /**
* Protobuf type {@code GetTableRequest}
@@ -1888,6 +1898,11 @@ public final class MetaDataProtos {
   clientTimestamp_ = input.readInt64();
   break;
 }
+case 48: {
+  bitField0_ |= 0x0020;
+  clientVersion_ = input.readInt32();
+  break;
+}
   }
 }
   } catch (com.google.protobuf.InvalidProtocolBufferException e) {
@@ -2008,12 +2023,29 @@ public final class MetaDataProtos {
   return clientTimestamp_;
 }
 
+// optional int32 clientVersion = 6;
+public static final int CLIENTVERSION_FIELD_NUMBER = 6;
+private int clientVersion_;
+/**
+ * <code>optional int32 clientVersion = 6;</code>
+ */
+public boolean hasClientVersion() {
+  return ((bitField0_ & 0x0020) == 0x0020);
+}
+/**
+ * <code>optional int32 clientVersion = 6;</code>
+ */
+public int getClientVersion() {
+  return clientVersion_;
+}
+
 private void initFields() {
   tenantId_ = com.google.protobuf.ByteString.EMPTY;
   schemaName_ = com.google.protobuf.ByteString.EMPTY;
   tableName_ = com.google.protobuf.ByteString.EMPTY;
   tableTimestamp_ = 0L;
   clientTimestamp_ = 0L;
+  clientVersion_ = 0;
 }
 private byte memoizedIsInitialized = -1;
 public final boolean isInitialized() {
@@ -2062,6 +2094,9 @@ public final class MetaDataProtos {
   if (((bitField0_ & 0x0010) == 0x0010)) {
 output.writeInt64(5, clientTimestamp_);
   }
+  if (((bitField0_ & 0x0020) == 0x0020)) {
+output.writeInt32(6, clientVersion_);
+  }
   getUnknownFields().writeTo(output);
 }
 
@@ -2091,6 +2126,10 @@ public final class MetaDataProtos {
 size += com.google.protobuf.CodedOutputStream
   .computeInt64Size(5, clientTimestamp_);
   }
+  if (((bitField0_ & 0x0020) == 0x0020)) {
+size += com.google.protobuf.CodedOutputStream
+  .computeInt32Size(6, clientVersion_);
+  }
   size += getUnknownFields().getSerializedSize();
   memoizedSerializedSize = size;
   return size;
@@ -2139,6 +2178,11 @@ public final class MetaDataProtos {
 result = result && (getClientTimestamp()
 == other.getClientTimestamp());
   }
+  result = result && (hasClientVersion() == other.hasClientVersion());
+  if (hasClientVersion()) {
+result = result && (getClientVersion()
+== other.getClientVersion());
+  }
   result = result &&
   getUnknownFields().equals(other.getUnknownFields());
   return result;
@@ -2172,6 +2216,10 @@ public final class MetaDataProtos {
 hash = (37 * hash) + CLIENTTIMESTAMP_FIELD_NUMBER;
 hash = (53 * hash) + hashLong(getClientTimestamp());
   }
+  if (hasClientVersion()) {
+hash = (37 * hash) + CLIENTVERSION_FIELD_NUMBER;
+hash = (53 * hash) + getClientVersion();
+  }
   hash = (29 * hash) + getUnknownFields().hashCode();
   memoizedHashCode = hash;
   return hash;
@@ -2291,6 +2339,8 @@ public final class MetaDataProtos {
 bitField0_ = (bitField0_ & ~0x0008);
 clientTimestamp_ = 0L;
 bitField0_ = (bitField0_ & ~0x0010);
+clientVersion_ = 0;
+bitField0_ = (bitField0_ & ~0x0020);
 return this;
   }
 
@@ -2339,6 +2389,10 @@ public final class MetaDataProtos {
   to_bitField0_ |= 0x0010;
 }
 result.clientTimestamp_ = clientTimestamp_;
+if (((from_bitField0_ & 0x0020) == 0x0020)) {
+  to_bitField0_ |= 0x0020;
+}
+result.clientVersion_ = clientVersion_;
 result.bitField0_ = to_bitField0_;
 onBuilt();
 return result;
@@ -2370,6 +2424,9 @@ public final class MetaDataProtos {
 if (other.hasClientTimestamp()) {
   

[36/50] [abbrv] phoenix git commit: PHOENIX-2067 Sort order incorrect for variable length DESC columns

2015-07-20 Thread maryannxue
PHOENIX-2067 Sort order incorrect for variable length DESC columns


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/4b99c632
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/4b99c632
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/4b99c632

Branch: refs/heads/calcite
Commit: 4b99c632c5e40251451e69fbe6d108f51e549e9e
Parents: 2620a80
Author: James Taylor jtay...@salesforce.com
Authored: Tue Jul 14 13:40:58 2015 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Tue Jul 14 13:40:58 2015 -0700

--
 .../org/apache/phoenix/util/UpgradeUtil.java | 19 ---
 1 file changed, 16 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/4b99c632/phoenix-core/src/main/java/org/apache/phoenix/util/UpgradeUtil.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/util/UpgradeUtil.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/UpgradeUtil.java
index e59ea98..0ad6b9d 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/UpgradeUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/UpgradeUtil.java
@@ -80,12 +80,11 @@ import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.SaltingUtil;
 import org.apache.phoenix.schema.SortOrder;
 import org.apache.phoenix.schema.types.PBoolean;
+import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.schema.types.PDecimal;
-import org.apache.phoenix.schema.types.PDecimalArray;
 import org.apache.phoenix.schema.types.PInteger;
 import org.apache.phoenix.schema.types.PLong;
 import org.apache.phoenix.schema.types.PVarchar;
-import org.apache.phoenix.schema.types.PVarcharArray;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -878,6 +877,20 @@ public class UpgradeUtil {
 }
 return otherTables;
 }
+
+// Return all types that are not fixed width that may need upgrading due 
to PHOENIX-2067
+// We exclude VARBINARY as we no longer support DESC for it.
+private static String getAffectedDataTypes() {
+StringBuilder buf = new StringBuilder("(" + 
PVarchar.INSTANCE.getSqlType() + "," + PDecimal.INSTANCE.getSqlType() + ",");
+for (PDataType type : PDataType.values()) {
+if (type.isArrayType()) {
+buf.append(type.getSqlType());
+buf.append(',');
+}
+}
+buf.setCharAt(buf.length()-1, ')');
+return buf.toString();
+}
 /**
  * Identify the tables that need to be upgraded due to PHOENIX-2067
  */
@@ -890,7 +903,7 @@ public class UpgradeUtil {
 WHERE COLUMN_NAME IS NOT NULL\n + 
 AND COLUMN_FAMILY IS NULL\n + 
 AND SORT_ORDER =  + SortOrder.DESC.getSystemValue() + \n + 
-"AND DATA_TYPE IN (" + PVarchar.INSTANCE.getSqlType() + "," + 
PDecimal.INSTANCE.getSqlType() + "," + PVarcharArray.INSTANCE.getSqlType() + 
"," + PDecimalArray.INSTANCE.getSqlType() + ")\n" +
+"AND DATA_TYPE IN " + getAffectedDataTypes() + "\n" +
 GROUP BY TENANT_ID,TABLE_SCHEM,TABLE_NAME);
 Set<String> physicalTables = Sets.newHashSetWithExpectedSize(1024);
 List<String> remainingTableNames = addPhysicalTables(conn, rs, 
PTableType.INDEX, physicalTables);



[27/50] [abbrv] phoenix git commit: PHOENIX-978: Allow views to extend parent's PK only if parent's last PK column is fixed length

2015-07-20 Thread maryannxue
PHOENIX-978: Allow views to extend parent's PK only if parent's last PK column 
is fixed length


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/66705d51
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/66705d51
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/66705d51

Branch: refs/heads/calcite
Commit: 66705d5170496bed7c39bd484517722ac8d09a13
Parents: a772a4a
Author: Eli Levine elilev...@apache.org
Authored: Wed Jul 8 18:46:43 2015 -0700
Committer: Thomas D'Silva tdsi...@salesforce.com
Committed: Mon Jul 13 17:36:58 2015 -0700

--
 .../apache/phoenix/end2end/AlterTableIT.java|  9 ++
 .../java/org/apache/phoenix/end2end/ViewIT.java | 17 ++
 .../phoenix/exception/SQLExceptionCode.java |  1 +
 .../apache/phoenix/schema/MetaDataClient.java   | 33 
 4 files changed, 54 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/66705d51/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
index fbaded0..0425933 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
@@ -64,7 +64,6 @@ import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.ReadOnlyProps;
 import org.apache.phoenix.util.SchemaUtil;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 
 import com.google.common.base.Objects;
@@ -2132,15 +2131,13 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 }
 
 @Test
-@Ignore
-// enable this test after 
https://issues.apache.org/jira/browse/PHOENIX-978 is fixed 
 public void testAddExistingViewPkColumnToBaseTableWithViews() throws 
Exception {
 Connection conn = DriverManager.getConnection(getUrl());
 try {   
 conn.createStatement().execute(CREATE TABLE IF NOT EXISTS 
TABLEWITHVIEW (
 +  ID char(10) NOT NULL,
 +  COL1 integer NOT NULL,
-+  COL2 bigint NOT NULL,
++  COL2 integer NOT NULL,
 +  CONSTRAINT NAME_PK PRIMARY KEY (ID, COL1, COL2)
 +  ));
 assertTableDefinition(conn, TABLEWITHVIEW, PTableType.TABLE, 
null, 0, 3, -1, ID, COL1, COL2);
@@ -2169,7 +2166,7 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 }
 
 // add the pk column of the view to the base table
-conn.createStatement().execute(ALTER TABLE TABLEWITHVIEW ADD 
VIEW_COL1 DECIMAL PRIMARY KEY);
+conn.createStatement().execute(ALTER TABLE TABLEWITHVIEW ADD 
VIEW_COL1 DECIMAL(10,2) PRIMARY KEY);
 assertTableDefinition(conn, TABLEWITHVIEW, PTableType.TABLE, 
null, 1, 4, -1, ID, COL1, COL2, VIEW_COL1);
 assertTableDefinition(conn, VIEWOFTABLE, PTableType.VIEW, 
TABLEWITHVIEW, 1, 5, 4, ID, COL1, COL2, VIEW_COL1, VIEW_COL2);
 
@@ -2179,7 +2176,7 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 assertEquals(rs.getString(1), view1);
 assertEquals(rs.getInt(2), 12);
 assertEquals(rs.getInt(3), 13);
-assertEquals(rs.getInt(4), 14);
+assertEquals(rs.getBigDecimal(4).intValue(), 14);
 assertFalse(rs.next());
 
 // query view

http://git-wip-us.apache.org/repos/asf/phoenix/blob/66705d51/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
index db38ab3..1d8af35 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
@@ -18,6 +18,7 @@
 package org.apache.phoenix.end2end;
 
 import static com.google.common.collect.Lists.newArrayListWithExpectedSize;
+import static 
org.apache.phoenix.exception.SQLExceptionCode.CANNOT_MODIFY_VIEW_PK;
 import static 
org.apache.phoenix.exception.SQLExceptionCode.NOT_NULLABLE_COLUMN_IN_ROW_KEY;
 import static org.apache.phoenix.util.TestUtil.analyzeTable;
 import static org.apache.phoenix.util.TestUtil.getAllSplits;
@@ -522,6 +523,22 @@ public class ViewIT extends BaseViewIT {
 assertPKs(rs, new String[] {K1, K2, K3, K4});
 }
 
+@Test
+public void 

[20/50] [abbrv] phoenix git commit: PHOENIX-2063 Addendum patch to fix connection usage

2015-07-20 Thread maryannxue
PHOENIX-2063 Addendum patch to fix connection usage


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/997de5e6
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/997de5e6
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/997de5e6

Branch: refs/heads/calcite
Commit: 997de5e6cab988018e723ed29a53da1bae61def4
Parents: 3b1bfa0
Author: James Taylor jtay...@salesforce.com
Authored: Thu Jul 9 22:55:25 2015 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Thu Jul 9 22:55:25 2015 -0700

--
 .../phoenix/end2end/RowValueConstructorIT.java  | 28 +++-
 1 file changed, 21 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/997de5e6/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
index 0cf5455..b9e50a4 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
@@ -1426,10 +1426,11 @@ public class RowValueConstructorIT extends 
BaseClientManagedTimeIT {
 
 @Test
 public void testCountDistinct1() throws Exception {
-Connection conn = DriverManager.getConnection(getUrl());
+Connection conn = nextConnection(getUrl());
 String ddl = CREATE TABLE regions1 (region_name VARCHAR PRIMARY KEY, 
a INTEGER, b INTEGER);
 conn.createStatement().execute(ddl);
-conn.commit();
+
+conn = nextConnection(getUrl());
 PreparedStatement stmt = conn.prepareStatement(UPSERT INTO 
regions1(region_name, a, b) VALUES('a', 6,3));
 stmt.execute();
 stmt = conn.prepareStatement(UPSERT INTO regions1(region_name, a, b) 
VALUES('b', 2,4));
@@ -1437,18 +1438,23 @@ public class RowValueConstructorIT extends 
BaseClientManagedTimeIT {
 stmt = conn.prepareStatement(UPSERT INTO regions1(region_name, a, b) 
VALUES('c', 6,3));
 stmt.execute();
 conn.commit();
+
+conn = nextConnection(getUrl());
 ResultSet rs;
 rs = conn.createStatement().executeQuery(SELECT COUNT(DISTINCT (a,b)) 
from regions1);
 assertTrue(rs.next());
 assertEquals(2, rs.getInt(1));
+conn.close();
 }
 
 @Test
 public void testCountDistinct2() throws Exception {
-Connection conn = DriverManager.getConnection(getUrl());
+Connection conn = nextConnection(getUrl());
 String ddl = CREATE TABLE regions2 (region_name VARCHAR PRIMARY KEY, 
a VARCHAR, b VARCHAR);
 conn.createStatement().execute(ddl);
 conn.commit();
+
+conn = nextConnection(getUrl());
 PreparedStatement stmt = conn.prepareStatement(UPSERT INTO 
regions2(region_name, a, b) VALUES('a', 'fooo','abc'));
 stmt.execute();
 stmt = conn.prepareStatement(UPSERT INTO regions2(region_name, a, b) 
VALUES('b', 'off','bac'));
@@ -1456,6 +1462,8 @@ public class RowValueConstructorIT extends 
BaseClientManagedTimeIT {
 stmt = conn.prepareStatement(UPSERT INTO regions2(region_name, a, b) 
VALUES('c', 'fooo', 'abc'));
 stmt.execute();
 conn.commit();
+
+conn = nextConnection(getUrl());
 ResultSet rs;
 rs = conn.createStatement().executeQuery(SELECT COUNT(DISTINCT (a,b)) 
from regions2);
 assertTrue(rs.next());
@@ -1464,10 +1472,11 @@ public class RowValueConstructorIT extends 
BaseClientManagedTimeIT {
 
 @Test
 public void testCountDistinct3() throws Exception {
-Connection conn = DriverManager.getConnection(getUrl());
+Connection conn = nextConnection(getUrl());
 String ddl = CREATE TABLE regions3 (region_name VARCHAR PRIMARY KEY, 
a Boolean, b Boolean);
 conn.createStatement().execute(ddl);
-conn.commit();
+
+conn = nextConnection(getUrl());
 PreparedStatement stmt = conn.prepareStatement(UPSERT INTO 
regions3(region_name, a, b) VALUES('a', true, true));
 stmt.execute();
 stmt = conn.prepareStatement(UPSERT INTO regions3(region_name, a, b) 
VALUES('b', true, False));
@@ -1477,6 +1486,8 @@ public class RowValueConstructorIT extends 
BaseClientManagedTimeIT {
 stmt = conn.prepareStatement(UPSERT INTO regions3(region_name, a, b) 
VALUES('d', true, false));
 stmt.execute();
 conn.commit();
+
+conn = nextConnection(getUrl());
 ResultSet rs;
 rs = conn.createStatement().executeQuery(SELECT COUNT(DISTINCT (a,b)) 
from regions3);
 

[46/50] [abbrv] phoenix git commit: PHOENIX-1890 Provide queries for adding/deleting jars to/from common place in hdfs which is used by dynamic class loader(Rajeshbabu)

2015-07-20 Thread maryannxue
PHOENIX-1890 Provide queries for adding/deleting jars to/from common place in 
hdfs which is used by dynamic class loader(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f006df54
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f006df54
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f006df54

Branch: refs/heads/calcite
Commit: f006df5451859eb9d22130bb46b58460eee49674
Parents: 236ce1c
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Thu Jul 16 23:18:38 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Thu Jul 16 23:18:38 2015 +0530

--
 .../phoenix/end2end/UserDefinedFunctionsIT.java |  67 --
 phoenix-core/src/main/antlr3/PhoenixSQL.g   |  23 ++
 .../phoenix/compile/ListJarsQueryPlan.java  | 216 +++
 .../apache/phoenix/jdbc/PhoenixStatement.java   | 175 +++
 .../apache/phoenix/parse/AddJarsStatement.java  |  38 
 .../phoenix/parse/DeleteJarStatement.java   |  19 ++
 .../apache/phoenix/parse/ListJarsStatement.java |  34 +++
 .../apache/phoenix/parse/ParseNodeFactory.java  |  12 ++
 8 files changed, 564 insertions(+), 20 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f006df54/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
index e2b7b4c..cd1e380 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
@@ -207,9 +207,6 @@ public class UserDefinedFunctionsIT extends 
BaseOwnClusterIT{
 conf.set(DYNAMIC_JARS_DIR_KEY, string+/hbase/tmpjars);
 util.startMiniHBaseCluster(1, 1);
 UDFExpression.setConfig(conf);
-compileTestClass(MY_REVERSE_CLASS_NAME, MY_REVERSE_PROGRAM, 1);
-compileTestClass(MY_SUM_CLASS_NAME, MY_SUM_PROGRAM, 2);
-compileTestClass(MY_ARRAY_INDEX_CLASS_NAME, MY_ARRAY_INDEX_PROGRAM, 3);
 
 String clientPort = 
util.getConfiguration().get(QueryServices.ZOOKEEPER_PORT_ATTRIB);
 url =
@@ -217,10 +214,54 @@ public class UserDefinedFunctionsIT extends 
BaseOwnClusterIT{
 + clientPort + JDBC_PROTOCOL_TERMINATOR + 
PHOENIX_TEST_DRIVER_URL_PARAM;
 Map<String, String> props = Maps.newHashMapWithExpectedSize(1);
 props.put(QueryServices.ALLOW_USER_DEFINED_FUNCTIONS_ATTRIB, true);
+props.put(QueryServices.DYNAMIC_JARS_DIR_KEY,string+/hbase/tmpjars/);
 driver = initAndRegisterDriver(url, new 
ReadOnlyProps(props.entrySet().iterator()));
+compileTestClass(MY_REVERSE_CLASS_NAME, MY_REVERSE_PROGRAM, 1);
+compileTestClass(MY_SUM_CLASS_NAME, MY_SUM_PROGRAM, 2);
+compileTestClass(MY_ARRAY_INDEX_CLASS_NAME, MY_ARRAY_INDEX_PROGRAM, 3);
+compileTestClass(MY_ARRAY_INDEX_CLASS_NAME, MY_ARRAY_INDEX_PROGRAM, 4);
 }
 
 @Test
+public void testListJars() throws Exception {
+Connection conn = driver.connect(url, EMPTY_PROPS);
+Statement stmt = conn.createStatement();
+ResultSet rs = stmt.executeQuery(list jars);
+assertTrue(rs.next());
+
assertEquals(util.getConfiguration().get(QueryServices.DYNAMIC_JARS_DIR_KEY)+/+myjar1.jar,
 rs.getString(jar_location));
+assertTrue(rs.next());
+
assertEquals(util.getConfiguration().get(QueryServices.DYNAMIC_JARS_DIR_KEY)+/+myjar2.jar,
 rs.getString(jar_location));
+assertTrue(rs.next());
+
assertEquals(util.getConfiguration().get(QueryServices.DYNAMIC_JARS_DIR_KEY)+/+myjar3.jar,
 rs.getString(jar_location));
+assertFalse(rs.next());
+}
+
+@Test
+public void testDeleteJar() throws Exception {
+Connection conn = driver.connect(url, EMPTY_PROPS);
+Statement stmt = conn.createStatement();
+ResultSet rs = stmt.executeQuery(list jars);
+assertTrue(rs.next());
+
assertEquals(util.getConfiguration().get(QueryServices.DYNAMIC_JARS_DIR_KEY)+/+myjar1.jar,
 rs.getString(jar_location));
+assertTrue(rs.next());
+
assertEquals(util.getConfiguration().get(QueryServices.DYNAMIC_JARS_DIR_KEY)+/+myjar2.jar,
 rs.getString(jar_location));
+assertTrue(rs.next());
+
assertEquals(util.getConfiguration().get(QueryServices.DYNAMIC_JARS_DIR_KEY)+/+myjar3.jar,
 rs.getString(jar_location));
+assertTrue(rs.next());
+
assertEquals(util.getConfiguration().get(QueryServices.DYNAMIC_JARS_DIR_KEY)+/+myjar4.jar,
 

[42/50] [abbrv] phoenix git commit: PHOENIX-2121 Fix flapping RowValueConstructorIT

2015-07-20 Thread maryannxue
PHOENIX-2121 Fix flapping RowValueConstructorIT


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/553d3ccf
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/553d3ccf
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/553d3ccf

Branch: refs/heads/calcite
Commit: 553d3ccf40fbef78de9d8b14e2b774d2a2fc6597
Parents: a8f0d76
Author: Samarth samarth.j...@salesforce.com
Authored: Wed Jul 15 12:48:55 2015 -0700
Committer: Samarth samarth.j...@salesforce.com
Committed: Wed Jul 15 12:48:55 2015 -0700

--
 .../phoenix/end2end/RowValueConstructorIT.java  | 46 +++-
 1 file changed, 25 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/553d3ccf/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
index e5cfeb0..5bf0a1e 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
@@ -1430,21 +1430,22 @@ public class RowValueConstructorIT extends 
BaseClientManagedTimeIT {
 @Test
 public void testCountDistinct1() throws Exception {
 Connection conn = nextConnection(getUrl());
-String ddl = CREATE TABLE regions1 (region_name VARCHAR PRIMARY KEY, 
a INTEGER, b INTEGER);
+String tableName = testCountDistinct1rvc;
+String ddl = CREATE TABLE  + tableName +  (region_name VARCHAR 
PRIMARY KEY, a INTEGER, b INTEGER);
 conn.createStatement().execute(ddl);
 
 conn = nextConnection(getUrl());
-PreparedStatement stmt = conn.prepareStatement(UPSERT INTO 
regions1(region_name, a, b) VALUES('a', 6,3));
+PreparedStatement stmt = conn.prepareStatement(UPSERT INTO   + 
tableName +  (region_name, a, b) VALUES('a', 6,3));
 stmt.execute();
-stmt = conn.prepareStatement(UPSERT INTO regions1(region_name, a, b) 
VALUES('b', 2,4));
+stmt = conn.prepareStatement(UPSERT INTO   + tableName +  
(region_name, a, b) VALUES('b', 2,4));
 stmt.execute();
-stmt = conn.prepareStatement(UPSERT INTO regions1(region_name, a, b) 
VALUES('c', 6,3));
+stmt = conn.prepareStatement(UPSERT INTO   + tableName +  
(region_name, a, b) VALUES('c', 6,3));
 stmt.execute();
 conn.commit();
 
 conn = nextConnection(getUrl());
 ResultSet rs;
-rs = conn.createStatement().executeQuery(SELECT COUNT(DISTINCT (a,b)) 
from regions1);
+rs = conn.createStatement().executeQuery(SELECT COUNT(DISTINCT (a,b)) 
from  + tableName);
 assertTrue(rs.next());
 assertEquals(2, rs.getInt(1));
 conn.close();
@@ -1453,22 +1454,23 @@ public class RowValueConstructorIT extends 
BaseClientManagedTimeIT {
 @Test
 public void testCountDistinct2() throws Exception {
 Connection conn = nextConnection(getUrl());
-String ddl = CREATE TABLE regions2 (region_name VARCHAR PRIMARY KEY, 
a VARCHAR, b VARCHAR);
+String tableName = testCountDistinct2rvc;
+String ddl = CREATE TABLE   + tableName +   (region_name VARCHAR 
PRIMARY KEY, a VARCHAR, b VARCHAR);
 conn.createStatement().execute(ddl);
 conn.commit();
 
 conn = nextConnection(getUrl());
-PreparedStatement stmt = conn.prepareStatement(UPSERT INTO 
regions2(region_name, a, b) VALUES('a', 'fooo','abc'));
+PreparedStatement stmt = conn.prepareStatement(UPSERT INTO   + 
tableName +  (region_name, a, b) VALUES('a', 'fooo','abc'));
 stmt.execute();
-stmt = conn.prepareStatement(UPSERT INTO regions2(region_name, a, b) 
VALUES('b', 'off','bac'));
+stmt = conn.prepareStatement(UPSERT INTO   + tableName +  
(region_name, a, b) VALUES('b', 'off','bac'));
 stmt.execute();
-stmt = conn.prepareStatement(UPSERT INTO regions2(region_name, a, b) 
VALUES('c', 'fooo', 'abc'));
+stmt = conn.prepareStatement(UPSERT INTO   + tableName +  
(region_name, a, b) VALUES('c', 'fooo', 'abc'));
 stmt.execute();
 conn.commit();
 
 conn = nextConnection(getUrl());
 ResultSet rs;
-rs = conn.createStatement().executeQuery(SELECT COUNT(DISTINCT (a,b)) 
from regions2);
+rs = conn.createStatement().executeQuery(SELECT COUNT(DISTINCT (a,b)) 
from   + tableName);
 assertTrue(rs.next());
 assertEquals(2, rs.getInt(1));
 }
@@ -1476,23 +1478,24 @@ public class RowValueConstructorIT extends 
BaseClientManagedTimeIT {
 @Test
 public void 

[08/50] [abbrv] phoenix git commit: PHOENIX-2025 Phoenix-core's hbase-default.xml prevents HBaseTestingUtility from starting up in client apps (Mujtaba Chohan)

2015-07-20 Thread maryannxue
PHOENIX-2025 Phoenix-core's hbase-default.xml prevents HBaseTestingUtility from 
starting up in client apps (Mujtaba Chohan)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/dee7a02f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/dee7a02f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/dee7a02f

Branch: refs/heads/calcite
Commit: dee7a02f92ec3928a76c0d7536bd69f65fb1d4ef
Parents: d0c8f9d
Author: Thomas D'Silva tdsi...@salesforce.com
Authored: Tue Jul 7 13:40:18 2015 -0700
Committer: Thomas D'Silva tdsi...@salesforce.com
Committed: Tue Jul 7 13:40:18 2015 -0700

--
 .../apache/phoenix/mapreduce/util/ConnectionUtil.java   |  9 +
 .../mapreduce/util/PhoenixConfigurationUtil.java| 12 
 2 files changed, 17 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/dee7a02f/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/ConnectionUtil.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/ConnectionUtil.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/ConnectionUtil.java
index 294d4e9..000ce59 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/ConnectionUtil.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/ConnectionUtil.java
@@ -54,7 +54,7 @@ public class ConnectionUtil {
  */
 public static Connection getInputConnection(final Configuration conf , 
final Properties props) throws SQLException {
 Preconditions.checkNotNull(conf);
-return getConnection(PhoenixConfigurationUtil.getInputCluster(conf),
+return getConnection(PhoenixConfigurationUtil.getInputCluster(conf), 
PhoenixConfigurationUtil.getClientPort(conf),
 PropertiesUtil.extractProperties(props, conf));
 }
 
@@ -77,7 +77,7 @@ public class ConnectionUtil {
  */
 public static Connection getOutputConnection(final Configuration conf, 
Properties props) throws SQLException {
 Preconditions.checkNotNull(conf);
-return getConnection(PhoenixConfigurationUtil.getOutputCluster(conf),
+return getConnection(PhoenixConfigurationUtil.getOutputCluster(conf), 
PhoenixConfigurationUtil.getClientPort(conf),
 PropertiesUtil.extractProperties(props, conf));
 }
 
@@ -85,11 +85,12 @@ public class ConnectionUtil {
  * Returns the {@link Connection} from a ZooKeeper cluster string.
  *
  * @param quorum a ZooKeeper quorum connection string
+ * @param clientPort a ZooKeeper client port
  * @return a Phoenix connection to the given connection string
  */
-private static Connection getConnection(final String quorum, Properties 
props) throws SQLException {
+private static Connection getConnection(final String quorum, final int 
clientPort, Properties props) throws SQLException {
 Preconditions.checkNotNull(quorum);
-return DriverManager.getConnection(QueryUtil.getUrl(quorum), props);
+return DriverManager.getConnection(QueryUtil.getUrl(quorum, 
clientPort), props);
 }
 
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/dee7a02f/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixConfigurationUtil.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixConfigurationUtil.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixConfigurationUtil.java
index 6e0e5e4..bba96ac 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixConfigurationUtil.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixConfigurationUtil.java
@@ -94,6 +94,8 @@ public final class PhoenixConfigurationUtil {
 public static final String MAPREDUCE_INPUT_CLUSTER_QUORUM = 
phoenix.mapreduce.input.cluster.quorum;
 
 public static final String MAPREDUCE_OUTPUT_CLUSTER_QUORUM = 
phoneix.mapreduce.output.cluster.quorum;
+
+public static final String HBASE_ZOOKEEPER_CLIENT_PORT = 
hbase.zookeeper.property.clientPort;
 
 public enum SchemaType {
 TABLE,
@@ -363,6 +365,16 @@ public final class PhoenixConfigurationUtil {
 }
 return quorum;
 }
+
+/**
+ * Returns the HBase Client Port
+ * @param configuration
+ * @return
+ */
+public static int getClientPort(final Configuration configuration) {
+Preconditions.checkNotNull(configuration);
+return 
Integer.parseInt(configuration.get(HBASE_ZOOKEEPER_CLIENT_PORT));
+}
 
 public static void loadHBaseConfiguration(Job job) throws IOException {
 // load 

[44/50] [abbrv] phoenix git commit: PHOENIX-2067 Sort order incorrect for variable length DESC columns - ARRAY addendum (Dumindu Buddhika)

2015-07-20 Thread maryannxue
PHOENIX-2067 Sort order incorrect for variable length DESC columns - ARRAY
addendum (Dumindu Buddhika)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/33d60506
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/33d60506
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/33d60506

Branch: refs/heads/calcite
Commit: 33d60506c5f2d4408a1df79f278d7a45d3401a27
Parents: ec93ec7
Author: ramkrishna ramkrishna.s.vasude...@gmail.com
Authored: Thu Jul 16 21:13:31 2015 +0530
Committer: ramkrishna ramkrishna.s.vasude...@gmail.com
Committed: Thu Jul 16 21:13:31 2015 +0530

--
 .../phoenix/schema/types/PArrayDataType.java|  14 +-
 .../expression/ArrayAppendFunctionTest.java |  76 ---
 .../expression/ArrayConcatFunctionTest.java | 129 +++
 .../expression/ArrayPrependFunctionTest.java|  61 +
 .../schema/types/PDataTypeForArraysTest.java|  70 +-
 5 files changed, 324 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/33d60506/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PArrayDataType.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PArrayDataType.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PArrayDataType.java
index dd11569..9ca64c5 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PArrayDataType.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PArrayDataType.java
@@ -379,10 +379,10 @@ public abstract class PArrayDataTypeT extends 
PDataTypeT {
 int currOffset = getOffset(bytes, arrayIndex, useShort, 
indexOffset);
 int elementLength = 0;
 if (arrayIndex == (noOfElements - 1)) {
-elementLength = bytes[currOffset + initPos] == 
QueryConstants.SEPARATOR_BYTE ? 0 : indexOffset
+elementLength = (bytes[currOffset + initPos] == 
QueryConstants.SEPARATOR_BYTE || bytes[currOffset + initPos] == 
QueryConstants.DESC_SEPARATOR_BYTE) ? 0 : indexOffset
 - (currOffset + initPos) - 3;
 } else {
-elementLength = bytes[currOffset + initPos] == 
QueryConstants.SEPARATOR_BYTE ? 0 : getOffset(bytes,
+elementLength = (bytes[currOffset + initPos] == 
QueryConstants.SEPARATOR_BYTE || bytes[currOffset + initPos] == 
QueryConstants.DESC_SEPARATOR_BYTE) ? 0 : getOffset(bytes,
 arrayIndex + 1, useShort, indexOffset) - 
currOffset - 1;
 }
 ptr.set(bytes, currOffset + initPos, elementLength);
@@ -426,10 +426,10 @@ public abstract class PArrayDataTypeT extends 
PDataTypeT {
 int currOffset = getOffset(bytes, arrayIndex, useShort, 
indexOffset);
 int elementLength = 0;
 if (arrayIndex == (noOfElements - 1)) {
-elementLength = bytes[currOffset + offset] == 
QueryConstants.SEPARATOR_BYTE ? 0 : indexOffset
+elementLength = (bytes[currOffset + offset] == 
QueryConstants.SEPARATOR_BYTE || bytes[currOffset + offset] == 
QueryConstants.DESC_SEPARATOR_BYTE) ? 0 : indexOffset
 - (currOffset + offset) - 3;
 } else {
-elementLength = bytes[currOffset + offset] == 
QueryConstants.SEPARATOR_BYTE ? 0 : getOffset(bytes,
+elementLength = (bytes[currOffset + offset] == 
QueryConstants.SEPARATOR_BYTE || bytes[currOffset + offset] == 
QueryConstants.DESC_SEPARATOR_BYTE) ? 0 : getOffset(bytes,
 arrayIndex + 1, useShort, indexOffset) - 
currOffset - 1;
 }
 ptr.set(bytes, currOffset + offset, elementLength);
@@ -831,7 +831,7 @@ public abstract class PArrayDataTypeT extends 
PDataTypeT {
 // count nulls at the end of array 1
 for (int index = actualLengthOfArray1 - 1; index  -1; index--) {
 int offset = getOffset(array1Bytes, index, !useIntArray1, 
array1BytesOffset + offsetArrayPositionArray1);
-if (array1Bytes[array1BytesOffset + offset] == 
QueryConstants.SEPARATOR_BYTE) {
+if (array1Bytes[array1BytesOffset + offset] == 
QueryConstants.SEPARATOR_BYTE || array1Bytes[array1BytesOffset + offset] == 
QueryConstants.DESC_SEPARATOR_BYTE) {
 nullsAtTheEndOfArray1++;
 } else {
 break;
@@ -1064,11 +1064,11 @@ public abstract class PArrayDataTypeT extends 
PDataTypeT {
 nextOff = getOffset(indexArr, countOfElementsRead + 1, 
useShort, indexOffset);
 }

[41/50] [abbrv] phoenix git commit: PHOENIX-2118 Remove/modify usages of Guava StopWatch and deprecated ComparisonChain methods

2015-07-20 Thread maryannxue
PHOENIX-2118 Remove/modify usages of Guava StopWatch and deprecated 
ComparisonChain methods


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a8f0d769
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a8f0d769
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a8f0d769

Branch: refs/heads/calcite
Commit: a8f0d7696b6ee105d55c7d3dce50563e816cd857
Parents: cf2bc55
Author: Samarth samarth.j...@salesforce.com
Authored: Wed Jul 15 12:21:09 2015 -0700
Committer: Samarth samarth.j...@salesforce.com
Committed: Wed Jul 15 12:21:09 2015 -0700

--
 .../phoenix/monitoring/MetricsStopWatch.java|  8 +-
 .../query/ConnectionQueryServicesImpl.java  |  4 +-
 .../java/org/apache/phoenix/query/KeyRange.java | 13 +---
 .../apache/phoenix/util/PhoenixStopWatch.java   | 81 
 4 files changed, 91 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a8f0d769/phoenix-core/src/main/java/org/apache/phoenix/monitoring/MetricsStopWatch.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/monitoring/MetricsStopWatch.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/monitoring/MetricsStopWatch.java
index bffb9ad..ee260a8 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/monitoring/MetricsStopWatch.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/monitoring/MetricsStopWatch.java
@@ -17,23 +17,23 @@
  */
 package org.apache.phoenix.monitoring;
 
-import com.google.common.base.Stopwatch;
+import org.apache.phoenix.util.PhoenixStopWatch;
 
 /**
  * 
  * Stop watch that is cognizant of the fact whether or not metrics is enabled.
  * If metrics isn't enabled it doesn't do anything. Otherwise, it delegates
- * calls to a {@code Stopwatch}.
+ * calls to a {@code PhoenixStopWatch}.
  *
  */
 final class MetricsStopWatch {
 
 private final boolean isMetricsEnabled;
-private final Stopwatch stopwatch;
+private final PhoenixStopWatch stopwatch;
 
 MetricsStopWatch(boolean isMetricsEnabled) {
 this.isMetricsEnabled = isMetricsEnabled;
-this.stopwatch = new Stopwatch();
+this.stopwatch = new PhoenixStopWatch();
 }
 
 void start()  {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a8f0d769/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index 52b038b..a17e28a 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@ -158,6 +158,7 @@ import org.apache.phoenix.util.JDBCUtil;
 import org.apache.phoenix.util.MetaDataUtil;
 import org.apache.phoenix.util.PhoenixContextExecutor;
 import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.PhoenixStopWatch;
 import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.ReadOnlyProps;
 import org.apache.phoenix.util.SchemaUtil;
@@ -167,7 +168,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Joiner;
-import com.google.common.base.Stopwatch;
 import com.google.common.base.Throwables;
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
@@ -757,7 +757,7 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 
QueryServicesOptions.DEFAULT_DELAY_FOR_SCHEMA_UPDATE_CHECK);
 boolean success = false;
 int numTries = 1;
-Stopwatch watch = new Stopwatch();
+PhoenixStopWatch watch = new PhoenixStopWatch();
 watch.start();
 do {
 try {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a8f0d769/phoenix-core/src/main/java/org/apache/phoenix/query/KeyRange.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/query/KeyRange.java 
b/phoenix-core/src/main/java/org/apache/phoenix/query/KeyRange.java
index bca55e8..0612046 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/query/KeyRange.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/query/KeyRange.java
@@ -80,23 +80,18 @@ public class KeyRange implements Writable {
 }
 };
 public static final ComparatorKeyRange COMPARATOR = new 
ComparatorKeyRange() {
-@SuppressWarnings(deprecation)
 @Override public int compare(KeyRange o1, KeyRange o2) {
   

[05/50] [abbrv] phoenix git commit: Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/phoenix

2015-07-20 Thread maryannxue
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/phoenix


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/1e606d57
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/1e606d57
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/1e606d57

Branch: refs/heads/calcite
Commit: 1e606d579965b97254f2c30399ace004a8912a31
Parents: ade12a7 c398e18
Author: maryannxue wei@intel.com
Authored: Sat Jul 4 14:05:00 2015 -0400
Committer: maryannxue wei@intel.com
Committed: Sat Jul 4 14:05:00 2015 -0400

--
 .../util/PhoenixConfigurationUtilTest.java  | 28 +--
 .../org/apache/phoenix/util/QueryUtilTest.java  | 36 ++--
 2 files changed, 58 insertions(+), 6 deletions(-)
--




[04/50] [abbrv] phoenix git commit: Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/phoenix

2015-07-20 Thread maryannxue
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/phoenix


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ade12a78
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ade12a78
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ade12a78

Branch: refs/heads/calcite
Commit: ade12a787401d0bc67feb7e9c9dc171ca6cc27fb
Parents: df42fa1 be5aba5
Author: maryannxue wei@intel.com
Authored: Sat Jul 4 13:11:21 2015 -0400
Committer: maryannxue wei@intel.com
Committed: Sat Jul 4 13:11:21 2015 -0400

--
 .../org/apache/phoenix/end2end/ArrayIT.java | 119 +++
 .../expression/ArrayConstructorExpression.java  |  19 +--
 .../org/apache/phoenix/util/PhoenixRuntime.java |   3 +-
 .../java/org/apache/phoenix/util/QueryUtil.java |   4 +-
 .../org/apache/phoenix/util/SchemaUtil.java |  18 ++-
 .../phoenix/pig/PhoenixHBaseLoaderIT.java   |   9 +-
 .../phoenix/pig/PhoenixHBaseStorerIT.java   |   4 +-
 .../apache/phoenix/pig/PhoenixHBaseStorage.java |  51 
 8 files changed, 178 insertions(+), 49 deletions(-)
--




[50/50] [abbrv] phoenix git commit: Fix compilation errors after merge

2015-07-20 Thread maryannxue
Fix compilation errors after merge


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/76e92a96
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/76e92a96
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/76e92a96

Branch: refs/heads/calcite
Commit: 76e92a96127fb9c828dab95dc7e6e66e0e4adc12
Parents: 597e001
Author: maryannxue wei@intel.com
Authored: Mon Jul 20 13:14:07 2015 -0400
Committer: maryannxue wei@intel.com
Committed: Mon Jul 20 13:14:07 2015 -0400

--
 .../it/java/org/apache/phoenix/calcite/CalciteTest.java | 12 ++--
 .../phoenix/calcite/rel/PhoenixRelImplementorImpl.java  |  2 +-
 2 files changed, 7 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/76e92a96/phoenix-core/src/it/java/org/apache/phoenix/calcite/CalciteTest.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/calcite/CalciteTest.java 
b/phoenix-core/src/it/java/org/apache/phoenix/calcite/CalciteTest.java
index 1452ac9..08d272f 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/calcite/CalciteTest.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/calcite/CalciteTest.java
@@ -34,7 +34,7 @@ public class CalciteTest extends BaseClientManagedTimeIT {
 public static final String ATABLE_NAME = ATABLE;
 
 public static Start start() {
-return new Start(new Properties(), false);
+return new Start(getConnectionProps(false), false);
 }
 
 public static Start start(Properties props, boolean connectUsingModel) {
@@ -238,11 +238,11 @@ public class CalciteTest extends BaseClientManagedTimeIT {
 return connection;
 }
 
-private static Properties getMaterializationEnabledProps() {
+private static Properties getConnectionProps(boolean 
enableMaterialization) {
 Properties props = new Properties();
 props.setProperty(
 CalciteConnectionProperty.MATERIALIZATIONS_ENABLED.camelName(),
-Boolean.toString(true));
+Boolean.toString(enableMaterialization));
 props.setProperty(
 CalciteConnectionProperty.CREATE_MATERIALIZATIONS.camelName(),
 Boolean.toString(false));
@@ -859,7 +859,7 @@ public class CalciteTest extends BaseClientManagedTimeIT {
 } catch (Exception e) {
 throw new RuntimeException(e);
 }
-final Start start = start(getMaterializationEnabledProps(), false);
+final Start start = start(getConnectionProps(true), false);
 start.sql(select x_integer from aTable)
 .explainIs(PhoenixToEnumerableConverter\n +
  PhoenixToClientConverter\n +
@@ -888,7 +888,7 @@ public class CalciteTest extends BaseClientManagedTimeIT {
 start.sql(select a_string, b_string from aTable where a_string = 'a')
 .explainIs(PhoenixToEnumerableConverter\n +
  PhoenixToClientConverter\n +
-   PhoenixServerProject(0:A_STRING=[$0], 
0:B_STRING=[$3])\n +
+   PhoenixServerProject(A_STRING=[$0], 
B_STRING=[$3])\n +
  PhoenixTableScan(table=[[phoenix, IDX1]], 
filter=[=($0, 'a')])\n)
 .close();
 start.sql(select a_string, b_string from aTable where b_string = 'b')
@@ -902,7 +902,7 @@ public class CalciteTest extends BaseClientManagedTimeIT {
  PhoenixToClientConverter\n +
PhoenixServerProject(A_STRING=[$3], B_STRING=[$0], 
X_INTEGER=[$10], Y_INTEGER=[$11])\n +
  PhoenixTableScan(table=[[phoenix, IDX_FULL]], 
filter=[=($0, 'b')])\n)
-.close();
+.close();
 }
 
 @Test public void testConnectJoinHsqldb() {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/76e92a96/phoenix-core/src/main/java/org/apache/phoenix/calcite/rel/PhoenixRelImplementorImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/calcite/rel/PhoenixRelImplementorImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/calcite/rel/PhoenixRelImplementorImpl.java
index 2ae3838..28b4f51 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/calcite/rel/PhoenixRelImplementorImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/calcite/rel/PhoenixRelImplementorImpl.java
@@ -119,7 +119,7 @@ public class PhoenixRelImplementorImpl implements 
PhoenixRel.Implementor {
 PTableType.SUBQUERY, null, 
MetaDataProtocol.MIN_TABLE_TIMESTAMP, PTable.INITIAL_SEQ_NUM,
 null, null, columns, null, null, 
Collections.PTableemptyList(),
   

[10/50] [abbrv] phoenix git commit: PHOENIX-2095 Lower the default for phoenix.sequence.saltBuckets

2015-07-20 Thread maryannxue
PHOENIX-2095 Lower the default for phoenix.sequence.saltBuckets


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/efb941ae
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/efb941ae
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/efb941ae

Branch: refs/heads/calcite
Commit: efb941aef6062fe704460fd37f9cc062c2ed2eee
Parents: 973bccb
Author: Andrew Purtell apurt...@apache.org
Authored: Wed Jul 8 09:34:20 2015 -0700
Committer: Andrew Purtell apurt...@apache.org
Committed: Wed Jul 8 09:34:20 2015 -0700

--
 .../main/java/org/apache/phoenix/query/QueryServicesOptions.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/efb941ae/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java 
b/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java
index 3efd79f..ea81cf5 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java
@@ -174,7 +174,7 @@ public class QueryServicesOptions {
 /**
  * Use only first time SYSTEM.SEQUENCE table is created.
  */
-public static final int DEFAULT_SEQUENCE_TABLE_SALT_BUCKETS = 
SaltingUtil.MAX_BUCKET_NUM;
+public static final int DEFAULT_SEQUENCE_TABLE_SALT_BUCKETS = 0;
 /**
  * Default value for coprocessor priority is between SYSTEM and USER 
priority.
  */



[48/50] [abbrv] phoenix git commit: PHOENIX-2131 Closing paren in CastParseNode SQL

2015-07-20 Thread maryannxue
PHOENIX-2131 Closing paren in CastParseNode SQL

Add a missing closing parenthesis in CastParseNode.toSQL.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b38a6243
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b38a6243
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b38a6243

Branch: refs/heads/calcite
Commit: b38a62431ee44df171c913097d18e2433c951466
Parents: b329e85
Author: Gabriel Reid gr...@apache.org
Authored: Sun Jul 19 17:46:48 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jul 20 15:25:01 2015 +0200

--
 .../org/apache/phoenix/parse/CastParseNode.java |  2 +-
 .../apache/phoenix/parse/CastParseNodeTest.java | 57 
 2 files changed, 58 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b38a6243/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java 
b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
index 78be616..3e03613 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
@@ -133,7 +133,7 @@ public class CastParseNode extends UnaryParseNode {
 if (isArray) {
 buf.append(' ');
 buf.append(PDataType.ARRAY_TYPE_SUFFIX);
-buf.append(' ');
 }
+buf.append());
 }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b38a6243/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
new file mode 100644
index 000..b62d9a9
--- /dev/null
+++ b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.phoenix.parse;
+
+import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.schema.types.PDecimal;
+import org.apache.phoenix.schema.types.PDouble;
+import org.apache.phoenix.schema.types.PLong;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class CastParseNodeTest {
+
+@Test
+public void testToSQL() {
+ColumnParseNode columnParseNode = new 
ColumnParseNode(TableName.create(SCHEMA1, TABLE1), V);
+CastParseNode castParseNode = new CastParseNode(columnParseNode, 
PLong.INSTANCE, null, null, false);
+StringBuilder stringBuilder = new StringBuilder();
+castParseNode.toSQL(null, stringBuilder);
+assertEquals( CAST(TABLE1.V AS BIGINT), stringBuilder.toString());
+}
+
+@Test
+public void testToSQL_WithLengthAndScale() {
+ColumnParseNode columnParseNode = new 
ColumnParseNode(TableName.create(SCHEMA1, TABLE1), V);
+CastParseNode castParseNode = new CastParseNode(columnParseNode, 
PDecimal.INSTANCE, 5, 3, false);
+StringBuilder stringBuilder = new StringBuilder();
+castParseNode.toSQL(null, stringBuilder);
+assertEquals( CAST(TABLE1.V AS DECIMAL(5,3)), 
stringBuilder.toString());
+}
+
+@Test
+public void testToSQL_ArrayType() {
+ColumnParseNode columnParseNode = new 
ColumnParseNode(TableName.create(SCHEMA1, TABLE1), V);
+CastParseNode castParseNode = new CastParseNode(columnParseNode, 
PLong.INSTANCE, null, null, true);
+StringBuilder stringBuilder = new StringBuilder();
+castParseNode.toSQL(null, stringBuilder);
+assertEquals( CAST(TABLE1.V AS BIGINT ARRAY), 
stringBuilder.toString());
+}
+}
\ No newline at end of file



[45/50] [abbrv] phoenix git commit: PHOENIX-2101 Implement ARRAY_TO_STRING built in function (Dumindu Buddhika)

2015-07-20 Thread maryannxue
PHOENIX-2101 Implement ARRAY_TO_STRING built in function (Dumindu
Buddhika)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/236ce1c8
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/236ce1c8
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/236ce1c8

Branch: refs/heads/calcite
Commit: 236ce1c87c729c9beba5985e4fe3871e300176e6
Parents: 33d6050
Author: ramkrishna ramkrishna.s.vasude...@gmail.com
Authored: Thu Jul 16 21:20:36 2015 +0530
Committer: ramkrishna ramkrishna.s.vasude...@gmail.com
Committed: Thu Jul 16 21:20:36 2015 +0530

--
 .../end2end/ArrayToStringFunctionIT.java| 654 +++
 .../phoenix/expression/ExpressionType.java  |   4 +-
 .../function/ArrayToStringFunction.java |  84 +++
 .../phoenix/schema/types/PArrayDataType.java|  30 +
 .../expression/ArrayToStringFunctionTest.java   | 374 +++
 5 files changed, 1145 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/236ce1c8/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayToStringFunctionIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayToStringFunctionIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayToStringFunctionIT.java
new file mode 100644
index 000..3b3f1d5
--- /dev/null
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayToStringFunctionIT.java
@@ -0,0 +1,654 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.end2end;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.sql.*;
+
+import org.junit.Test;
+
+public class ArrayToStringFunctionIT extends BaseHBaseManagedTimeIT {
+private void initTables(Connection conn) throws Exception {
+String ddl = CREATE TABLE regions (region_name VARCHAR PRIMARY 
KEY,varchars VARCHAR[],integers INTEGER[],doubles DOUBLE[],bigints 
BIGINT[],chars CHAR(15)[],double1 DOUBLE,varchar1 VARCHAR,nullcheck 
INTEGER,chars2 CHAR(15)[]);
+conn.createStatement().execute(ddl);
+String dml = UPSERT INTO 
regions(region_name,varchars,integers,doubles,bigints,chars,double1,varchar1,nullcheck,chars2)
 VALUES('SF Bay Area', +
+ARRAY['2345','46345','23234'], +
+ARRAY[2345,46345,23234,456], +
+ARRAY[23.45,46.345,23.234,45.6,5.78], +
+ARRAY[12,34,56,78,910], +
+ARRAY['a','','c','ddd','e'], +
+23.45, +
+', ', +
+NULL, +
+ARRAY['a','','c','ddd','e','foo'] +
+);
+PreparedStatement stmt = conn.prepareStatement(dml);
+stmt.execute();
+conn.commit();
+}
+
+@Test
+public void testArrayToStringFunctionVarchar1() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+initTables(conn);
+
+ResultSet rs;
+rs = conn.createStatement().executeQuery(SELECT 
ARRAY_TO_STRING(varchars, ',','*') FROM regions WHERE region_name = 'SF Bay 
Area');
+assertTrue(rs.next());
+
+String expected = 2345,46345,23234;
+
+assertEquals(expected, rs.getString(1));
+assertFalse(rs.next());
+}
+
+@Test
+public void testArrayToStringFunctionVarchar2() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+initTables(conn);
+
+ResultSet rs;
+rs = conn.createStatement().executeQuery(SELECT 
ARRAY_TO_STRING(varchars, ',') FROM regions WHERE region_name = 'SF Bay Area');
+assertTrue(rs.next());
+
+String expected = 2345,46345,23234;
+
+assertEquals(expected, rs.getString(1));
+assertFalse(rs.next());
+}
+
+@Test
+public void testArrayToStringFunctionVarchar3() throws Exception {
+Connection conn = 

[17/50] [abbrv] phoenix git commit: PHOENIX-2103 : Pig tests aren't dropping tables as expected between test runs

2015-07-20 Thread maryannxue
PHOENIX-2103 : Pig tests aren't dropping tables as expected between test runs


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/984e6222
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/984e6222
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/984e6222

Branch: refs/heads/calcite
Commit: 984e62223c8aa507e9c044cecfc7fc92ffa42522
Parents: 11bdb0e
Author: ravimagham ravimag...@apache.org
Authored: Thu Jul 9 20:20:32 2015 -0700
Committer: ravimagham ravimag...@apache.org
Committed: Thu Jul 9 20:20:32 2015 -0700

--
 .../phoenix/pig/PhoenixHBaseLoaderIT.java   | 134 ---
 1 file changed, 53 insertions(+), 81 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/984e6222/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
--
diff --git 
a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java 
b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
index 7fcf6ac..53a62ee 100644
--- a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
+++ b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
@@ -19,9 +19,11 @@
  */
 package org.apache.phoenix.pig;
 
-import static org.apache.phoenix.query.BaseTest.setUpConfigForMiniCluster;
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR;
+import static org.apache.phoenix.util.TestUtil.LOCALHOST;
 
 import java.sql.Connection;
 import java.sql.DriverManager;
@@ -35,18 +37,11 @@ import java.util.Properties;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
-import org.apache.phoenix.jdbc.PhoenixDriver;
-import org.apache.phoenix.query.QueryServices;
-import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT;
 import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.SchemaUtil;
-import org.apache.phoenix.util.TestUtil;
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
-import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
 import org.apache.pig.builtin.mock.Storage;
 import org.apache.pig.builtin.mock.Storage.Data;
 import org.apache.pig.data.DataType;
@@ -54,20 +49,15 @@ import org.apache.pig.data.Tuple;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
 import org.junit.After;
-import org.junit.AfterClass;
 import org.junit.Before;
-import org.junit.BeforeClass;
 import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
 import com.google.common.base.Preconditions;
 
 /**
  * 
  * Test class to run all the integration tests against a virtual map reduce 
cluster.
  */
-@Category(NeedsOwnMiniClusterTest.class)
-public class PhoenixHBaseLoaderIT {
+public class PhoenixHBaseLoaderIT extends BaseHBaseManagedTimeIT {
 
 private static final Log LOG = 
LogFactory.getLog(PhoenixHBaseLoaderIT.class);
 private static final String SCHEMA_NAME = T;
@@ -76,32 +66,16 @@ public class PhoenixHBaseLoaderIT {
 private static final String TABLE_FULL_NAME = 
SchemaUtil.getTableName(SCHEMA_NAME, TABLE_NAME);
 private static final String CASE_SENSITIVE_TABLE_NAME = 
SchemaUtil.getEscapedArgument(a);
 private static final String CASE_SENSITIVE_TABLE_FULL_NAME = 
SchemaUtil.getTableName(SCHEMA_NAME,CASE_SENSITIVE_TABLE_NAME);
-private static HBaseTestingUtility hbaseTestUtil;
-private static String zkQuorum;
-private static Connection conn;
-private static PigServer pigServer;
-private static Configuration conf;
-
-@BeforeClass
-public static void setUpBeforeClass() throws Exception {
-hbaseTestUtil = new HBaseTestingUtility();
-conf = hbaseTestUtil.getConfiguration();
-setUpConfigForMiniCluster(conf);
-conf.set(QueryServices.DROP_METADATA_ATTRIB, Boolean.toString(true));
-hbaseTestUtil.startMiniCluster();
+private String zkQuorum;
+private Connection conn;
+private PigServer pigServer;
 
-Class.forName(PhoenixDriver.class.getName());
-zkQuorum = localhost: + hbaseTestUtil.getZkCluster().getClientPort();
-Properties props = PropertiesUtil.deepCopy(TestUtil.TEST_PROPERTIES);
-props.put(QueryServices.DROP_METADATA_ATTRIB, Boolean.toString(true));
-conn = 

[24/50] [abbrv] phoenix git commit: PHOENIX-2113 Include log4j.properties in Pherf so runtime logging is displayed on console by default

2015-07-20 Thread maryannxue
PHOENIX-2113 Include log4j.properties in Pherf so runtime logging is displayed 
on console by default


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/052836c7
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/052836c7
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/052836c7

Branch: refs/heads/calcite
Commit: 052836c72c97de6580dda22fb485b7e3fa6d277d
Parents: 300de02
Author: Mujtaba mujt...@apache.org
Authored: Mon Jul 13 11:50:47 2015 -0700
Committer: Mujtaba mujt...@apache.org
Committed: Mon Jul 13 11:50:47 2015 -0700

--
 phoenix-pherf/config/log4j.properties | 58 ++
 1 file changed, 58 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/052836c7/phoenix-pherf/config/log4j.properties
--
diff --git a/phoenix-pherf/config/log4j.properties 
b/phoenix-pherf/config/log4j.properties
new file mode 100644
index 000..a6673c4
--- /dev/null
+++ b/phoenix-pherf/config/log4j.properties
@@ -0,0 +1,58 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# License); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+log4j.rootLogger=INFO,console
+log4j.threshold=INFO
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFA.File=pherf.log
+
+# Rollver at midnight
+log4j.appender.DRFA.DatePattern=.-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+
+# Debugging Pattern format
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} 
(%F:%M(%L)) - %m%n
+
+#
+# Null Appender
+#
+log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
+
+#
+# console
+# Add console to rootlogger above if you want to use this 
+#
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p 
%c{2}: %m%n
+
+# Custom Logging levels
+log4j.logger.org.apache.zookeeper=ERROR
+log4j.logger.org.apache.hadoop.hbase.zookeeper.RecoverableZooKeeper=ERROR
+log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=ERROR
+log4j.logger.org.apache.hadoop.hbase.HBaseConfiguration=ERROR



[35/50] [abbrv] phoenix git commit: PHOENIX-2067 Sort order incorrect for variable length DESC columns

2015-07-20 Thread maryannxue
PHOENIX-2067 Sort order incorrect for variable length DESC columns


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/2620a80c
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/2620a80c
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/2620a80c

Branch: refs/heads/calcite
Commit: 2620a80c1e35c0d214f06a1b16e99da5415a1a2c
Parents: 01b4f60
Author: James Taylor jtay...@salesforce.com
Authored: Mon Jul 13 11:17:37 2015 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Tue Jul 14 10:55:27 2015 -0700

--
 dev/eclipse_prefs_phoenix.epf   |2 +-
 .../org/apache/phoenix/end2end/ArrayIT.java |   59 +
 .../org/apache/phoenix/end2end/IsNullIT.java|   52 +-
 .../apache/phoenix/end2end/LpadFunctionIT.java  |   24 +
 .../apache/phoenix/end2end/ReverseScanIT.java   |   30 +
 .../phoenix/end2end/RowValueConstructorIT.java  |7 +-
 .../apache/phoenix/end2end/SortOrderFIT.java|  563 -
 .../org/apache/phoenix/end2end/SortOrderIT.java |  572 +
 .../apache/phoenix/compile/FromCompiler.java|3 +-
 .../apache/phoenix/compile/JoinCompiler.java|8 +-
 .../apache/phoenix/compile/OrderByCompiler.java |4 +-
 .../phoenix/compile/OrderPreservingTracker.java |7 +-
 .../org/apache/phoenix/compile/ScanRanges.java  |5 +-
 .../compile/TupleProjectionCompiler.java|4 +-
 .../apache/phoenix/compile/UnionCompiler.java   |5 +-
 .../apache/phoenix/compile/UpsertCompiler.java  |   16 +-
 .../apache/phoenix/compile/WhereOptimizer.java  |   53 +-
 .../coprocessor/BaseScannerRegionObserver.java  |2 +
 .../coprocessor/MetaDataEndpointImpl.java   |   73 +-
 .../UngroupedAggregateRegionObserver.java   |  108 +-
 .../coprocessor/generated/PTableProtos.java |  105 +-
 .../phoenix/exception/SQLExceptionCode.java |1 +
 .../apache/phoenix/execute/BaseQueryPlan.java   |   14 +-
 .../DescVarLengthFastByteComparisons.java   |  219 ++
 .../expression/ArrayConstructorExpression.java  |2 +-
 .../phoenix/expression/OrderByExpression.java   |   13 +-
 .../RowValueConstructorExpression.java  |8 +-
 .../function/ArrayConcatFunction.java   |   11 +-
 .../function/ArrayModifierFunction.java |3 +-
 .../expression/function/LpadFunction.java   |8 +-
 .../expression/util/regex/JONIPattern.java  |5 +-
 .../apache/phoenix/filter/SkipScanFilter.java   |3 +-
 .../apache/phoenix/index/IndexMaintainer.java   |  127 +-
 .../phoenix/iterate/BaseResultIterators.java|  109 +-
 .../phoenix/iterate/OrderedResultIterator.java  |   52 +-
 .../apache/phoenix/jdbc/PhoenixConnection.java  |   29 +-
 .../query/ConnectionQueryServicesImpl.java  |   17 +-
 .../java/org/apache/phoenix/query/KeyRange.java |   14 -
 .../apache/phoenix/query/QueryConstants.java|3 +
 .../apache/phoenix/schema/DelegateTable.java|5 +
 .../apache/phoenix/schema/MetaDataClient.java   |   31 +-
 .../java/org/apache/phoenix/schema/PTable.java  |9 +
 .../org/apache/phoenix/schema/PTableImpl.java   |   78 +-
 .../org/apache/phoenix/schema/RowKeySchema.java |   44 +-
 .../phoenix/schema/RowKeyValueAccessor.java |   12 +-
 .../org/apache/phoenix/schema/ValueSchema.java  |   30 +-
 .../phoenix/schema/stats/StatisticsUtil.java|4 +-
 .../phoenix/schema/types/PArrayDataType.java|  682 +++---
 .../phoenix/schema/types/PBinaryArray.java  |  122 +-
 .../phoenix/schema/types/PBooleanArray.java |  112 +-
 .../apache/phoenix/schema/types/PCharArray.java |  128 +-
 .../apache/phoenix/schema/types/PDataType.java  | 2037 +-
 .../apache/phoenix/schema/types/PDateArray.java |  131 +-
 .../phoenix/schema/types/PDecimalArray.java |  126 +-
 .../phoenix/schema/types/PDoubleArray.java  |  128 +-
 .../phoenix/schema/types/PFloatArray.java   |  130 +-
 .../phoenix/schema/types/PIntegerArray.java |  130 +-
 .../apache/phoenix/schema/types/PLongArray.java |  130 +-
 .../phoenix/schema/types/PSmallintArray.java|  130 +-
 .../apache/phoenix/schema/types/PTimeArray.java |  133 +-
 .../phoenix/schema/types/PTimestampArray.java   |  132 +-
 .../phoenix/schema/types/PTinyintArray.java |  130 +-
 .../schema/types/PUnsignedDateArray.java|  128 +-
 .../schema/types/PUnsignedDoubleArray.java  |  136 +-
 .../schema/types/PUnsignedFloatArray.java   |  130 +-
 .../phoenix/schema/types/PUnsignedIntArray.java |  130 +-
 .../schema/types/PUnsignedLongArray.java|  130 +-
 .../schema/types/PUnsignedSmallintArray.java|  132 +-
 .../schema/types/PUnsignedTimeArray.java|  132 +-
 .../schema/types/PUnsignedTimestampArray.java   |  134 +-
 .../schema/types/PUnsignedTinyintArray.java |  132 +-
 .../phoenix/schema/types/PVarbinaryArray.java   |  130 +-
 .../phoenix/schema/types/PVarcharArray.java |  130 +-

[03/50] [abbrv] phoenix git commit: PHOENIX-922 Support SELECT without a from clause

2015-07-20 Thread maryannxue
PHOENIX-922 Support SELECT without a from clause


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/df42fa13
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/df42fa13
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/df42fa13

Branch: refs/heads/calcite
Commit: df42fa13072df8a738e0bf2c2a22b58c512a9dcf
Parents: f579e72
Author: maryannxue wei@intel.com
Authored: Sat Jul 4 13:11:08 2015 -0400
Committer: maryannxue wei@intel.com
Committed: Sat Jul 4 13:11:08 2015 -0400

--
 .../org/apache/phoenix/end2end/SequenceIT.java  | 27 ++
 phoenix-core/src/main/antlr3/PhoenixSQL.g   |  2 +-
 .../apache/phoenix/compile/FromCompiler.java| 13 +--
 .../phoenix/compile/ProjectionCompiler.java |  2 +-
 .../apache/phoenix/compile/QueryCompiler.java   |  9 +-
 .../compile/TupleProjectionCompiler.java|  1 +
 .../apache/phoenix/execute/BaseQueryPlan.java   |  9 +-
 .../phoenix/execute/EmptyTableQueryPlan.java| 96 
 .../apache/phoenix/parse/SelectStatement.java   |  6 +-
 .../org/apache/phoenix/schema/PTableImpl.java   |  1 +
 .../org/apache/phoenix/schema/TableRef.java |  2 +
 .../phoenix/compile/QueryCompilerTest.java  | 45 +
 .../apache/phoenix/parse/QueryParserTest.java   | 19 +---
 13 files changed, 195 insertions(+), 37 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/df42fa13/phoenix-core/src/it/java/org/apache/phoenix/end2end/SequenceIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SequenceIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SequenceIT.java
index 6f2ec82..4273022 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SequenceIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SequenceIT.java
@@ -1242,5 +1242,32 @@ public class SequenceIT extends BaseClientManagedTimeIT {
 assertEquals(4, rs.getLong(1));
 assertFalse(rs.next());
 }
+
+@Test
+public void testNoFromClause() throws Exception {
+ResultSet rs;
+nextConnection();
+conn.createStatement().execute(CREATE SEQUENCE myseq START WITH 1 
INCREMENT BY 1);
+conn.createStatement().execute(CREATE SEQUENCE anotherseq START WITH 
2 INCREMENT BY 3);
+nextConnection();
+rs = conn.createStatement().executeQuery(EXPLAIN SELECT NEXT VALUE 
FOR myseq);
+assertEquals(CLIENT RESERVE VALUES FROM 1 SEQUENCE, 
QueryUtil.getExplainPlan(rs));
+rs = conn.createStatement().executeQuery(SELECT NEXT VALUE FOR 
myseq);
+assertTrue(rs.next());
+assertEquals(1, rs.getInt(1));
+rs = conn.createStatement().executeQuery(EXPLAIN SELECT CURRENT VALUE 
FOR myseq);
+assertEquals(CLIENT RESERVE VALUES FROM 1 SEQUENCE, 
QueryUtil.getExplainPlan(rs));
+rs = conn.createStatement().executeQuery(SELECT CURRENT VALUE FOR 
myseq);
+assertTrue(rs.next());
+assertEquals(1, rs.getInt(1));
+rs = conn.createStatement().executeQuery(SELECT NEXT VALUE FOR myseq, 
NEXT VALUE FOR anotherseq);
+assertTrue(rs.next());
+assertEquals(2, rs.getInt(1));
+assertEquals(2, rs.getInt(2));
+rs = conn.createStatement().executeQuery(SELECT CURRENT VALUE FOR 
myseq, NEXT VALUE FOR anotherseq);
+assertTrue(rs.next());
+assertEquals(2, rs.getInt(1));
+assertEquals(5, rs.getInt(2));
+}
 
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/phoenix/blob/df42fa13/phoenix-core/src/main/antlr3/PhoenixSQL.g
--
diff --git a/phoenix-core/src/main/antlr3/PhoenixSQL.g 
b/phoenix-core/src/main/antlr3/PhoenixSQL.g
index 2a8d11b..69014a2 100644
--- a/phoenix-core/src/main/antlr3/PhoenixSQL.g
+++ b/phoenix-core/src/main/antlr3/PhoenixSQL.g
@@ -629,7 +629,7 @@ single_select returns [SelectStatement ret]
 @init{ contextStack.push(new ParseContext()); }
 :   SELECT (h=hintClause)? 
 (d=DISTINCT | ALL)? sel=select_list
-FROM from=parseFrom
+(FROM from=parseFrom)?
 (WHERE where=expression)?
 (GROUP BY group=group_by)?
 (HAVING having=expression)?

http://git-wip-us.apache.org/repos/asf/phoenix/blob/df42fa13/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
index bc753c9..30a2bb7 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
+++ 

[15/50] [abbrv] phoenix git commit: PHOENIX-2063 Row value constructor doesn't work when used in COUNT DISTINCT (Dumindu Buddhika)

2015-07-20 Thread maryannxue
PHOENIX-2063 Row value constructor doesn't work when used in COUNT
DISTINCT (Dumindu Buddhika)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/fa2d79a0
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/fa2d79a0
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/fa2d79a0

Branch: refs/heads/calcite
Commit: fa2d79a0162497761dd4568e3f8f343063b6afaa
Parents: aa4c900
Author: ramkrishna ramkrishna.s.vasude...@gmail.com
Authored: Thu Jul 9 10:18:10 2015 +0530
Committer: ramkrishna ramkrishna.s.vasude...@gmail.com
Committed: Thu Jul 9 10:18:10 2015 +0530

--
 .../phoenix/end2end/RowValueConstructorIT.java  | 78 
 .../aggregator/ServerAggregators.java   |  1 +
 2 files changed, 79 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/fa2d79a0/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
index e227eb0..0cf5455 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowValueConstructorIT.java
@@ -1423,4 +1423,82 @@ public class RowValueConstructorIT extends 
BaseClientManagedTimeIT {
 assertFalse(rs.next());
 conn.close();
 }
+
+@Test
+public void testCountDistinct1() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+String ddl = CREATE TABLE regions1 (region_name VARCHAR PRIMARY KEY, 
a INTEGER, b INTEGER);
+conn.createStatement().execute(ddl);
+conn.commit();
+PreparedStatement stmt = conn.prepareStatement(UPSERT INTO 
regions1(region_name, a, b) VALUES('a', 6,3));
+stmt.execute();
+stmt = conn.prepareStatement(UPSERT INTO regions1(region_name, a, b) 
VALUES('b', 2,4));
+stmt.execute();
+stmt = conn.prepareStatement(UPSERT INTO regions1(region_name, a, b) 
VALUES('c', 6,3));
+stmt.execute();
+conn.commit();
+ResultSet rs;
+rs = conn.createStatement().executeQuery(SELECT COUNT(DISTINCT (a,b)) 
from regions1);
+assertTrue(rs.next());
+assertEquals(2, rs.getInt(1));
+}
+
+@Test
+public void testCountDistinct2() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+String ddl = CREATE TABLE regions2 (region_name VARCHAR PRIMARY KEY, 
a VARCHAR, b VARCHAR);
+conn.createStatement().execute(ddl);
+conn.commit();
+PreparedStatement stmt = conn.prepareStatement(UPSERT INTO 
regions2(region_name, a, b) VALUES('a', 'fooo','abc'));
+stmt.execute();
+stmt = conn.prepareStatement(UPSERT INTO regions2(region_name, a, b) 
VALUES('b', 'off','bac'));
+stmt.execute();
+stmt = conn.prepareStatement(UPSERT INTO regions2(region_name, a, b) 
VALUES('c', 'fooo', 'abc'));
+stmt.execute();
+conn.commit();
+ResultSet rs;
+rs = conn.createStatement().executeQuery(SELECT COUNT(DISTINCT (a,b)) 
from regions2);
+assertTrue(rs.next());
+assertEquals(2, rs.getInt(1));
+}
+
+@Test
+public void testCountDistinct3() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+String ddl = CREATE TABLE regions3 (region_name VARCHAR PRIMARY KEY, 
a Boolean, b Boolean);
+conn.createStatement().execute(ddl);
+conn.commit();
+PreparedStatement stmt = conn.prepareStatement(UPSERT INTO 
regions3(region_name, a, b) VALUES('a', true, true));
+stmt.execute();
+stmt = conn.prepareStatement(UPSERT INTO regions3(region_name, a, b) 
VALUES('b', true, False));
+stmt.execute();
+stmt = conn.prepareStatement(UPSERT INTO regions3(region_name, a, b) 
VALUES('c', true, true));
+stmt.execute();
+stmt = conn.prepareStatement(UPSERT INTO regions3(region_name, a, b) 
VALUES('d', true, false));
+stmt.execute();
+conn.commit();
+ResultSet rs;
+rs = conn.createStatement().executeQuery(SELECT COUNT(DISTINCT (a,b)) 
from regions3);
+assertTrue(rs.next());
+assertEquals(2, rs.getInt(1));
+}
+
+@Test
+public void testCountDistinct4() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+String ddl = CREATE TABLE regions4 (region_name VARCHAR PRIMARY KEY, 
a VARCHAR, b VARCHAR);
+conn.createStatement().execute(ddl);
+conn.commit();
+PreparedStatement stmt = 

[02/50] [abbrv] phoenix git commit: PHOENIX-2036 - PhoenixConfigurationUtil should provide a pre-normalize table name to PhoenixRuntime

2015-07-20 Thread maryannxue
PHOENIX-2036 - PhoenixConfigurationUtil should provide a pre-normalize table 
name to PhoenixRuntime


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c398e182
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c398e182
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c398e182

Branch: refs/heads/calcite
Commit: c398e1823d8fe16c729e3b75eaf754eef0702856
Parents: be5aba5
Author: ravimagham ravimag...@apache.org
Authored: Sat Jul 4 10:08:00 2015 -0700
Committer: ravimagham ravimag...@apache.org
Committed: Sat Jul 4 10:08:00 2015 -0700

--
 .../util/PhoenixConfigurationUtilTest.java  | 28 +--
 .../org/apache/phoenix/util/QueryUtilTest.java  | 36 ++--
 2 files changed, 58 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c398e182/phoenix-core/src/test/java/org/apache/phoenix/mapreduce/util/PhoenixConfigurationUtilTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/mapreduce/util/PhoenixConfigurationUtilTest.java
 
b/phoenix-core/src/test/java/org/apache/phoenix/mapreduce/util/PhoenixConfigurationUtilTest.java
index f8f2a63..aa03501 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/mapreduce/util/PhoenixConfigurationUtilTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/mapreduce/util/PhoenixConfigurationUtilTest.java
@@ -73,7 +73,29 @@ public class PhoenixConfigurationUtilTest extends 
BaseConnectionlessQueryTest {
 configuration.set(HConstants.ZOOKEEPER_QUORUM, getUrl());
 PhoenixConfigurationUtil.setInputTableName(configuration, 
tableName);
 final String selectStatement = 
PhoenixConfigurationUtil.getSelectStatement(configuration);
-final String expectedSelectStatement = SELECT 
\A_STRING\,\A_BINARY\,\0\.\COL1\ FROM  + 
SchemaUtil.getEscapedArgument(tableName) ; 
+final String expectedSelectStatement = SELECT 
\A_STRING\,\A_BINARY\,\0\.\COL1\ FROM  + tableName ; 
+assertEquals(expectedSelectStatement, selectStatement);
+} finally {
+conn.close();
+}
+}
+
+@Test
+public void testSelectStatementWithSchema() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl(), 
PropertiesUtil.deepCopy(TestUtil.TEST_PROPERTIES));
+final String tableName = TEST_TABLE;
+final String schemaName = SchemaUtil.getEscapedArgument(schema);
+final String fullTableName = SchemaUtil.getTableName(schemaName, 
tableName);
+try {
+String ddl = CREATE TABLE + fullTableName + 
+  (a_string varchar not null, a_binary varbinary not 
null, col1 integer +
+  CONSTRAINT pk PRIMARY KEY (a_string, a_binary))\n;
+conn.createStatement().execute(ddl);
+final Configuration configuration = new Configuration ();
+configuration.set(HConstants.ZOOKEEPER_QUORUM, getUrl());
+PhoenixConfigurationUtil.setInputTableName(configuration, 
fullTableName);
+final String selectStatement = 
PhoenixConfigurationUtil.getSelectStatement(configuration);
+final String expectedSelectStatement = SELECT 
\A_STRING\,\A_BINARY\,\0\.\COL1\ FROM  + fullTableName; 
 assertEquals(expectedSelectStatement, selectStatement);
 } finally {
 conn.close();
@@ -94,7 +116,7 @@ public class PhoenixConfigurationUtilTest extends 
BaseConnectionlessQueryTest {
 PhoenixConfigurationUtil.setInputTableName(configuration, 
tableName);
 PhoenixConfigurationUtil.setSelectColumnNames(configuration, 
A_BINARY);
 final String selectStatement = 
PhoenixConfigurationUtil.getSelectStatement(configuration);
-final String expectedSelectStatement = SELECT \A_BINARY\ FROM  
+ SchemaUtil.getEscapedArgument(tableName) ; 
+final String expectedSelectStatement = SELECT \A_BINARY\ FROM  
+ tableName ; 
 assertEquals(expectedSelectStatement, selectStatement);
 } finally {
 conn.close();
@@ -115,7 +137,7 @@ public class PhoenixConfigurationUtilTest extends 
BaseConnectionlessQueryTest {
 PhoenixConfigurationUtil.setSchemaType(configuration, 
SchemaType.QUERY);
 PhoenixConfigurationUtil.setInputTableName(configuration, 
tableName);
 final String selectStatement = 
PhoenixConfigurationUtil.getSelectStatement(configuration);
-final String expectedSelectStatement = SELECT 
\ID\,\0\.\VCARRAY\ FROM  + SchemaUtil.getEscapedArgument(tableName) ; 
+final String expectedSelectStatement = SELECT 
\ID\,\0\.\VCARRAY\ FROM  + tableName ; 
 

[13/50] [abbrv] phoenix git commit: PHOENIX-2058 Check for existence and compatibility of columns being added in view

2015-07-20 Thread maryannxue
PHOENIX-2058 Check for existence and compatibility of columns being added in 
view


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8c9a6b8c
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8c9a6b8c
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8c9a6b8c

Branch: refs/heads/calcite
Commit: 8c9a6b8cb94c0380c8e99aeee18d3b7d8f6e37ef
Parents: 61f1900
Author: Thomas D'Silva tdsi...@salesforce.com
Authored: Thu Jul 2 21:19:11 2015 -0700
Committer: Thomas D'Silva tdsi...@salesforce.com
Committed: Wed Jul 8 15:36:00 2015 -0700

--
 .../apache/phoenix/end2end/AlterTableIT.java| 221 +--
 .../coprocessor/MetaDataEndpointImpl.java   | 108 -
 .../apache/phoenix/schema/MetaDataClient.java   |   4 +-
 3 files changed, 302 insertions(+), 31 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c9a6b8c/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
index 56bba9b..fbaded0 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
@@ -64,6 +64,7 @@ import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.ReadOnlyProps;
 import org.apache.phoenix.util.SchemaUtil;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 import org.junit.Test;
 
 import com.google.common.base.Objects;
@@ -2009,7 +2010,7 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 }
 
 @Test
-public void testAddColumnToTableWithViews() throws Exception {
+public void testAddNewColumnToBaseTableWithViews() throws Exception {
 Connection conn = DriverManager.getConnection(getUrl());
 try {   
 conn.createStatement().execute(CREATE TABLE IF NOT EXISTS 
TABLEWITHVIEW (
@@ -2020,12 +2021,176 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 +  ));
 assertTableDefinition(conn, TABLEWITHVIEW, PTableType.TABLE, 
null, 0, 3, -1, ID, COL1, COL2);
 
-conn.createStatement().execute(CREATE VIEW VIEWOFTABLE ( 
VIEW_COL1 SMALLINT ) AS SELECT * FROM TABLEWITHVIEW);
-assertTableDefinition(conn, VIEWOFTABLE, PTableType.VIEW, 
TABLEWITHVIEW, 0, 4, 3, ID, COL1, COL2, VIEW_COL1);
+conn.createStatement().execute(CREATE VIEW VIEWOFTABLE ( 
VIEW_COL1 DECIMAL(10,2), VIEW_COL2 VARCHAR ) AS SELECT * FROM TABLEWITHVIEW);
+assertTableDefinition(conn, VIEWOFTABLE, PTableType.VIEW, 
TABLEWITHVIEW, 0, 5, 3, ID, COL1, COL2, VIEW_COL1, VIEW_COL2);
 
 conn.createStatement().execute(ALTER TABLE TABLEWITHVIEW ADD COL3 
char(10));
-assertTableDefinition(conn, VIEWOFTABLE, PTableType.VIEW, 
TABLEWITHVIEW, 1, 5, 4, ID, COL1, COL2, COL3, VIEW_COL1);
+assertTableDefinition(conn, VIEWOFTABLE, PTableType.VIEW, 
TABLEWITHVIEW, 1, 6, 4, ID, COL1, COL2, COL3, VIEW_COL1, 
VIEW_COL2);
+} finally {
+conn.close();
+}
+}
+
+@Test
+public void testAddExistingViewColumnToBaseTableWithViews() throws 
Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+try {   
+conn.createStatement().execute(CREATE TABLE IF NOT EXISTS 
TABLEWITHVIEW (
++  ID char(10) NOT NULL,
++  COL1 integer NOT NULL,
++  COL2 bigint NOT NULL,
++  CONSTRAINT NAME_PK PRIMARY KEY (ID, COL1, COL2)
++  ));
+assertTableDefinition(conn, TABLEWITHVIEW, PTableType.TABLE, 
null, 0, 3, -1, ID, COL1, COL2);
+
+conn.createStatement().execute(CREATE VIEW VIEWOFTABLE ( 
VIEW_COL1 DECIMAL(10,2), VIEW_COL2 VARCHAR(256), VIEW_COL3 VARCHAR, VIEW_COL4 
DECIMAL ) AS SELECT * FROM TABLEWITHVIEW);
+assertTableDefinition(conn, VIEWOFTABLE, PTableType.VIEW, 
TABLEWITHVIEW, 0, 7, 3, ID, COL1, COL2, VIEW_COL1, VIEW_COL2, 
VIEW_COL3, VIEW_COL4);
+
+// upsert single row into view
+String dml = UPSERT INTO VIEWOFTABLE VALUES(?,?,?,?,?, ?, ?);
+PreparedStatement stmt = conn.prepareStatement(dml);
+stmt.setString(1, view1);
+stmt.setInt(2, 12);
+stmt.setInt(3, 13);
+stmt.setInt(4, 14);
+stmt.setString(5, view5);
+stmt.setString(6, view6);
+stmt.setInt(7, 17);
+stmt.execute();
+conn.commit();
+  

[06/50] [abbrv] phoenix git commit: PHOENIX-2036 PhoenixConfigurationUtil should provide a pre-normalize table name to PhoenixRuntime

2015-07-20 Thread maryannxue
PHOENIX-2036 PhoenixConfigurationUtil should provide a pre-normalize table name 
to PhoenixRuntime

Update phoenix-spark to follow the same normalization requirement.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/39c982f9
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/39c982f9
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/39c982f9

Branch: refs/heads/calcite
Commit: 39c982f923033b97c477464d0c4e27221421774d
Parents: 1e606d5
Author: Josh Mahonin jmaho...@gmail.com
Authored: Mon Jul 6 19:39:31 2015 -0400
Committer: Josh Mahonin jmaho...@apache.org
Committed: Mon Jul 6 19:41:38 2015 -0400

--
 phoenix-spark/src/it/resources/setup.sql|  4 +-
 .../apache/phoenix/spark/PhoenixSparkIT.scala   | 58 
 .../org/apache/phoenix/spark/PhoenixRDD.scala   | 24 +++-
 3 files changed, 46 insertions(+), 40 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/39c982f9/phoenix-spark/src/it/resources/setup.sql
--
diff --git a/phoenix-spark/src/it/resources/setup.sql 
b/phoenix-spark/src/it/resources/setup.sql
index 40157a2..154a996 100644
--- a/phoenix-spark/src/it/resources/setup.sql
+++ b/phoenix-spark/src/it/resources/setup.sql
@@ -32,4 +32,6 @@ CREATE TABLE ARRAY_TEST_TABLE (ID BIGINT NOT NULL PRIMARY 
KEY, VCARRAY VARCHAR[]
 UPSERT INTO ARRAY_TEST_TABLE (ID, VCARRAY) VALUES (1, ARRAY['String1', 
'String2', 'String3'])
 CREATE TABLE DATE_PREDICATE_TEST_TABLE (ID BIGINT NOT NULL, TIMESERIES_KEY 
TIMESTAMP NOT NULL CONSTRAINT pk PRIMARY KEY (ID, TIMESERIES_KEY))
 UPSERT INTO DATE_PREDICATE_TEST_TABLE (ID, TIMESERIES_KEY) VALUES (1, 
CAST(CURRENT_TIME() AS TIMESTAMP))
-CREATE TABLE OUTPUT_TEST_TABLE (id BIGINT NOT NULL PRIMARY KEY, col1 VARCHAR, 
col2 INTEGER, col3 DATE)
\ No newline at end of file
+CREATE TABLE OUTPUT_TEST_TABLE (id BIGINT NOT NULL PRIMARY KEY, col1 VARCHAR, 
col2 INTEGER, col3 DATE)
+CREATE TABLE CUSTOM_ENTITY.z02(id BIGINT NOT NULL PRIMARY KEY)
+UPSERT INTO CUSTOM_ENTITY.z02 (id) VALUES(1)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/phoenix/blob/39c982f9/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
--
diff --git 
a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala 
b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
index 5f256e6..e1c9df4 100644
--- a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
+++ b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
@@ -20,9 +20,9 @@ import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.hbase.{HBaseConfiguration, HConstants, 
HBaseTestingUtility}
 import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT
 import org.apache.phoenix.query.BaseTest
-import org.apache.phoenix.schema.ColumnNotFoundException
+import org.apache.phoenix.schema.{TableNotFoundException, 
ColumnNotFoundException}
 import org.apache.phoenix.schema.types.PVarchar
-import org.apache.phoenix.util.ColumnInfo
+import org.apache.phoenix.util.{SchemaUtil, ColumnInfo}
 import org.apache.spark.sql.{SaveMode, execution, SQLContext}
 import org.apache.spark.sql.types.{LongType, DataType, StringType, StructField}
 import org.apache.spark.{SparkConf, SparkContext}
@@ -96,23 +96,6 @@ class PhoenixSparkIT extends FunSuite with Matchers with 
BeforeAndAfterAll {
 PhoenixSparkITHelper.doTeardown
   }
 
-  def buildSql(table: String, columns: Seq[String], predicate: 
Option[String]): String = {
-val query = SELECT %s FROM \%s\ format(columns.map(f = \ + f + 
\).mkString(, ), table)
-
-query + (predicate match {
-  case Some(p: String) =  WHERE  + p
-  case _ = 
-})
-  }
-
-  test(Can create valid SQL) {
-val rdd = new PhoenixRDD(sc, MyTable, Array(Foo, Bar),
-  conf = hbaseConfiguration)
-
-rdd.buildSql(MyTable, Array(Foo, Bar), None) should
-  equal(SELECT \Foo\, \Bar\ FROM \MyTable\)
-  }
-
   test(Can convert Phoenix schema) {
 val phoenixSchema = List(
   new ColumnInfo(varcharColumn, PVarchar.INSTANCE.getSqlType)
@@ -154,7 +137,9 @@ class PhoenixSparkIT extends FunSuite with Matchers with 
BeforeAndAfterAll {
 val sqlContext = new SQLContext(sc)
 
 
-val df1 = sqlContext.phoenixTableAsDataFrame(table3, Array(id, col1),
+val df1 = sqlContext.phoenixTableAsDataFrame(
+  SchemaUtil.getEscapedArgument(table3),
+  Array(id, col1),
   zkUrl = Some(quorumAddress))
 
 df1.registerTempTable(table3)
@@ -191,10 +176,12 @@ class PhoenixSparkIT extends FunSuite with Matchers with 
BeforeAndAfterAll {
   }
 
   test(Using a predicate referring to a non-existent column should 

[30/50] [abbrv] phoenix git commit: PHOENIX-2067 Sort order incorrect for variable length DESC columns

2015-07-20 Thread maryannxue
http://git-wip-us.apache.org/repos/asf/phoenix/blob/2620a80c/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDateArray.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDateArray.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDateArray.java
index 764401c..a07418c 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDateArray.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PDateArray.java
@@ -17,93 +17,78 @@
  */
 package org.apache.phoenix.schema.types;
 
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.phoenix.schema.SortOrder;
-
-import java.sql.Types;
 import java.sql.Date;
 
-public class PDateArray extends PArrayDataTypeDate[] {
-
-  public static final PDateArray INSTANCE = new PDateArray();
-
-  private PDateArray() {
-super(DATE ARRAY, PDataType.ARRAY_TYPE_BASE + 
PDate.INSTANCE.getSqlType(), PhoenixArray.class,
-null, 40);
-  }
+import org.apache.phoenix.schema.SortOrder;
 
-  @Override
-  public boolean isArrayType() {
-return true;
-  }
+public class PDateArray extends PArrayDataTypeDate[] {
 
-  @Override
-  public boolean isFixedWidth() {
-return false;
-  }
+public static final PDateArray INSTANCE = new PDateArray();
 
-  @Override
-  public int compareTo(Object lhs, Object rhs, PDataType rhsType) {
-return compareTo(lhs, rhs);
-  }
+private PDateArray() {
+super(DATE ARRAY, PDataType.ARRAY_TYPE_BASE + 
PDate.INSTANCE.getSqlType(), PhoenixArray.class,
+null, 40);
+}
 
-  @Override
-  public Integer getByteSize() {
-return null;
-  }
+@Override
+public boolean isArrayType() {
+return true;
+}
 
-  @Override
-  public byte[] toBytes(Object object) {
-return toBytes(object, SortOrder.ASC);
-  }
+@Override
+public boolean isFixedWidth() {
+return false;
+}
 
-  @Override
-  public byte[] toBytes(Object object, SortOrder sortOrder) {
-return toBytes(object, PDate.INSTANCE, sortOrder);
-  }
+@Override
+public int compareTo(Object lhs, Object rhs, PDataType rhsType) {
+return compareTo(lhs, rhs);
+}
 
-  @Override
-  public Object toObject(byte[] bytes, int offset, int length,
-  PDataType actualType, SortOrder sortOrder, Integer maxLength, Integer 
scale) {
-return toObject(bytes, offset, length, PDate.INSTANCE, sortOrder, 
maxLength, scale,
-PDate.INSTANCE);
-  }
+@Override
+public Integer getByteSize() {
+return null;
+}
 
-  @Override
-  public boolean isCoercibleTo(PDataType targetType) {
-return isCoercibleTo(targetType, this);
-  }
+@Override
+public byte[] toBytes(Object object) {
+return toBytes(object, SortOrder.ASC);
+}
 
-  @Override
-  public boolean isCoercibleTo(PDataType targetType, Object value) {
-if (value == null) {
-  return true;
+@Override
+public byte[] toBytes(Object object, SortOrder sortOrder) {
+return toBytes(object, PDate.INSTANCE, sortOrder);
 }
-PhoenixArray pArr = (PhoenixArray) value;
-Object[] dateArr = (Object[]) pArr.array;
-for (Object i : dateArr) {
-  if (!super.isCoercibleTo(PDate.INSTANCE, i)) {
-return false;
-  }
+
+@Override
+public Object toObject(byte[] bytes, int offset, int length,
+PDataType actualType, SortOrder sortOrder, Integer maxLength, 
Integer scale) {
+return toObject(bytes, offset, length, PDate.INSTANCE, sortOrder, 
maxLength, scale,
+PDate.INSTANCE);
 }
-return true;
-  }
 
-  @Override
-  public int getResultSetSqlType() {
-return Types.ARRAY;
-  }
+@Override
+public boolean isCoercibleTo(PDataType targetType) {
+return isCoercibleTo(targetType, this);
+}
 
-  @Override
-  public void coerceBytes(ImmutableBytesWritable ptr, Object object, PDataType 
actualType,
-  Integer maxLength, Integer scale, SortOrder actualModifer, Integer 
desiredMaxLength,
-  Integer desiredScale,SortOrder desiredModifier) {
-coerceBytes(ptr, object, actualType, maxLength, scale, desiredMaxLength, 
desiredScale,
-this, actualModifer, desiredModifier);
-  }
+@Override
+public boolean isCoercibleTo(PDataType targetType, Object value) {
+if (value == null) {
+return true;
+}
+PhoenixArray pArr = (PhoenixArray) value;
+Object[] dateArr = (Object[]) pArr.array;
+for (Object i : dateArr) {
+if (!super.isCoercibleTo(PDate.INSTANCE, i)) {
+return false;
+}
+}
+return true;
+}
 
-  @Override
-  public Object getSampleValue(Integer maxLength, Integer arrayLength) {
-return getSampleValue(PDate.INSTANCE, arrayLength, maxLength);
-  }
+@Override
+public Object getSampleValue(Integer maxLength, Integer 

[33/50] [abbrv] phoenix git commit: PHOENIX-2067 Sort order incorrect for variable length DESC columns

2015-07-20 Thread maryannxue
http://git-wip-us.apache.org/repos/asf/phoenix/blob/2620a80c/phoenix-core/src/main/java/org/apache/phoenix/index/IndexMaintainer.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/index/IndexMaintainer.java 
b/phoenix-core/src/main/java/org/apache/phoenix/index/IndexMaintainer.java
index 0956753..a12f633 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/index/IndexMaintainer.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/index/IndexMaintainer.java
@@ -59,14 +59,10 @@ import 
org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 import org.apache.phoenix.hbase.index.util.KeyValueBuilder;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixStatement;
-import org.apache.phoenix.parse.AndParseNode;
-import org.apache.phoenix.parse.BaseParseNodeVisitor;
-import org.apache.phoenix.parse.BooleanParseNodeVisitor;
 import org.apache.phoenix.parse.FunctionParseNode;
 import org.apache.phoenix.parse.ParseNode;
 import org.apache.phoenix.parse.SQLParser;
 import org.apache.phoenix.parse.StatelessTraverseAllParseNodeVisitor;
-import org.apache.phoenix.parse.TraverseAllParseNodeVisitor;
 import org.apache.phoenix.parse.UDFParseNode;
 import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.schema.ColumnNotFoundException;
@@ -265,6 +261,7 @@ public class IndexMaintainer implements Writable, 
IterableColumnReference {
 private int[] dataPkPosition;
 private int maxTrailingNulls;
 private ColumnReference dataEmptyKeyValueRef;
+private boolean rowKeyOrderOptimizable;
 
 private IndexMaintainer(RowKeySchema dataRowKeySchema, boolean 
isDataTableSalted) {
 this.dataRowKeySchema = dataRowKeySchema;
@@ -273,6 +270,7 @@ public class IndexMaintainer implements Writable, 
IterableColumnReference {
 
 private IndexMaintainer(PTable dataTable, PTable index, PhoenixConnection 
connection) {
 this(dataTable.getRowKeySchema(), dataTable.getBucketNum() != null);
+this.rowKeyOrderOptimizable = index.rowKeyOrderOptimizable();
 this.isMultiTenant = dataTable.isMultiTenant();
 this.viewIndexId = index.getViewIndexId() == null ? null : 
MetaDataUtil.getViewIndexIdDataType().toBytes(index.getViewIndexId());
 this.isLocalIndex = index.getIndexType() == IndexType.LOCAL;
@@ -434,7 +432,7 @@ public class IndexMaintainer implements Writable, 
IterableColumnReference {
 dataRowKeySchema.next(ptr, dataPosOffset, maxRowKeyOffset);
 output.write(ptr.get(), ptr.getOffset(), ptr.getLength());
 if 
(!dataRowKeySchema.getField(dataPosOffset).getDataType().isFixedWidth()) {
-output.writeByte(QueryConstants.SEPARATOR_BYTE);
+
output.writeByte(SchemaUtil.getSeparatorByte(rowKeyOrderOptimizable, 
ptr.getLength()==0, dataRowKeySchema.getField(dataPosOffset)));
 }
 dataPosOffset++;
 }
@@ -481,21 +479,22 @@ public class IndexMaintainer implements Writable, 
IterableColumnReference {
 }
 boolean isDataColumnInverted = dataSortOrder != SortOrder.ASC;
 PDataType indexColumnType = 
IndexUtil.getIndexColumnDataType(isNullable, dataColumnType);
-boolean isBytesComparable = 
dataColumnType.isBytesComparableWith(indexColumnType) ;
-if (isBytesComparable  isDataColumnInverted == 
descIndexColumnBitSet.get(i)) {
boolean isBytesComparable = 
dataColumnType.isBytesComparableWith(indexColumnType);
+boolean isIndexColumnDesc = descIndexColumnBitSet.get(i);
+if (isBytesComparable && isDataColumnInverted == 
isIndexColumnDesc) {
 output.write(ptr.get(), ptr.getOffset(), ptr.getLength());
 } else {
 if (!isBytesComparable)  {
 indexColumnType.coerceBytes(ptr, dataColumnType, 
dataSortOrder, SortOrder.getDefault());
 }
-if (descIndexColumnBitSet.get(i) != isDataColumnInverted) {
+if (isDataColumnInverted != isIndexColumnDesc) {
 writeInverted(ptr.get(), ptr.getOffset(), 
ptr.getLength(), output);
 } else {
 output.write(ptr.get(), ptr.getOffset(), 
ptr.getLength());
 }
 }
 if (!indexColumnType.isFixedWidth()) {
-output.writeByte(QueryConstants.SEPARATOR_BYTE);
+
output.writeByte(SchemaUtil.getSeparatorByte(rowKeyOrderOptimizable, 
ptr.getLength() == 0, isIndexColumnDesc ? SortOrder.DESC : SortOrder.ASC));
 }
 }
 int length = stream.size();
@@ -545,7 +544,7 @@ public class IndexMaintainer implements Writable, 
IterableColumnReference {
 

[39/50] [abbrv] phoenix git commit: PHOENIX-2111 Race condition on creation of new view and adding of column to base table

2015-07-20 Thread maryannxue
PHOENIX-2111 Race condition on creation of new view and adding of column to 
base table


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9f09f1a5
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9f09f1a5
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9f09f1a5

Branch: refs/heads/calcite
Commit: 9f09f1a5ddce38c256c647ca7cd80617259e35ea
Parents: 4b99c63
Author: Samarth samarth.j...@salesforce.com
Authored: Tue Jul 14 17:24:01 2015 -0700
Committer: Samarth samarth.j...@salesforce.com
Committed: Tue Jul 14 17:24:01 2015 -0700

--
 .../coprocessor/MetaDataEndpointImpl.java   |  239 ++--
 .../coprocessor/generated/MetaDataProtos.java   | 1243 +-
 .../query/ConnectionQueryServicesImpl.java  |   75 +-
 .../apache/phoenix/schema/MetaDataClient.java   |   12 +-
 .../org/apache/phoenix/util/MetaDataUtil.java   |   14 +
 .../org/apache/phoenix/util/PhoenixRuntime.java |4 -
 .../org/apache/phoenix/util/UpgradeUtil.java|4 +-
 phoenix-protocol/src/main/MetaDataService.proto |   14 +-
 8 files changed, 1414 insertions(+), 191 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/9f09f1a5/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index dcfe61d..5396a69 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -1068,6 +1068,50 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 return null;
 }
 
+/**
+ * 
+ * @return null if the physical table row information is not present.
+ * 
+ */
+private static Mutation getPhysicalTableForView(List<Mutation> 
tableMetadata, byte[][] parentSchemaTableNames) {
+int size = tableMetadata.size();
+byte[][] rowKeyMetaData = new byte[3][];
+MetaDataUtil.getTenantIdAndSchemaAndTableName(tableMetadata, 
rowKeyMetaData);
+Mutation physicalTableRow = null;
+boolean physicalTableLinkFound = false;
+if (size >= 2) {
+int i = size - 1;
+while (i >= 1) {
+Mutation m = tableMetadata.get(i);
+if (m instanceof Put) {
+LinkType linkType = MetaDataUtil.getLinkType(m);
+if (linkType == LinkType.PHYSICAL_TABLE) {
+physicalTableRow = m;
+physicalTableLinkFound = true;
+break;
+}
+}
+i--;
+}
+}
+if (!physicalTableLinkFound) {
+parentSchemaTableNames[0] = null;
+parentSchemaTableNames[1] = null;
+return null;
+}
+rowKeyMetaData = new byte[5][];
+getVarChars(physicalTableRow.getRow(), 5, rowKeyMetaData);
+byte[] colBytes = 
rowKeyMetaData[PhoenixDatabaseMetaData.COLUMN_NAME_INDEX];
+byte[] famBytes = 
rowKeyMetaData[PhoenixDatabaseMetaData.FAMILY_NAME_INDEX];
+if ((colBytes == null || colBytes.length == 0) && (famBytes != null && 
famBytes.length > 0)) {
+byte[] sName = 
SchemaUtil.getSchemaNameFromFullName(famBytes).getBytes();
+byte[] tName = 
SchemaUtil.getTableNameFromFullName(famBytes).getBytes();
+parentSchemaTableNames[0] = sName;
+parentSchemaTableNames[1] = tName;
+}
+return physicalTableRow;
+}
+
 @Override
 public void createTable(RpcController controller, CreateTableRequest 
request,
 RpcCallbackMetaDataResponse done) {
@@ -1075,66 +1119,101 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 byte[][] rowKeyMetaData = new byte[3][];
 byte[] schemaName = null;
 byte[] tableName = null;
-
 try {
 ListMutation tableMetadata = ProtobufUtil.getMutations(request);
 MetaDataUtil.getTenantIdAndSchemaAndTableName(tableMetadata, 
rowKeyMetaData);
 byte[] tenantIdBytes = 
rowKeyMetaData[PhoenixDatabaseMetaData.TENANT_ID_INDEX];
 schemaName = 
rowKeyMetaData[PhoenixDatabaseMetaData.SCHEMA_NAME_INDEX];
 tableName = 
rowKeyMetaData[PhoenixDatabaseMetaData.TABLE_NAME_INDEX];
-byte[] parentTableName = 
MetaDataUtil.getParentTableName(tableMetadata);
-byte[] lockTableName = parentTableName == null ? tableName : 
parentTableName;
-

[49/50] [abbrv] phoenix git commit: Merge branch 'master' into calcite

2015-07-20 Thread maryannxue
Merge branch 'master' into calcite


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/597e0013
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/597e0013
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/597e0013

Branch: refs/heads/calcite
Commit: 597e0013cee4285704aecc5c36d6727262ca9885
Parents: 0641043 b38a624
Author: maryannxue wei@intel.com
Authored: Mon Jul 20 12:32:48 2015 -0400
Committer: maryannxue wei@intel.com
Committed: Mon Jul 20 12:32:48 2015 -0400

--
 dev/eclipse_prefs_phoenix.epf   |2 +-
 .../apache/phoenix/end2end/AlterTableIT.java|  449 +++-
 .../org/apache/phoenix/end2end/ArrayIT.java |  178 ++
 .../end2end/ArrayToStringFunctionIT.java|  654 ++
 .../org/apache/phoenix/end2end/IsNullIT.java|   52 +-
 .../apache/phoenix/end2end/LpadFunctionIT.java  |   24 +
 .../apache/phoenix/end2end/MD5FunctionIT.java   |   19 +
 .../apache/phoenix/end2end/ReverseScanIT.java   |   59 +-
 .../phoenix/end2end/RowValueConstructorIT.java  |  103 +-
 .../end2end/SequenceBulkAllocationIT.java   | 1286 +++
 .../org/apache/phoenix/end2end/SequenceIT.java  |   27 +
 .../apache/phoenix/end2end/SortOrderFIT.java|  563 -
 .../org/apache/phoenix/end2end/SortOrderIT.java |  572 +
 .../phoenix/end2end/UserDefinedFunctionsIT.java |  212 +-
 .../java/org/apache/phoenix/end2end/ViewIT.java |   23 +
 .../iterate/RoundRobinResultIteratorIT.java |   80 +
 phoenix-core/src/main/antlr3/PhoenixSQL.g   |   35 +-
 .../phoenix/compile/CreateFunctionCompiler.java |4 +-
 .../apache/phoenix/compile/FromCompiler.java|   21 +-
 .../apache/phoenix/compile/JoinCompiler.java|8 +-
 .../phoenix/compile/ListJarsQueryPlan.java  |  216 ++
 .../apache/phoenix/compile/OrderByCompiler.java |4 +-
 .../phoenix/compile/OrderPreservingTracker.java |7 +-
 .../phoenix/compile/ProjectionCompiler.java |2 +-
 .../apache/phoenix/compile/QueryCompiler.java   |9 +-
 .../org/apache/phoenix/compile/ScanRanges.java  |5 +-
 .../apache/phoenix/compile/SequenceManager.java |   70 +-
 .../compile/SequenceValueExpression.java|   14 +-
 .../compile/TupleProjectionCompiler.java|5 +-
 .../apache/phoenix/compile/UnionCompiler.java   |5 +-
 .../apache/phoenix/compile/UpsertCompiler.java  |   16 +-
 .../apache/phoenix/compile/WhereOptimizer.java  |   53 +-
 .../coprocessor/BaseScannerRegionObserver.java  |2 +
 .../coprocessor/MetaDataEndpointImpl.java   |  457 +++-
 .../phoenix/coprocessor/MetaDataProtocol.java   |2 +-
 .../coprocessor/SequenceRegionObserver.java |   64 +-
 .../UngroupedAggregateRegionObserver.java   |  108 +-
 .../coprocessor/generated/MetaDataProtos.java   | 1243 ++-
 .../coprocessor/generated/PFunctionProtos.java  |  100 +-
 .../coprocessor/generated/PTableProtos.java |  105 +-
 .../phoenix/exception/SQLExceptionCode.java |4 +
 .../apache/phoenix/execute/BaseQueryPlan.java   |   23 +-
 .../DescVarLengthFastByteComparisons.java   |  219 ++
 .../phoenix/execute/EmptyTableQueryPlan.java|   96 +
 .../org/apache/phoenix/execute/ScanPlan.java|   14 +-
 .../expression/ArrayConstructorExpression.java  |   21 +-
 .../phoenix/expression/ExpressionType.java  |4 +-
 .../phoenix/expression/OrderByExpression.java   |   13 +-
 .../RowValueConstructorExpression.java  |8 +-
 .../aggregator/ServerAggregators.java   |1 +
 .../function/ArrayConcatFunction.java   |   11 +-
 .../function/ArrayModifierFunction.java |3 +-
 .../function/ArrayToStringFunction.java |   84 +
 .../expression/function/LpadFunction.java   |8 +-
 .../expression/util/regex/JONIPattern.java  |5 +-
 .../apache/phoenix/filter/SkipScanFilter.java   |3 +-
 .../apache/phoenix/index/IndexMaintainer.java   |  127 +-
 .../phoenix/iterate/BaseResultIterators.java|  109 +-
 .../phoenix/iterate/OrderedResultIterator.java  |   52 +-
 .../iterate/RoundRobinResultIterator.java   |2 +-
 .../apache/phoenix/jdbc/PhoenixConnection.java  |   29 +-
 .../apache/phoenix/jdbc/PhoenixStatement.java   |  213 +-
 .../phoenix/mapreduce/util/ConnectionUtil.java  |9 +-
 .../util/PhoenixConfigurationUtil.java  |   14 +
 .../phoenix/monitoring/MetricsStopWatch.java|8 +-
 .../apache/phoenix/parse/AddJarsStatement.java  |   38 +
 .../org/apache/phoenix/parse/CastParseNode.java |2 +-
 .../phoenix/parse/CreateFunctionStatement.java  |7 +-
 .../phoenix/parse/DeleteJarStatement.java   |   19 +
 .../apache/phoenix/parse/ListJarsStatement.java |   34 +
 .../org/apache/phoenix/parse/PFunction.java |   28 +-
 .../apache/phoenix/parse/ParseNodeFactory.java  |   22 +-
 .../apache/phoenix/parse/SelectStatement.java   |6 +-
 

[47/50] [abbrv] phoenix git commit: PHOENIX-2125 ORDER BY on full PK on salted table does not work

2015-07-20 Thread maryannxue
PHOENIX-2125 ORDER BY on full PK on salted table does not work


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b329e85b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b329e85b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b329e85b

Branch: refs/heads/calcite
Commit: b329e85b697575fcebcde9555c991038d14e4a3c
Parents: f006df5
Author: Samarth samarth.j...@salesforce.com
Authored: Fri Jul 17 17:30:56 2015 -0700
Committer: Samarth samarth.j...@salesforce.com
Committed: Fri Jul 17 17:30:56 2015 -0700

--
 .../org/apache/phoenix/execute/ScanPlan.java| 12 ++-
 .../java/org/apache/phoenix/util/ScanUtil.java  |  6 +++-
 .../phoenix/compile/QueryCompilerTest.java  | 36 ++--
 .../expression/ArrayToStringFunctionTest.java   |  4 ++-
 4 files changed, 44 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b329e85b/phoenix-core/src/main/java/org/apache/phoenix/execute/ScanPlan.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/execute/ScanPlan.java 
b/phoenix-core/src/main/java/org/apache/phoenix/execute/ScanPlan.java
index fd9f8ad..e9b8a3a 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/execute/ScanPlan.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/ScanPlan.java
@@ -185,9 +185,19 @@ public class ScanPlan extends BaseQueryPlan {
 if (isOrdered) {
 scanner = new MergeSortTopNResultIterator(iterators, limit, 
orderBy.getOrderByExpressions());
 } else {
-if ((isSalted || table.getIndexType() == IndexType.LOCAL)  
ScanUtil.forceRowKeyOrder(context)) { 
+if ((isSalted || table.getIndexType() == IndexType.LOCAL)  
ScanUtil.shouldRowsBeInRowKeyOrder(orderBy, context)) {
+/*
+ * For salted tables or local index, a merge sort is needed 
if: 
+ * 1) The config phoenix.query.force.rowkeyorder is set to 
true 
+ * 2) Or if the query has an order by that wants to sort
+ * the results by the row key (forward or reverse ordering)
+ */
 scanner = new MergeSortRowKeyResultIterator(iterators, 
isSalted ? SaltingUtil.NUM_SALTING_BYTES : 0, orderBy == 
OrderBy.REV_ROW_KEY_ORDER_BY);
 } else if (useRoundRobinIterator()) {
+/*
+ * For any kind of tables, round robin is possible if there is
+ * no ordering of rows needed.
+ */
 scanner = new RoundRobinResultIterator(iterators, this);
 } else {
 scanner = new ConcatResultIterator(iterators);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b329e85b/phoenix-core/src/main/java/org/apache/phoenix/util/ScanUtil.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/ScanUtil.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/ScanUtil.java
index d63edbb..9d104ca 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/ScanUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/ScanUtil.java
@@ -677,11 +677,15 @@ public class ScanUtil {
  * not even row key order. Also no point doing round robin of scanners 
if fetch size
  * is 1.
  */
-return fetchSize > 1 && !forceRowKeyOrder(context) && 
(orderBy.getOrderByExpressions().isEmpty() && orderBy != FWD_ROW_KEY_ORDER_BY 
&& orderBy != REV_ROW_KEY_ORDER_BY);
+return fetchSize > 1 && !shouldRowsBeInRowKeyOrder(orderBy, context) 
&& orderBy.getOrderByExpressions().isEmpty();
 }
 
 public static boolean forceRowKeyOrder(StatementContext context) {
 return context.getConnection().getQueryServices().getProps()
 .getBoolean(QueryServices.FORCE_ROW_KEY_ORDER_ATTRIB, 
QueryServicesOptions.DEFAULT_FORCE_ROW_KEY_ORDER);
 }
+
+public static boolean shouldRowsBeInRowKeyOrder(OrderBy orderBy, 
StatementContext context) {
+return forceRowKeyOrder(context) || orderBy == FWD_ROW_KEY_ORDER_BY || 
orderBy == REV_ROW_KEY_ORDER_BY;
+}
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b329e85b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
index 0f34582..98b130e 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
+++ 

[11/50] [abbrv] phoenix git commit: PHOENIX-2097 Add new MD5 test for composite PK

2015-07-20 Thread maryannxue
PHOENIX-2097 Add new MD5 test for composite PK


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/69e55df8
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/69e55df8
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/69e55df8

Branch: refs/heads/calcite
Commit: 69e55df8ff42aedf8c12da569dc5e85cc13eb95d
Parents: efb941a
Author: James Taylor jtay...@salesforce.com
Authored: Thu Jul 2 10:03:55 2015 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Wed Jul 8 11:57:40 2015 -0700

--
 .../apache/phoenix/end2end/MD5FunctionIT.java| 19 +++
 1 file changed, 19 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/69e55df8/phoenix-core/src/it/java/org/apache/phoenix/end2end/MD5FunctionIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/MD5FunctionIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/MD5FunctionIT.java
index ff7ebdb..a6107df 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/MD5FunctionIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/MD5FunctionIT.java
@@ -52,6 +52,25 @@ public class MD5FunctionIT extends BaseHBaseManagedTimeIT {
   }  
   
   @Test
+  public void testRetrieveCompositeKey() throws Exception {
+  String testString = "FOOBAR";
+  
+  Connection conn = DriverManager.getConnection(getUrl());
+  String ddl = "CREATE TABLE IF NOT EXISTS MD5_RETRIEVE_TEST (k1 CHAR(3) 
NOT NULL, k2 CHAR(3) NOT NULL, CONSTRAINT PK PRIMARY KEY (K1,K2))";
+  conn.createStatement().execute(ddl);
+  String dml = "UPSERT INTO MD5_RETRIEVE_TEST VALUES('FOO','BAR')";
+  conn.createStatement().execute(dml);
+  conn.commit();
+  
+  ResultSet rs = conn.createStatement().executeQuery("SELECT MD5((K1,K2)) 
FROM MD5_RETRIEVE_TEST");
+  assertTrue(rs.next());
+  byte[] first = 
MessageDigest.getInstance("MD5").digest(testString.getBytes());
+  byte[] second = rs.getBytes(1);
+  assertArrayEquals(first, second);
+  assertFalse(rs.next());
+  }  
+  
+  @Test
   public void testUpsert() throws Exception {
   String testString1 = mwalsh1;
   String testString2 = mwalsh2;



[16/50] [abbrv] phoenix git commit: PHOENIX-1889 Support alter/replace and drop functions(Rajeshbabu)

2015-07-20 Thread maryannxue
PHOENIX-1889 Support alter/replace and drop functions(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/11bdb0ee
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/11bdb0ee
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/11bdb0ee

Branch: refs/heads/calcite
Commit: 11bdb0eedbbea1e58b60fe8ebba21e5c168261b7
Parents: fa2d79a
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Thu Jul 9 17:24:39 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Thu Jul 9 17:24:39 2015 +0530

--
 .../phoenix/end2end/UserDefinedFunctionsIT.java | 145 ++-
 phoenix-core/src/main/antlr3/PhoenixSQL.g   |   5 +-
 .../phoenix/compile/CreateFunctionCompiler.java |   4 +-
 .../coprocessor/MetaDataEndpointImpl.java   |  40 +++--
 .../coprocessor/generated/PFunctionProtos.java  | 100 -
 .../apache/phoenix/jdbc/PhoenixStatement.java   |  28 ++--
 .../phoenix/parse/CreateFunctionStatement.java  |   7 +-
 .../org/apache/phoenix/parse/PFunction.java |  28 +++-
 .../apache/phoenix/parse/ParseNodeFactory.java  |   4 +-
 .../query/ConnectionQueryServicesImpl.java  |   1 +
 .../apache/phoenix/schema/MetaDataClient.java   |  20 ++-
 phoenix-protocol/src/main/PFunction.proto   |   1 +
 12 files changed, 336 insertions(+), 47 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/11bdb0ee/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
index 613231d..e2b7b4c 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
@@ -17,15 +17,18 @@
  */
 package org.apache.phoenix.end2end;
 
+import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CATALOG_SCHEMA;
+import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_FUNCTION_TABLE;
+import static org.apache.phoenix.query.QueryServices.DYNAMIC_JARS_DIR_KEY;
 import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL;
 import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR;
 import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_TERMINATOR;
 import static 
org.apache.phoenix.util.PhoenixRuntime.PHOENIX_TEST_DRIVER_URL_PARAM;
-import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CATALOG_SCHEMA;
-import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_FUNCTION_TABLE;
-import static org.apache.phoenix.query.QueryServices.DYNAMIC_JARS_DIR_KEY;
 import static org.apache.phoenix.util.TestUtil.LOCALHOST;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.BufferedInputStream;
 import java.io.File;
@@ -34,6 +37,7 @@ import java.io.FileOutputStream;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.sql.Connection;
+import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.Statement;
 import java.util.HashSet;
@@ -60,8 +64,6 @@ import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.schema.FunctionAlreadyExistsException;
 import org.apache.phoenix.schema.FunctionNotFoundException;
 import org.apache.phoenix.schema.ValueRangeExcpetion;
-import org.apache.phoenix.schema.types.PDataType;
-import org.apache.phoenix.schema.types.PArrayDataType;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.QueryUtil;
 import org.apache.phoenix.util.ReadOnlyProps;
@@ -561,6 +563,104 @@ public class UserDefinedFunctionsIT extends 
BaseOwnClusterIT{
 }
 
 @Test
+public void testUDFsWhenTimestampManagedAtClient() throws Exception {
+long ts = 100;
+Properties props = new Properties();
+props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(ts));
+Connection conn = DriverManager.getConnection(url, props);
+Statement stmt = conn.createStatement();
+String query = "select count(*) from " + SYSTEM_CATALOG_SCHEMA + ".\"" 
+ SYSTEM_FUNCTION_TABLE + "\"";
+ResultSet rs = stmt.executeQuery(query);
+rs.next();
+int numRowsBefore = rs.getInt(1);
+stmt.execute(create function mysum61(INTEGER, INTEGER CONSTANT 
defaultValue=10 minvalue=1 maxvalue=15 ) returns INTEGER as 
'org.apache.phoenix.end2end.+MY_SUM_CLASS_NAME+' using jar 
++ 

[26/50] [abbrv] phoenix git commit: PHOENIX-2109 - Joda time version conflict between phoenix and pherf

2015-07-20 Thread maryannxue
PHOENIX-2109 - Joda time version conflict between phoenix and pherf


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a772a4a0
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a772a4a0
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a772a4a0

Branch: refs/heads/calcite
Commit: a772a4a0ca3c9ac98335ce2148eb2b34d3ce1369
Parents: 70c542d
Author: Cody Marcel cody.mar...@gmail.com
Authored: Mon Jul 13 14:23:53 2015 -0700
Committer: Cody Marcel cody.mar...@gmail.com
Committed: Mon Jul 13 14:23:53 2015 -0700

--
 phoenix-pherf/pom.xml| 2 +-
 .../src/it/java/org/apache/phoenix/pherf/DataIngestIT.java   | 8 
 2 files changed, 9 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a772a4a0/phoenix-pherf/pom.xml
--
diff --git a/phoenix-pherf/pom.xml b/phoenix-pherf/pom.xml
index 0facbde..4d27305 100644
--- a/phoenix-pherf/pom.xml
+++ b/phoenix-pherf/pom.xml
@@ -219,7 +219,7 @@
 dependency
 groupIdjoda-time/groupId
 artifactIdjoda-time/artifactId
-version1.6.2/version
+version${jodatime.version}/version
 /dependency
 dependency
 groupIdcom.googlecode.java-diff-utils/groupId

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a772a4a0/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
--
diff --git 
a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java 
b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
index 828ac38..8a340b3 100644
--- a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
+++ b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
@@ -27,6 +27,7 @@ import org.apache.phoenix.pherf.configuration.Scenario;
 import org.apache.phoenix.pherf.rules.DataValue;
 import org.apache.phoenix.pherf.rules.RulesApplier;
 import org.apache.phoenix.pherf.workload.QueryExecutor;
+import org.apache.phoenix.pherf.workload.Workload;
 import org.apache.phoenix.pherf.workload.WorkloadExecutor;
 import org.apache.phoenix.pherf.workload.WriteWorkload;
 import org.junit.Before;
@@ -68,6 +69,7 @@ public class DataIngestIT extends ResultBaseTestIT {
 WriteWorkload loader = new WriteWorkload(util, parser, scenario);
 WorkloadExecutor executor = new WorkloadExecutor();
 executor.add(loader);
+executor.get();
 
 RulesApplier rulesApplier = loader.getRulesApplier();
 ListMap modelList = rulesApplier.getModelList();
@@ -90,6 +92,12 @@ public class DataIngestIT extends ResultBaseTestIT {
 data.getDistribution() == Integer.MIN_VALUE);
 }
 }
+
+// Run some queries
+Workload query = new QueryExecutor(parser, util, 
executor.getPool());
+executor.add(query);
+executor.get();
+
 } catch (Exception e) {
 fail(We had an exception:  + e.getMessage());
 }



[21/50] [abbrv] phoenix git commit: PHOENIX-2099 Backward Compatibility - Concurrent modification error on connect

2015-07-20 Thread maryannxue
PHOENIX-2099 Backward Compatibility - Concurrent modification error on connect


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/39afa9f1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/39afa9f1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/39afa9f1

Branch: refs/heads/calcite
Commit: 39afa9f18cad132a8a5c49b0e2e31671c85a3c7e
Parents: 997de5e
Author: Samarth samarth.j...@salesforce.com
Authored: Fri Jul 10 11:04:44 2015 -0700
Committer: Samarth samarth.j...@salesforce.com
Committed: Fri Jul 10 11:04:44 2015 -0700

--
 .../coprocessor/MetaDataEndpointImpl.java   |  24 +-
 .../query/ConnectionQueryServicesImpl.java  |   8 +-
 .../org/apache/phoenix/util/UpgradeUtil.java| 416 ++-
 phoenix-protocol/src/main/PTable.proto  |   1 +
 4 files changed, 237 insertions(+), 212 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/39afa9f1/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index f786768..399d56e 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -39,7 +39,6 @@ import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.IS_ARRAY_BYTES;
 import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.IS_CONSTANT_BYTES;
 import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.IS_VIEW_REFERENCED_BYTES;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.JAR_PATH_BYTES;
-import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.KEY_SEQ_BYTES;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.LINK_TYPE_BYTES;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.MAX_VALUE_BYTES;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.MIN_VALUE_BYTES;
@@ -1567,12 +1566,6 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 // We said to drop a table, but found a view or visa versa
 if (type != expectedType) { return new 
MetaDataMutationResult(MutationCode.TABLE_NOT_FOUND,
 EnvironmentEdgeManager.currentTimeMillis(), null); 
}
-if (table.getBaseColumnCount() == 0) {
-// If the base column count hasn't been set, then it 
means that the upgrade
-// to 4.5.0 is in progress. Have the client retry the 
mutation operation.
-return new 
MetaDataMutationResult(MutationCode.CONCURRENT_TABLE_MUTATION,
-EnvironmentEdgeManager.currentTimeMillis(), 
table);
-}
 }
 result = mutator.updateMutation(table, rowKeyMetaData, 
tableMetadata, region,
 invalidateList, locks, clientTimeStamp);
@@ -1658,7 +1651,7 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 }
 
 // if there is already a view column with the same name as 
the base table column we are trying to add
-   if (existingViewColumn!=null) {
+   if (existingViewColumn != null) {
ListCell dataTypes = viewColumnDefinitionPut
 
.get(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES,
 
PhoenixDatabaseMetaData.DATA_TYPE_BYTES);
@@ -1684,7 +1677,7 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
}
 }
// if there is an existing view column that 
matches the column being added to the base table and if the column being added 
has a null
-   // scale or maxLength, we need to explicity do a put to 
set the scale or maxLength to null (in case the view column has the scale or 
+   // scale or maxLength, we need to explicitly do a put 
to set the scale or maxLength to null (in case the view column has the scale or 
// max length set)
ListCell columnSizes = 
viewColumnDefinitionPut.get(
 PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES,
@@ -1714,7 +1707,7 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 

[23/50] [abbrv] phoenix git commit: PHOENIX-2098 Pig Udf that given a count Reserve chunks of numbers for a sequence (Siddhi Mehta)

2015-07-20 Thread maryannxue
PHOENIX-2098 Pig Udf that given a count Reserve chunks of numbers for a 
sequence (Siddhi Mehta)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/300de02c
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/300de02c
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/300de02c

Branch: refs/heads/calcite
Commit: 300de02c01def7e8f23dfc1930b2d9d281c1bebd
Parents: 81b3cb3
Author: James Taylor jtay...@salesforce.com
Authored: Fri Jul 10 16:32:56 2015 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Fri Jul 10 16:32:56 2015 -0700

--
 .../phoenix/pig/udf/ReserveNSequenceTestIT.java | 262 +++
 .../phoenix/pig/udf/ReserveNSequence.java   |  88 +++
 2 files changed, 350 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/300de02c/phoenix-pig/src/it/java/org/apache/phoenix/pig/udf/ReserveNSequenceTestIT.java
--
diff --git 
a/phoenix-pig/src/it/java/org/apache/phoenix/pig/udf/ReserveNSequenceTestIT.java
 
b/phoenix-pig/src/it/java/org/apache/phoenix/pig/udf/ReserveNSequenceTestIT.java
new file mode 100644
index 000..2cbb6cc
--- /dev/null
+++ 
b/phoenix-pig/src/it/java/org/apache/phoenix/pig/udf/ReserveNSequenceTestIT.java
@@ -0,0 +1,262 @@
+/**
+ * 
+ */
+package org.apache.phoenix.pig.udf;
+
+import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR;
+import static org.apache.phoenix.util.TestUtil.LOCALHOST;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT;
+import org.apache.pig.data.Tuple;
+import org.apache.pig.data.TupleFactory;
+import org.apache.pig.impl.util.UDFContext;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+/**
+ * Test class to run all the Pig Sequence UDF integration tests against a 
virtual map reduce cluster.
+ */
+public class ReserveNSequenceTestIT extends BaseHBaseManagedTimeIT {
+
+private static final String CREATE_SEQUENCE_SYNTAX = "CREATE SEQUENCE %s 
START WITH %s INCREMENT BY %s MINVALUE %s MAXVALUE %s CACHE %s";
+private static final String SEQUENCE_NAME = "my_schema.my_sequence";
+private static final long MAX_VALUE = 10;
+
+private static TupleFactory TF;
+private static Connection conn;
+private static String zkQuorum;
+private static Configuration conf;
+private static UDFContext udfContext;
+
+@Rule
+public ExpectedException thrown = ExpectedException.none();
+
+@BeforeClass
+public static void setUpBeforeClass() throws Exception {
+conf = getTestClusterConfig();
+zkQuorum = LOCALHOST + JDBC_PROTOCOL_SEPARATOR + 
getZKClientPort(getTestClusterConfig());
+conf.set(HConstants.ZOOKEEPER_QUORUM, zkQuorum);
+// Properties props = 
PropertiesUtil.deepCopy(TestUtil.TEST_PROPERTIES);
+conn = DriverManager.getConnection(getUrl());
+// Pig variables
+TF = TupleFactory.getInstance();
+}
+
+@Before
+public void setUp() throws SQLException {
+createSequence();
+createUdfContext();
+}
+
+@Test
+public void testReserve() throws Exception {
+doTest(new UDFTestProperties(1));
+}
+
+@Test
+public void testReserveN() throws Exception {
+doTest(new UDFTestProperties(5));
+}
+
+@Test
+public void testReserveNwithPreviousAllocations() throws Exception {
+UDFTestProperties props = new UDFTestProperties(5);
+props.setCurrentValue(4);
+doTest(props);
+}
+
+@Test
+public void testReserveWithZero() throws Exception {
+UDFTestProperties props = new UDFTestProperties(0);
+props.setExceptionExpected(true);
+props.setExceptionClass(IllegalArgumentException.class);
+props.setErrorMessage(ReserveNSequence.INVALID_NUMBER_MESSAGE);
+doTest(props);
+}
+
+@Test
+public void testReserveWithNegativeNumber() throws Exception {
+UDFTestProperties props = new UDFTestProperties(-1);
+props.setExceptionExpected(true);
+props.setExceptionClass(IllegalArgumentException.class);
+props.setErrorMessage(ReserveNSequence.INVALID_NUMBER_MESSAGE);
+doTest(props);
+}
+
+@Test
+public void testReserveMaxLimit() throws Exception {
+UDFTestProperties props = 

[07/50] [abbrv] phoenix git commit: PHOENIX-2074 StackOverflowError for hash join with round robin

2015-07-20 Thread maryannxue
PHOENIX-2074 StackOverflowError for hash join with round robin


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d0c8f9db
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d0c8f9db
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d0c8f9db

Branch: refs/heads/calcite
Commit: d0c8f9dbd15460c023d50c8b8fd9334fb18c05dc
Parents: 39c982f
Author: maryannxue wei@intel.com
Authored: Tue Jul 7 13:27:49 2015 -0400
Committer: maryannxue wei@intel.com
Committed: Tue Jul 7 13:27:49 2015 -0400

--
 .../iterate/RoundRobinResultIteratorIT.java | 80 
 .../iterate/RoundRobinResultIterator.java   |  2 +-
 2 files changed, 81 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/d0c8f9db/phoenix-core/src/it/java/org/apache/phoenix/iterate/RoundRobinResultIteratorIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/iterate/RoundRobinResultIteratorIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/iterate/RoundRobinResultIteratorIT.java
index 6a9b3d4..224ed95 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/iterate/RoundRobinResultIteratorIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/iterate/RoundRobinResultIteratorIT.java
@@ -17,6 +17,7 @@
  */
 package org.apache.phoenix.iterate;
 
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
@@ -30,6 +31,7 @@ import java.sql.Statement;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.Map;
+import java.util.Properties;
 import java.util.Set;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
@@ -43,6 +45,7 @@ import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixResultSet;
 import org.apache.phoenix.query.ConnectionQueryServices;
 import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.ReadOnlyProps;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -301,6 +304,83 @@ public class RoundRobinResultIteratorIT extends 
BaseHBaseManagedTimeIT {
 assertEquals("Number of rows retrieved didnt match for tableB", 
insertedRowsB, rowsB);
 assertEquals("Number of rows retrieved didn't match for tableC", 
insertedRowsC, rowsC);
 }
+
+@Test
+public void testBug2074() throws Exception {
+Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+Connection conn = DriverManager.getConnection(getUrl(), props);
+try {
+conn.createStatement().execute(CREATE TABLE EVENTS 
++(id VARCHAR(10) PRIMARY KEY,  
++ article VARCHAR(10),  
++ misc VARCHAR(10)));
+
+PreparedStatement upsertStmt = conn.prepareStatement(
+upsert into EVENTS(id, article, misc)  + values (?, ?, 
?));
+upsertStmt.setString(1, 001);
+upsertStmt.setString(2, A);
+upsertStmt.setString(3, W);
+upsertStmt.execute();
+upsertStmt.setString(1, 002);
+upsertStmt.setString(2, B);
+upsertStmt.setString(3, X);
+upsertStmt.execute();
+upsertStmt.setString(1, 003);
+upsertStmt.setString(2, C);
+upsertStmt.setString(3, Y);
+upsertStmt.execute();
+upsertStmt.setString(1, 004);
+upsertStmt.setString(2, D);
+upsertStmt.setString(3, Z);
+upsertStmt.execute();
+conn.commit();
+
+conn.createStatement().execute(CREATE TABLE MAPPING 
++(id VARCHAR(10) PRIMARY KEY,  
++ article VARCHAR(10),  
++ category VARCHAR(10)));
+
+upsertStmt = conn.prepareStatement(
+upsert into MAPPING(id, article, category)  + values 
(?, ?, ?));
+upsertStmt.setString(1, 002);
+upsertStmt.setString(2, A);
+upsertStmt.setString(3, X);
+upsertStmt.execute();
+upsertStmt.setString(1, 003);
+upsertStmt.setString(2, B);
+upsertStmt.setString(3, Y);
+upsertStmt.execute();
+upsertStmt.setString(1, 004);
+upsertStmt.setString(2, C);
+upsertStmt.setString(3, Z);
+upsertStmt.execute();
+upsertStmt.setString(1, 005);
+upsertStmt.setString(2, E);
+upsertStmt.setString(3, Z);
+

[40/50] [abbrv] phoenix git commit: PHOENIX-2117 Fix flapping DataIngestIT

2015-07-20 Thread maryannxue
PHOENIX-2117 Fix flapping DataIngestIT


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/cf2bc551
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/cf2bc551
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/cf2bc551

Branch: refs/heads/calcite
Commit: cf2bc55175788603830ba8bc8b3eacc0998361c1
Parents: 9f09f1a
Author: Samarth samarth.j...@salesforce.com
Authored: Tue Jul 14 17:40:29 2015 -0700
Committer: Samarth samarth.j...@salesforce.com
Committed: Tue Jul 14 17:40:46 2015 -0700

--
 phoenix-pherf/src/test/resources/datamodel/test_schema.sql | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/cf2bc551/phoenix-pherf/src/test/resources/datamodel/test_schema.sql
--
diff --git a/phoenix-pherf/src/test/resources/datamodel/test_schema.sql 
b/phoenix-pherf/src/test/resources/datamodel/test_schema.sql
index 162d288..4e6b9d4 100644
--- a/phoenix-pherf/src/test/resources/datamodel/test_schema.sql
+++ b/phoenix-pherf/src/test/resources/datamodel/test_schema.sql
@@ -29,4 +29,4 @@ CREATE TABLE IF NOT EXISTS PHERF.TEST_TABLE (
 PARENT_ID,
 CREATED_DATE DESC
 )
-) VERSIONS=1,MULTI_TENANT=true,SALT_BUCKETS=16
+) VERSIONS=1,MULTI_TENANT=true



[43/50] [abbrv] phoenix git commit: PHOENIX-2123 Diverged view prevents schema propagation to the entire view hierarchy

2015-07-20 Thread maryannxue
PHOENIX-2123 Diverged view prevents schema propagation to the entire view 
hierarchy


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ec93ec79
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ec93ec79
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ec93ec79

Branch: refs/heads/calcite
Commit: ec93ec79c24cc847ed4a294da3cc0b0e3da3
Parents: 553d3cc
Author: Samarth samarth.j...@salesforce.com
Authored: Wed Jul 15 13:34:39 2015 -0700
Committer: Samarth samarth.j...@salesforce.com
Committed: Wed Jul 15 13:34:39 2015 -0700

--
 .../apache/phoenix/end2end/AlterTableIT.java| 34 +++-
 .../coprocessor/MetaDataEndpointImpl.java   |  7 ++--
 .../org/apache/phoenix/util/UpgradeUtil.java|  6 ++--
 3 files changed, 24 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ec93ec79/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
index 607f52a..1758dd4 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
@@ -2575,36 +2575,38 @@ public class AlterTableIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 }
 
 @Test
-public void testDivorcedViewsStayDivorced() throws Exception {
-String baseTable = testDivorcedViewsStayDivorced;
-String viewName = baseTable + _view;
+public void testDivergedViewsStayDiverged() throws Exception {
+String baseTable = testDivergedViewsStayDiverged;
+String view1 = baseTable + _view1;
+String view2 = baseTable + _view2;
 try (Connection conn = DriverManager.getConnection(getUrl())) {
 String tableDDL = CREATE TABLE  + baseTable +  (PK1 VARCHAR NOT 
NULL PRIMARY KEY, V1 VARCHAR, V2 VARCHAR);
 conn.createStatement().execute(tableDDL);
 
-String viewDDL = CREATE VIEW  + viewName +  AS SELECT * FROM  
+ baseTable;
+String viewDDL = CREATE VIEW  + view1 +  AS SELECT * FROM  + 
baseTable;
+conn.createStatement().execute(viewDDL);
+
+viewDDL = CREATE VIEW  + view2 +  AS SELECT * FROM  + 
baseTable;
 conn.createStatement().execute(viewDDL);
 
-// Drop the column inherited from base table to divorce the view
-String dropColumn = ALTER VIEW  + viewName +  DROP COLUMN V2;
+// Drop the column inherited from base table to make it diverged
+String dropColumn = ALTER VIEW  + view1 +  DROP COLUMN V2;
 conn.createStatement().execute(dropColumn);
 
 String alterBaseTable = ALTER TABLE  + baseTable +  ADD V3 
VARCHAR;
-try {
-   conn.createStatement().execute(alterBaseTable);
-   fail();
-   }
-   catch (SQLException e) {
-   assertEquals(Unexpected exception, 
CANNOT_MUTATE_TABLE.getErrorCode(), e.getErrorCode());
-   }
-
-// Column V3 shouldn't have propagated to the divorced view.
-String sql = SELECT V3 FROM  + viewName;
+conn.createStatement().execute(alterBaseTable);
+   
+// Column V3 shouldn't have propagated to the diverged view.
+String sql = SELECT V3 FROM  + view1;
 try {
 conn.createStatement().execute(sql);
 } catch (SQLException e) {
 assertEquals(SQLExceptionCode.COLUMN_NOT_FOUND.getErrorCode(), 
e.getErrorCode());
 }
+
+// However, column V3 should have propagated to the non-diverged 
view.
+sql = SELECT V3 FROM  + view2;
+conn.createStatement().execute(sql);
 } 
 }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ec93ec79/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index 5396a69..05e7acb 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -1693,11 +1693,10 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 

[14/50] [abbrv] phoenix git commit: PHOENIX-2058 Check for existence and compatibility of columns being added in view

2015-07-20 Thread maryannxue
PHOENIX-2058 Check for existence and compatibility of columns being added in 
view


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/aa4c9003
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/aa4c9003
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/aa4c9003

Branch: refs/heads/calcite
Commit: aa4c9003d4bd4827718e0f37de8c270f94fd5694
Parents: 8c9a6b8
Author: Thomas D'Silva tdsi...@salesforce.com
Authored: Wed Jul 8 15:54:17 2015 -0700
Committer: Thomas D'Silva tdsi...@salesforce.com
Committed: Wed Jul 8 15:54:17 2015 -0700

--
 .../src/main/java/org/apache/phoenix/schema/MetaDataClient.java| 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/aa4c9003/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
index 8a1c14b..d2d4338 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
@@ -2204,7 +2204,7 @@ public class MetaDataClient {
 String familyName = null;
 String msg = null;
 // TODO: better to return error code
-if (result.getColumnName() != null  result.getColumnName() != 
null) {
+if (result.getColumnName() != null) {
 familyName = result.getFamilyName() == null ? null : 
Bytes.toString(result.getFamilyName());
 columnName = Bytes.toString(result.getColumnName());
 msg = Cannot add/drop column referenced by VIEW;



[22/50] [abbrv] phoenix git commit: PHOENIX-2107 Update JDBC version to 4.5.0

2015-07-20 Thread maryannxue
PHOENIX-2107 Update JDBC version to 4.5.0


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/81b3cb35
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/81b3cb35
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/81b3cb35

Branch: refs/heads/calcite
Commit: 81b3cb35679adf7772266f2aed9456273a8b0012
Parents: 39afa9f
Author: Samarth samarth.j...@salesforce.com
Authored: Fri Jul 10 11:15:16 2015 -0700
Committer: Samarth samarth.j...@salesforce.com
Committed: Fri Jul 10 11:15:16 2015 -0700

--
 .../main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/81b3cb35/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
index 9009e7c..013f7a6 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
@@ -55,7 +55,7 @@ import com.google.protobuf.ByteString;
  */
 public abstract class MetaDataProtocol extends MetaDataService {
 public static final int PHOENIX_MAJOR_VERSION = 4;
-public static final int PHOENIX_MINOR_VERSION = 4;
+public static final int PHOENIX_MINOR_VERSION = 5;
 public static final int PHOENIX_PATCH_NUMBER = 0;
 public static final int PHOENIX_VERSION =
 VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, 
PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER);



[12/50] [abbrv] phoenix git commit: PHOENIX-2096 Tweak criteria for when round robin iterator is used

2015-07-20 Thread maryannxue
PHOENIX-2096 Tweak criteria for when round robin iterator is used


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/61f1900d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/61f1900d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/61f1900d

Branch: refs/heads/calcite
Commit: 61f1900ddec5eb5c06245c999711e1fd0c67af53
Parents: 69e55df
Author: James Taylor jtay...@salesforce.com
Authored: Thu Jul 2 09:48:42 2015 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Wed Jul 8 11:57:40 2015 -0700

--
 .../apache/phoenix/end2end/ReverseScanIT.java   | 29 
 .../org/apache/phoenix/execute/ScanPlan.java|  4 +--
 .../java/org/apache/phoenix/util/ScanUtil.java  |  7 ++---
 3 files changed, 28 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/61f1900d/phoenix-core/src/it/java/org/apache/phoenix/end2end/ReverseScanIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ReverseScanIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ReverseScanIT.java
index 5481d80..eca183b 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ReverseScanIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ReverseScanIT.java
@@ -19,6 +19,9 @@ package org.apache.phoenix.end2end;
 
 import static org.apache.phoenix.util.TestUtil.ROW2;
 import static org.apache.phoenix.util.TestUtil.ROW3;
+import static org.apache.phoenix.util.TestUtil.ROW4;
+import static org.apache.phoenix.util.TestUtil.ROW5;
+import static org.apache.phoenix.util.TestUtil.ROW6;
 import static org.apache.phoenix.util.TestUtil.ROW7;
 import static org.apache.phoenix.util.TestUtil.ROW8;
 import static org.apache.phoenix.util.TestUtil.ROW9;
@@ -31,6 +34,7 @@ import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
+import java.sql.Statement;
 import java.util.Map;
 import java.util.Properties;
 
@@ -39,6 +43,7 @@ import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.QueryUtil;
 import org.apache.phoenix.util.ReadOnlyProps;
 import org.apache.phoenix.util.TestUtil;
 import org.junit.BeforeClass;
@@ -81,11 +86,11 @@ public class ReverseScanIT extends BaseHBaseManagedTimeIT {
 initATableValues(tenantId, getSplitsAtRowKeys(tenantId), getUrl());
 Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
 Connection conn = DriverManager.getConnection(getUrl(), props);
-String query = SELECT entity_id FROM aTable WHERE entity_id = ? 
ORDER BY organization_id DESC, entity_id DESC;
+String query = SELECT entity_id FROM aTable WHERE entity_id = ' + 
ROW3 + ' ORDER BY organization_id DESC, entity_id DESC;
 try {
-PreparedStatement statement = conn.prepareStatement(query);
-statement.setString(1, ROW7);
-ResultSet rs = statement.executeQuery();
+Statement stmt = conn.createStatement();
+stmt.setFetchSize(2);
+ResultSet rs = stmt.executeQuery(query);
 
 assertTrue (rs.next());
 assertEquals(ROW9,rs.getString(1));
@@ -93,10 +98,24 @@ public class ReverseScanIT extends BaseHBaseManagedTimeIT {
 assertEquals(ROW8,rs.getString(1));
 assertTrue (rs.next());
 assertEquals(ROW7,rs.getString(1));
+assertTrue (rs.next());
+assertEquals(ROW6,rs.getString(1));
+assertTrue (rs.next());
+assertEquals(ROW5,rs.getString(1));
+assertTrue (rs.next());
+assertEquals(ROW4,rs.getString(1));
+assertTrue (rs.next());
+assertEquals(ROW3,rs.getString(1));
 
 assertFalse(rs.next());
 
-statement = conn.prepareStatement(SELECT entity_id FROM aTable 
WHERE organization_id = ? AND entity_id = ? ORDER BY organization_id DESC, 
entity_id DESC);
+rs = conn.createStatement().executeQuery(EXPLAIN  + query);
+assertEquals(
+CLIENT PARALLEL 1-WAY REVERSE FULL SCAN OVER ATABLE\n + 
+SERVER FILTER BY FIRST KEY ONLY AND ENTITY_ID = 
'00A323122312312',
+QueryUtil.getExplainPlan(rs));
+
+PreparedStatement statement = conn.prepareStatement(SELECT 
entity_id FROM aTable WHERE organization_id = ? AND entity_id = ? ORDER BY 
organization_id DESC, entity_id DESC);
 statement.setString(1, tenantId);
 statement.setString(2, ROW7);
  

[37/50] [abbrv] phoenix git commit: PHOENIX-2111 Race condition on creation of new view and adding of column to base table

2015-07-20 Thread maryannxue
http://git-wip-us.apache.org/repos/asf/phoenix/blob/9f09f1a5/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index feb5989..52b038b 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@ -18,6 +18,9 @@
 package org.apache.phoenix.query;
 
 import static org.apache.hadoop.hbase.HColumnDescriptor.TTL;
+import static 
org.apache.phoenix.coprocessor.MetaDataProtocol.PHOENIX_MAJOR_VERSION;
+import static 
org.apache.phoenix.coprocessor.MetaDataProtocol.PHOENIX_MINOR_VERSION;
+import static 
org.apache.phoenix.coprocessor.MetaDataProtocol.PHOENIX_PATCH_NUMBER;
 import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES;
 import static 
org.apache.phoenix.query.QueryServicesOptions.DEFAULT_DROP_METADATA;
 import static org.apache.phoenix.util.UpgradeUtil.upgradeTo4_5_0;
@@ -110,6 +113,7 @@ import org.apache.phoenix.hbase.index.Indexer;
 import org.apache.phoenix.hbase.index.covered.CoveredColumnsIndexBuilder;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 import org.apache.phoenix.hbase.index.util.KeyValueBuilder;
+import org.apache.phoenix.hbase.index.util.VersionUtil;
 import org.apache.phoenix.index.PhoenixIndexBuilder;
 import org.apache.phoenix.index.PhoenixIndexCodec;
 import org.apache.phoenix.jdbc.PhoenixConnection;
@@ -966,7 +970,7 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 BlockingRpcCallbackGetVersionResponse 
rpcCallback =
 new 
BlockingRpcCallbackGetVersionResponse();
 GetVersionRequest.Builder builder = 
GetVersionRequest.newBuilder();
-
+
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, 
PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
 instance.getVersion(controller, builder.build(), 
rpcCallback);
 if(controller.getFailedOn() != null) {
 throw controller.getFailedOn();
@@ -1265,6 +1269,7 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 MutationProto mp = ProtobufUtil.toProto(m);
 
builder.addTableMetadataMutations(mp.toByteString());
 }
+
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, 
PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
 instance.createTable(controller, builder.build(), 
rpcCallback);
 if(controller.getFailedOn() != null) {
 throw controller.getFailedOn();
@@ -1293,12 +1298,12 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 builder.setTableName(ByteStringer.wrap(tableBytes));
 builder.setTableTimestamp(tableTimestamp);
 builder.setClientTimestamp(clientTimestamp);
-
-   instance.getTable(controller, builder.build(), rpcCallback);
-   if(controller.getFailedOn() != null) {
-   throw controller.getFailedOn();
-   }
-   return rpcCallback.get();
+
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, 
PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
+instance.getTable(controller, builder.build(), 
rpcCallback);
+if(controller.getFailedOn() != null) {
+throw controller.getFailedOn();
+}
+return rpcCallback.get();
 }
 });
 }
@@ -1325,7 +1330,7 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 }
 builder.setTableType(tableType.getSerializedValue());
 builder.setCascade(cascade);
-
+
builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, 
PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
 instance.dropTable(controller, builder.build(), 
rpcCallback);
 if(controller.getFailedOn() != null) {
 throw controller.getFailedOn();
@@ -1379,6 +1384,7 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 
builder.addTableMetadataMutations(mp.toByteString());
 }
 

[25/50] [abbrv] phoenix git commit: PHOENIX-2112 Phoenix-Spark need to support UTF8String for spark 1.4.0 (Yi Tian)

2015-07-20 Thread maryannxue
PHOENIX-2112 Phoenix-Spark need to support UTF8String for spark 1.4.0 (Yi Tian)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/70c542d8
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/70c542d8
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/70c542d8

Branch: refs/heads/calcite
Commit: 70c542d8e7b8bdcea58e2ef8cbf76143ec5ae66c
Parents: 052836c
Author: Josh Mahonin jmaho...@interset.com
Authored: Mon Jul 13 16:15:42 2015 -0400
Committer: Josh Mahonin jmaho...@interset.com
Committed: Mon Jul 13 16:15:42 2015 -0400

--
 phoenix-spark/pom.xml | 2 +-
 .../main/scala/org/apache/phoenix/spark/PhoenixRelation.scala | 7 ---
 2 files changed, 5 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/70c542d8/phoenix-spark/pom.xml
--
diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml
index 289801a..ef4d2c4 100644
--- a/phoenix-spark/pom.xml
+++ b/phoenix-spark/pom.xml
@@ -34,7 +34,7 @@
  <name>Phoenix - Spark</name>
 
  <properties>
-    <spark.version>1.3.0</spark.version>
+    <spark.version>1.4.0</spark.version>
     <scala.version>2.10.4</scala.version>
     <scala.binary.version>2.10</scala.binary.version>
     <top.dir>${project.basedir}/..</top.dir>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/70c542d8/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRelation.scala
--
diff --git 
a/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRelation.scala 
b/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRelation.scala
index 8804f3b..f20ad61 100644
--- 
a/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRelation.scala
+++ 
b/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRelation.scala
@@ -19,10 +19,10 @@ package org.apache.phoenix.spark
 
 import org.apache.hadoop.conf.Configuration
 import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.types.StructType
+import org.apache.spark.sql.types.{UTF8String, StructType}
 import org.apache.spark.sql.{Row, SQLContext}
 import org.apache.spark.sql.sources._
-import org.apache.commons.lang.StringEscapeUtils.escapeSql
+import org.apache.phoenix.util.StringUtil.escapeStringConstant
 
 case class PhoenixRelation(tableName: String, zkUrl: String)(@transient val 
sqlContext: SQLContext)
 extends BaseRelation with PrunedFilteredScan {
@@ -91,7 +91,8 @@ case class PhoenixRelation(tableName: String, zkUrl: 
String)(@transient val sqlC
 
   // Helper function to escape string values in SQL queries
   private def compileValue(value: Any): Any = value match {
-case stringValue: String = s'${escapeSql(stringValue)}'
+case stringValue: String = s'${escapeStringConstant(stringValue)}'
+case stringValue: UTF8String = 
s'${escapeStringConstant(stringValue.toString)}'
 case _ = value
   }
 }



[32/50] [abbrv] phoenix git commit: PHOENIX-2067 Sort order incorrect for variable length DESC columns

2015-07-20 Thread maryannxue
http://git-wip-us.apache.org/repos/asf/phoenix/blob/2620a80c/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PArrayDataType.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PArrayDataType.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PArrayDataType.java
index 4e32cc0..dd11569 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PArrayDataType.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PArrayDataType.java
@@ -20,6 +20,7 @@ package org.apache.phoenix.schema.types;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.sql.Types;
 import java.text.Format;
 import java.util.LinkedList;
 import java.util.List;
@@ -34,61 +35,88 @@ import org.apache.phoenix.schema.SortOrder;
 import org.apache.phoenix.schema.ValueSchema;
 import org.apache.phoenix.schema.tuple.Tuple;
 import org.apache.phoenix.util.ByteUtil;
+import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.TrustedByteArrayOutputStream;
 
 import com.google.common.base.Objects;
 import com.google.common.base.Preconditions;
 
 /**
- * The datatype for PColummns that are Arrays. Any variable length array would 
follow the below order. 
- * Every element would be seperated by a seperator byte '0'. Null elements are 
counted and once a first 
- * non null element appears we write the count of the nulls prefixed with a 
seperator byte.
- * Trailing nulls are not taken into account. The last non null element is 
followed by two seperator bytes. 
- * For eg a, b, null, null, c, null - 65 0 66 0 0 2 67 0 0 0 
- * a null null null b c null d - 65 0 0 3 66 0 67 0 0 1 68 0 0 0.
- * The reason we use this serialization format is to allow the
- * byte array of arrays of the same type to be directly comparable against 
each other. 
- * This prevents a costly deserialization on compare and allows an array 
column to be used as the last column in a primary key constraint.
+ * The datatype for PColummns that are Arrays. Any variable length array would 
follow the below order. Every element
+ * would be seperated by a seperator byte '0'. Null elements are counted and 
once a first non null element appears we
+ * write the count of the nulls prefixed with a seperator byte. Trailing nulls 
are not taken into account. The last non
+ * null element is followed by two seperator bytes. For eg a, b, null, null, 
c, null - 65 0 66 0 0 2 67 0 0 0 a null
+ * null null b c null d - 65 0 0 3 66 0 67 0 0 1 68 0 0 0. The reason we use 
this serialization format is to allow the
+ * byte array of arrays of the same type to be directly comparable against 
each other. This prevents a costly
+ * deserialization on compare and allows an array column to be used as the 
last column in a primary key constraint.
  */
 public abstract class PArrayDataType<T> extends PDataType<T> {
 
+@Override
+public final int getResultSetSqlType() {
+  return Types.ARRAY;
+}
+
+@Override
+public final void coerceBytes(ImmutableBytesWritable ptr, Object object, 
PDataType actualType,
+Integer maxLength, Integer scale, SortOrder actualModifer, Integer 
desiredMaxLength,
+Integer desiredScale, SortOrder desiredModifier, boolean 
expectedRowKeyOrderOptimizable) {
+  coerceBytes(ptr, object, actualType, maxLength, scale, desiredMaxLength, 
desiredScale,
+  this, actualModifer, desiredModifier, 
expectedRowKeyOrderOptimizable);
+}
+
+@Override
+public final void coerceBytes(ImmutableBytesWritable ptr, Object object, 
PDataType actualType,
+Integer maxLength, Integer scale, SortOrder actualModifer, Integer 
desiredMaxLength,
+Integer desiredScale, SortOrder desiredModifier) {
+  coerceBytes(ptr, object, actualType, maxLength, scale, desiredMaxLength, 
desiredScale,
+  this, actualModifer, desiredModifier, true);
+}
+
 public static final byte ARRAY_SERIALIZATION_VERSION = 1;
 
-  protected PArrayDataType(String sqlTypeName, int sqlType, Class clazz, 
PDataCodec codec, int ordinal) {
-super(sqlTypeName, sqlType, clazz, codec, ordinal);
-  }
+protected PArrayDataType(String sqlTypeName, int sqlType, Class clazz, 
PDataCodec codec, int ordinal) {
+super(sqlTypeName, sqlType, clazz, codec, ordinal);
+}
+
+private static byte getSeparatorByte(boolean rowKeyOrderOptimizable, 
SortOrder sortOrder) {
+return SchemaUtil.getSeparatorByte(rowKeyOrderOptimizable, false, 
sortOrder);
+}
 
-  public byte[] toBytes(Object object, PDataType baseType, SortOrder 
sortOrder) {
-   if (object == null) {
-   throw new ConstraintViolationException(this + " may not be null");
-   }
-   PhoenixArray arr = ((PhoenixArray)object);
+public byte[] toBytes(Object object, PDataType baseType, SortOrder 
sortOrder) {
+ 

[19/50] [abbrv] phoenix git commit: PHOENIX-1954 Reserve chunks of numbers for a sequence (Jan Fernando)

2015-07-20 Thread maryannxue
PHOENIX-1954 Reserve chunks of numbers for a sequence (Jan Fernando)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/3b1bfa0d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/3b1bfa0d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/3b1bfa0d

Branch: refs/heads/calcite
Commit: 3b1bfa0d7b83f0b9ee0ad535d6e1f99777c14cb6
Parents: 984e622
Author: James Taylor jtay...@salesforce.com
Authored: Thu Jul 9 20:49:03 2015 -0700
Committer: James Taylor jtay...@salesforce.com
Committed: Thu Jul 9 20:56:48 2015 -0700

--
 .../end2end/SequenceBulkAllocationIT.java   | 1286 ++
 phoenix-core/src/main/antlr3/PhoenixSQL.g   |5 +-
 .../apache/phoenix/compile/SequenceManager.java |   70 +-
 .../compile/SequenceValueExpression.java|   14 +-
 .../coprocessor/SequenceRegionObserver.java |   64 +-
 .../phoenix/exception/SQLExceptionCode.java |2 +
 .../apache/phoenix/jdbc/PhoenixStatement.java   |   10 +-
 .../apache/phoenix/parse/ParseNodeFactory.java  |6 +-
 .../phoenix/parse/SequenceValueParseNode.java   |8 +-
 .../phoenix/query/ConnectionQueryServices.java  |5 +-
 .../query/ConnectionQueryServicesImpl.java  |   23 +-
 .../query/ConnectionlessQueryServicesImpl.java  |   20 +-
 .../query/DelegateConnectionQueryServices.java  |   13 +-
 .../apache/phoenix/schema/MetaDataClient.java   |2 +-
 .../org/apache/phoenix/schema/Sequence.java |  107 +-
 .../phoenix/schema/SequenceAllocation.java  |   48 +
 .../org/apache/phoenix/util/SequenceUtil.java   |   47 +-
 .../phoenix/schema/SequenceAllocationTest.java  |   59 +
 .../apache/phoenix/util/SequenceUtilTest.java   |   54 +
 19 files changed, 1763 insertions(+), 80 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/3b1bfa0d/phoenix-core/src/it/java/org/apache/phoenix/end2end/SequenceBulkAllocationIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SequenceBulkAllocationIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SequenceBulkAllocationIT.java
new file mode 100644
index 000..e7db1ec
--- /dev/null
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SequenceBulkAllocationIT.java
@@ -0,0 +1,1286 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more 
contributor license
+ * agreements. See the NOTICE file distributed with this work for additional 
information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache 
License, Version 2.0 (the
+ * License); you may not use this file except in compliance with the 
License. You may obtain a
+ * copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless 
required by applicable
+ * law or agreed to in writing, software distributed under the License is 
distributed on an AS IS
+ * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 
implied. See the License
+ * for the specific language governing permissions and limitations under the 
License.
+ */
+
+package org.apache.phoenix.end2end;
+
+import static org.apache.phoenix.util.PhoenixRuntime.TENANT_ID_ATTRIB;
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.phoenix.exception.SQLExceptionCode;
+import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.QueryUtil;
+import org.apache.phoenix.util.ReadOnlyProps;
+import org.junit.After;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+import com.google.common.collect.Lists;
+
+/**
+ * Suite of integration tests that validate that Bulk Allocation of Sequence 
values
 * using the NEXT <n> VALUES FOR <seq> syntax works as expected and interacts
 * correctly with NEXT VALUE FOR <seq> and CURRENT VALUE FOR <seq>.
+ * 
+ * All tests are run with both a generic connection and a multi-tenant 
connection.
+ * 
+ */

[01/50] [abbrv] phoenix git commit: PHOENIX-2036 - PhoenixConfigurationUtil should provide a pre-normalize table name to PhoenixRuntime

2015-07-20 Thread maryannxue
Repository: phoenix
Updated Branches:
  refs/heads/calcite 0641043a4 -> 76e92a961


PHOENIX-2036 - PhoenixConfigurationUtil should provide a pre-normalize table 
name to PhoenixRuntime


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/be5aba5b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/be5aba5b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/be5aba5b

Branch: refs/heads/calcite
Commit: be5aba5bcdbbf28c4faa04414e9e4f4079276614
Parents: 1c10fda
Author: ravimagham ravimag...@apache.org
Authored: Sat Jul 4 08:11:25 2015 -0700
Committer: ravimagham ravimag...@apache.org
Committed: Sat Jul 4 08:11:25 2015 -0700

--
 .../org/apache/phoenix/util/PhoenixRuntime.java |  3 +-
 .../java/org/apache/phoenix/util/QueryUtil.java |  4 +-
 .../org/apache/phoenix/util/SchemaUtil.java | 18 ++-
 .../phoenix/pig/PhoenixHBaseLoaderIT.java   |  9 ++--
 .../phoenix/pig/PhoenixHBaseStorerIT.java   |  4 +-
 .../apache/phoenix/pig/PhoenixHBaseStorage.java | 51 
 6 files changed, 47 insertions(+), 42 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/be5aba5b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
index 4347acd..92bb1d8 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
@@ -354,8 +354,7 @@ public class PhoenixRuntime {
 public static List<ColumnInfo> generateColumnInfo(Connection conn,
 String tableName, List<String> columns)
 throws SQLException {
-
-PTable table = PhoenixRuntime.getTable(conn, tableName);
+PTable table = PhoenixRuntime.getTable(conn, 
SchemaUtil.normalizeFullTableName(tableName));
 List<ColumnInfo> columnInfoList = Lists.newArrayList();
 Set<String> unresolvedColumnNames = new TreeSet<String>();
 if (columns == null) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/be5aba5b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
index bc2141c..d7f9cea 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
@@ -201,8 +201,6 @@ public final class QueryUtil {
 if(columnInfos == null || columnInfos.isEmpty()) {
  throw new IllegalArgumentException("At least one column must be provided");
 }
-// escape the table name to ensure it is case sensitive.
-final String escapedFullTableName = 
SchemaUtil.getEscapedFullTableName(fullTableName);
 StringBuilder query = new StringBuilder();
 query.append("SELECT ");
 for (ColumnInfo cinfo : columnInfos) {
@@ -215,7 +213,7 @@ public final class QueryUtil {
 // Remove the trailing comma
 query.setLength(query.length() - 1);
 query.append( FROM );
-query.append(escapedFullTableName);
+query.append(fullTableName);
 if(conditions != null && conditions.length() > 0) {
 query.append(" WHERE (").append(conditions).append(")");
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/be5aba5b/phoenix-core/src/main/java/org/apache/phoenix/util/SchemaUtil.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/SchemaUtil.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/SchemaUtil.java
index aff6b51..c674140 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/SchemaUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/SchemaUtil.java
@@ -21,9 +21,8 @@ import static 
com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
 import static com.google.common.base.Strings.isNullOrEmpty;
 import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES;
-import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_STATS_NAME_BYTES;
 import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_FUNCTION_NAME_BYTES;
-
+import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_STATS_NAME_BYTES;
 
 import java.sql.SQLException;
 import java.sql.Statement;
@@ -174,6 +173,21 @@ public class SchemaUtil {
 }
 

Apache-Phoenix | 4.x-HBase-1.0 | Build Successful

2015-07-20 Thread Apache Jenkins Server
4.x-HBase-1.0 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf?p=phoenix.git;a=shortlog;h=refs/heads/4.x-HBase-1.0

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.x-HBase-1.0/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.x-HBase-1.0/lastCompletedBuild/testReport/

Changes
[jamestaylor] PHOENIX-2120 Padding character is not inverted as required for DESC CHAR columns



Build times for last couple of runsLatest build time is the right most | Legend blue: normal, red: test failure, gray: timeout


Apache-Phoenix | Master | Build Successful

2015-07-20 Thread Apache Jenkins Server
Master branch build status Successful
Source repository https://git-wip-us.apache.org/repos/asf?p=phoenix.git;a=shortlog;h=refs/heads/master

Last Successful Compiled Artifacts https://builds.apache.org/job/Phoenix-master/lastSuccessfulBuild/artifact/

Last Complete Test Report https://builds.apache.org/job/Phoenix-master/lastCompletedBuild/testReport/

Changes
[jamestaylor] PHOENIX-2120 Padding character is not inverted as required for DESC CHAR columns



Build times for last couple of runsLatest build time is the right most | Legend blue: normal, red: test failure, gray: timeout


phoenix git commit: PHOENIX-2120 Padding character is not inverted as required for DESC CHAR columns

2015-07-20 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/master 936de8815 -> dcf845c25


PHOENIX-2120 Padding character is not inverted as required for DESC CHAR columns


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/dcf845c2
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/dcf845c2
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/dcf845c2

Branch: refs/heads/master
Commit: dcf845c25eb9bd619a999d16ce9e2f548ce7b491
Parents: 936de88
Author: James Taylor jamestay...@apache.org
Authored: Mon Jul 20 17:52:53 2015 -0700
Committer: James Taylor jamestay...@apache.org
Committed: Mon Jul 20 17:53:18 2015 -0700

--
 .../apache/phoenix/end2end/LpadFunctionIT.java  |  48 +++-
 .../org/apache/phoenix/compile/KeyPart.java |   8 +-
 .../apache/phoenix/compile/WhereOptimizer.java  | 132 +++---
 .../UngroupedAggregateRegionObserver.java   |  33 ++-
 .../phoenix/expression/LiteralExpression.java   |   3 +-
 .../expression/function/InvertFunction.java |   8 +-
 .../expression/function/PrefixFunction.java |  13 +-
 .../expression/function/RTrimFunction.java  |  15 +-
 .../function/RoundDateExpression.java   |  12 +-
 .../function/RoundDecimalExpression.java|  22 +-
 .../query/ConnectionQueryServicesImpl.java  |   8 +-
 .../org/apache/phoenix/schema/PTableImpl.java   |  25 +-
 .../apache/phoenix/schema/types/PBinary.java|  28 +-
 .../org/apache/phoenix/schema/types/PChar.java  |  11 +
 .../apache/phoenix/schema/types/PDataType.java  |   1 +
 .../org/apache/phoenix/util/PhoenixRuntime.java |  43 +++-
 .../org/apache/phoenix/util/StringUtil.java |   7 -
 .../org/apache/phoenix/util/UpgradeUtil.java| 256 +--
 18 files changed, 495 insertions(+), 178 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/dcf845c2/phoenix-core/src/it/java/org/apache/phoenix/end2end/LpadFunctionIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/LpadFunctionIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/LpadFunctionIT.java
index 4aa66c2..4070103 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/LpadFunctionIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/LpadFunctionIT.java
@@ -16,6 +16,7 @@
  */
 package org.apache.phoenix.end2end;
 
+import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
@@ -27,8 +28,10 @@ import java.sql.ResultSet;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.query.QueryConstants;
+import org.apache.phoenix.util.ByteUtil;
 import org.apache.phoenix.util.TestUtil;
-import org.junit.Ignore;
 import org.junit.Test;
 
 import com.google.common.collect.Lists;
@@ -107,26 +110,59 @@ public class LpadFunctionIT extends 
BaseHBaseManagedTimeIT {
 testLpad(conn, inputList, length, fillStringList, pk, 
expectedOutputList);
 }
 
-@Ignore
 @Test
 public void testCharPadding() throws Exception {
 ResultSet rs;
 Connection conn = DriverManager.getConnection(getUrl());
+
+conn.createStatement().execute(CREATE TABLE t (k CHAR(3) PRIMARY 
KEY));
+conn.createStatement().execute(UPSERT INTO t VALUES('a'));
+conn.createStatement().execute(UPSERT INTO t VALUES('ab'));
+conn.commit();
+rs = conn.createStatement().executeQuery(SELECT * FROM t ORDER BY k);
+assertTrue(rs.next());
+assertEquals(a, rs.getString(1));
+assertTrue(rs.next());
+assertEquals(ab, rs.getString(1));
+assertFalse(rs.next());
 
 conn.createStatement().execute(CREATE TABLE tdesc (k CHAR(3) PRIMARY 
KEY DESC));
 conn.createStatement().execute(UPSERT INTO tdesc VALUES('a'));
+conn.createStatement().execute(UPSERT INTO tdesc VALUES('ab'));
 conn.commit();
-rs = conn.createStatement().executeQuery(SELECT * FROM tdesc);
+rs = conn.createStatement().executeQuery(SELECT * FROM tdesc ORDER BY 
k DESC);
+assertTrue(rs.next());
+assertEquals(ab, rs.getString(1));
 assertTrue(rs.next());
 assertEquals(a, rs.getString(1));
 assertFalse(rs.next());
+}
+
+@Test
+public void testBinaryPadding() throws Exception {
+ResultSet rs;
+Connection conn = DriverManager.getConnection(getUrl());
 
-conn.createStatement().execute(CREATE TABLE t (k CHAR(3) PRIMARY 
KEY));
+conn.createStatement().execute(CREATE TABLE t (k BINARY(3) PRIMARY 
KEY));
 conn.createStatement().execute(UPSERT 

phoenix git commit: PHOENIX-2120 Padding character is not inverted as required for DESC CHAR columns

2015-07-20 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 b08ce6c98 -> 5f52793b1


PHOENIX-2120 Padding character is not inverted as required for DESC CHAR columns


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/5f52793b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/5f52793b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/5f52793b

Branch: refs/heads/4.x-HBase-0.98
Commit: 5f52793b1dbd7b50114d558cd394c4db59d94723
Parents: b08ce6c
Author: James Taylor jamestay...@apache.org
Authored: Mon Jul 20 17:52:53 2015 -0700
Committer: James Taylor jamestay...@apache.org
Committed: Mon Jul 20 17:54:59 2015 -0700

--
 .../apache/phoenix/end2end/LpadFunctionIT.java  |  48 +++-
 .../org/apache/phoenix/compile/KeyPart.java |   8 +-
 .../apache/phoenix/compile/WhereOptimizer.java  | 132 +++---
 .../UngroupedAggregateRegionObserver.java   |  33 ++-
 .../phoenix/expression/LiteralExpression.java   |   3 +-
 .../expression/function/InvertFunction.java |   8 +-
 .../expression/function/PrefixFunction.java |  13 +-
 .../expression/function/RTrimFunction.java  |  15 +-
 .../function/RoundDateExpression.java   |  12 +-
 .../function/RoundDecimalExpression.java|  22 +-
 .../query/ConnectionQueryServicesImpl.java  |   8 +-
 .../org/apache/phoenix/schema/PTableImpl.java   |  25 +-
 .../apache/phoenix/schema/types/PBinary.java|  28 +-
 .../org/apache/phoenix/schema/types/PChar.java  |  11 +
 .../apache/phoenix/schema/types/PDataType.java  |   1 +
 .../org/apache/phoenix/util/PhoenixRuntime.java |  43 +++-
 .../org/apache/phoenix/util/StringUtil.java |   7 -
 .../org/apache/phoenix/util/UpgradeUtil.java| 256 +--
 18 files changed, 495 insertions(+), 178 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/5f52793b/phoenix-core/src/it/java/org/apache/phoenix/end2end/LpadFunctionIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/LpadFunctionIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/LpadFunctionIT.java
index 4aa66c2..4070103 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/LpadFunctionIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/LpadFunctionIT.java
@@ -16,6 +16,7 @@
  */
 package org.apache.phoenix.end2end;
 
+import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
@@ -27,8 +28,10 @@ import java.sql.ResultSet;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.query.QueryConstants;
+import org.apache.phoenix.util.ByteUtil;
 import org.apache.phoenix.util.TestUtil;
-import org.junit.Ignore;
 import org.junit.Test;
 
 import com.google.common.collect.Lists;
@@ -107,26 +110,59 @@ public class LpadFunctionIT extends 
BaseHBaseManagedTimeIT {
 testLpad(conn, inputList, length, fillStringList, pk, 
expectedOutputList);
 }
 
-@Ignore
 @Test
 public void testCharPadding() throws Exception {
 ResultSet rs;
 Connection conn = DriverManager.getConnection(getUrl());
+
+conn.createStatement().execute(CREATE TABLE t (k CHAR(3) PRIMARY 
KEY));
+conn.createStatement().execute(UPSERT INTO t VALUES('a'));
+conn.createStatement().execute(UPSERT INTO t VALUES('ab'));
+conn.commit();
+rs = conn.createStatement().executeQuery(SELECT * FROM t ORDER BY k);
+assertTrue(rs.next());
+assertEquals(a, rs.getString(1));
+assertTrue(rs.next());
+assertEquals(ab, rs.getString(1));
+assertFalse(rs.next());
 
 conn.createStatement().execute(CREATE TABLE tdesc (k CHAR(3) PRIMARY 
KEY DESC));
 conn.createStatement().execute(UPSERT INTO tdesc VALUES('a'));
+conn.createStatement().execute(UPSERT INTO tdesc VALUES('ab'));
 conn.commit();
-rs = conn.createStatement().executeQuery(SELECT * FROM tdesc);
+rs = conn.createStatement().executeQuery(SELECT * FROM tdesc ORDER BY 
k DESC);
+assertTrue(rs.next());
+assertEquals(ab, rs.getString(1));
 assertTrue(rs.next());
 assertEquals(a, rs.getString(1));
 assertFalse(rs.next());
+}
+
+@Test
+public void testBinaryPadding() throws Exception {
+ResultSet rs;
+Connection conn = DriverManager.getConnection(getUrl());
 
-conn.createStatement().execute(CREATE TABLE t (k CHAR(3) PRIMARY 
KEY));
+conn.createStatement().execute(CREATE TABLE t (k BINARY(3) PRIMARY 
KEY));
 

phoenix git commit: PHOENIX-2120 Padding character is not inverted as required for DESC CHAR columns

2015-07-20 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.0 7e808368e -> 354249fa7


PHOENIX-2120 Padding character is not inverted as required for DESC CHAR columns


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/354249fa
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/354249fa
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/354249fa

Branch: refs/heads/4.x-HBase-1.0
Commit: 354249fa7ea6348795e5b0e7920e0348ebd516c9
Parents: 7e80836
Author: James Taylor jamestay...@apache.org
Authored: Mon Jul 20 17:52:53 2015 -0700
Committer: James Taylor jamestay...@apache.org
Committed: Mon Jul 20 17:58:38 2015 -0700

--
 .../apache/phoenix/end2end/LpadFunctionIT.java  |  48 +++-
 .../org/apache/phoenix/compile/KeyPart.java |   8 +-
 .../apache/phoenix/compile/WhereOptimizer.java  | 132 +++---
 .../UngroupedAggregateRegionObserver.java   |  33 ++-
 .../phoenix/expression/LiteralExpression.java   |   3 +-
 .../expression/function/InvertFunction.java |   8 +-
 .../expression/function/PrefixFunction.java |  13 +-
 .../expression/function/RTrimFunction.java  |  15 +-
 .../function/RoundDateExpression.java   |  12 +-
 .../function/RoundDecimalExpression.java|  22 +-
 .../query/ConnectionQueryServicesImpl.java  |   8 +-
 .../org/apache/phoenix/schema/PTableImpl.java   |  25 +-
 .../apache/phoenix/schema/types/PBinary.java|  28 +-
 .../org/apache/phoenix/schema/types/PChar.java  |  11 +
 .../apache/phoenix/schema/types/PDataType.java  |   1 +
 .../org/apache/phoenix/util/PhoenixRuntime.java |  43 +++-
 .../org/apache/phoenix/util/StringUtil.java |   7 -
 .../org/apache/phoenix/util/UpgradeUtil.java| 256 +--
 18 files changed, 495 insertions(+), 178 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/354249fa/phoenix-core/src/it/java/org/apache/phoenix/end2end/LpadFunctionIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/LpadFunctionIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/LpadFunctionIT.java
index 4aa66c2..4070103 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/LpadFunctionIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/LpadFunctionIT.java
@@ -16,6 +16,7 @@
  */
 package org.apache.phoenix.end2end;
 
+import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
@@ -27,8 +28,10 @@ import java.sql.ResultSet;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.query.QueryConstants;
+import org.apache.phoenix.util.ByteUtil;
 import org.apache.phoenix.util.TestUtil;
-import org.junit.Ignore;
 import org.junit.Test;
 
 import com.google.common.collect.Lists;
@@ -107,26 +110,59 @@ public class LpadFunctionIT extends 
BaseHBaseManagedTimeIT {
 testLpad(conn, inputList, length, fillStringList, pk, 
expectedOutputList);
 }
 
-@Ignore
 @Test
 public void testCharPadding() throws Exception {
 ResultSet rs;
 Connection conn = DriverManager.getConnection(getUrl());
+
+conn.createStatement().execute(CREATE TABLE t (k CHAR(3) PRIMARY 
KEY));
+conn.createStatement().execute(UPSERT INTO t VALUES('a'));
+conn.createStatement().execute(UPSERT INTO t VALUES('ab'));
+conn.commit();
+rs = conn.createStatement().executeQuery(SELECT * FROM t ORDER BY k);
+assertTrue(rs.next());
+assertEquals(a, rs.getString(1));
+assertTrue(rs.next());
+assertEquals(ab, rs.getString(1));
+assertFalse(rs.next());
 
 conn.createStatement().execute(CREATE TABLE tdesc (k CHAR(3) PRIMARY 
KEY DESC));
 conn.createStatement().execute(UPSERT INTO tdesc VALUES('a'));
+conn.createStatement().execute(UPSERT INTO tdesc VALUES('ab'));
 conn.commit();
-rs = conn.createStatement().executeQuery(SELECT * FROM tdesc);
+rs = conn.createStatement().executeQuery(SELECT * FROM tdesc ORDER BY 
k DESC);
+assertTrue(rs.next());
+assertEquals(ab, rs.getString(1));
 assertTrue(rs.next());
 assertEquals(a, rs.getString(1));
 assertFalse(rs.next());
+}
+
+@Test
+public void testBinaryPadding() throws Exception {
+ResultSet rs;
+Connection conn = DriverManager.getConnection(getUrl());
 
-conn.createStatement().execute(CREATE TABLE t (k CHAR(3) PRIMARY 
KEY));
+conn.createStatement().execute(CREATE TABLE t (k BINARY(3) PRIMARY 
KEY));