phoenix git commit: PHOENIX-3538 Regex bulk loader

2017-02-19 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.1 2ebd5160c -> 8b6658864


PHOENIX-3538 Regex bulk loader

Add bulk loader which parses input based on a regular expression.

Contributed by kalyanhadooptrain...@gmail.com


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8b665886
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8b665886
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8b665886

Branch: refs/heads/4.x-HBase-1.1
Commit: 8b66588648b1a90df5f0c37519e9ffd94beb301f
Parents: 2ebd516
Author: Gabriel Reid 
Authored: Sun Feb 19 20:28:14 2017 +0100
Committer: Gabriel Reid 
Committed: Mon Feb 20 08:55:44 2017 +0100

--
 .../phoenix/end2end/RegexBulkLoadToolIT.java| 371 +++
 .../phoenix/mapreduce/RegexBulkLoadTool.java|  74 
 .../mapreduce/RegexToKeyValueMapper.java| 135 +++
 .../phoenix/util/regex/RegexUpsertExecutor.java |  80 
 4 files changed, 660 insertions(+)
--
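The new RegexToKeyValueMapper parses each input line with a configured regular expression. A minimal, self-contained sketch of the general technique, named-group extraction with java.util.regex, follows; the pattern, group names, and sample line are illustrative assumptions, not taken from the patch:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class RegexLineParserSketch {
        // Hypothetical pattern for input lines shaped like "<id>,<name>,<date>".
        private static final Pattern LINE_PATTERN =
                Pattern.compile("(?<id>\\d+),(?<name>[^,]+),(?<t>[\\d/]+)");

        public static void main(String[] args) {
            Matcher m = LINE_PATTERN.matcher("1,Name 1,1970/01/01");
            if (m.matches()) {
                // In a mapper, each extracted group would become one column value
                // handed to the upsert executor.
                System.out.println(m.group("id") + " | " + m.group("name") + " | " + m.group("t"));
            }
        }
    }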


http://git-wip-us.apache.org/repos/asf/phoenix/blob/8b665886/phoenix-core/src/it/java/org/apache/phoenix/end2end/RegexBulkLoadToolIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RegexBulkLoadToolIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RegexBulkLoadToolIT.java
new file mode 100644
index 000..47b0db7
--- /dev/null
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RegexBulkLoadToolIT.java
@@ -0,0 +1,371 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.end2end;
+
+import static org.apache.phoenix.query.QueryServices.DATE_FORMAT_ATTRIB;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.PrintWriter;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.Statement;
+
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.FileAlreadyExistsException;
+import org.apache.phoenix.mapreduce.RegexBulkLoadTool;
+import org.apache.phoenix.util.DateUtil;
+import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.ReadOnlyProps;
+import org.apache.phoenix.util.TestUtil;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class RegexBulkLoadToolIT extends BaseOwnClusterIT {
+
+private static Connection conn;
+private static String zkQuorum;
+
+@BeforeClass
+public static void doSetup() throws Exception {
+setUpTestDriver(ReadOnlyProps.EMPTY_PROPS);
+zkQuorum = TestUtil.LOCALHOST + PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR 
+ getUtility().getZkCluster().getClientPort();
+conn = DriverManager.getConnection(getUrl());
+}
+
+@Test
+public void testBasicImport() throws Exception {
+
+Statement stmt = conn.createStatement();
+stmt.execute("CREATE TABLE S.TABLE1 (ID INTEGER NOT NULL PRIMARY KEY, 
NAME VARCHAR, T DATE) SPLIT ON (1,2)");
+
+FileSystem fs = FileSystem.get(getUtility().getConfiguration());
+FSDataOutputStream outputStream = fs.create(new 
Path("/tmp/input1.csv"));
+PrintWriter printWriter = new PrintWriter(outputStream);
+printWriter.println("1,Name 1,1970/01/01");
+printWriter.println("2,Name 2,1970/01/02");
+printWriter.close();
+
+RegexBulkLoadTool regexBulkLoadTool = new RegexBulkLoadTool();
+regexBulkLoadTool.setConf(getUtility().getConfiguration());
+regexBulkLoadTool.getConf().set(DATE_FORMAT_ATTRIB, "yyyy/MM/dd");
+int exitCode = regexBulkLoadTool.run(new String[] {
+"--input", "/tmp/input1.csv",
+"--table", "table1",
+"--schema", "s",
+

phoenix git commit: Fix extension of SqlParserTest

2016-11-17 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/calcite e003c579c -> 99b8a02b5


Fix extension of SqlParserTest

Override the appropriate method of SqlParserTest in
PhoenixSqlParserTest.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/99b8a02b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/99b8a02b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/99b8a02b

Branch: refs/heads/calcite
Commit: 99b8a02b5ea15f6c14ba5a0a72059a9554862003
Parents: e003c57
Author: Gabriel Reid 
Authored: Thu Nov 17 19:00:50 2016 +0100
Committer: Gabriel Reid 
Committed: Thu Nov 17 19:00:50 2016 +0100

--
 .../org/apache/phoenix/calcite/PhoenixSqlParserTest.java | 8 +++-
 1 file changed, 3 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/99b8a02b/phoenix-core/src/test/java/org/apache/phoenix/calcite/PhoenixSqlParserTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/calcite/PhoenixSqlParserTest.java
 
b/phoenix-core/src/test/java/org/apache/phoenix/calcite/PhoenixSqlParserTest.java
index 45ebeb9..846a8ab 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/calcite/PhoenixSqlParserTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/calcite/PhoenixSqlParserTest.java
@@ -19,7 +19,7 @@ package org.apache.phoenix.calcite;
 
 import java.io.IOException;
 
-import org.apache.calcite.sql.parser.SqlParser;
+import org.apache.calcite.sql.parser.SqlParserImplFactory;
 import org.apache.calcite.sql.parser.SqlParserTest;
 import org.apache.phoenix.calcite.parser.PhoenixParserImpl;
 import org.junit.Test;
@@ -45,10 +45,8 @@ public class PhoenixSqlParserTest extends SqlParserTest {
 }
 
 @Override
-protected SqlParser getSqlParser(String sql) {
-return SqlParser.create(sql,
-
SqlParser.configBuilder().setParserFactory(PhoenixParserImpl.FACTORY)
-.build());
+protected SqlParserImplFactory parserImplFactory() {
+return PhoenixParserImpl.FACTORY;
 }
 
 @Override



phoenix git commit: LP-2705 Downgrade warn logging in region observer

2016-02-22 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/master a1e6ae44b -> da51f46b2


LP-2705 Downgrade warn logging in region observer


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/da51f46b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/da51f46b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/da51f46b

Branch: refs/heads/master
Commit: da51f46b2394108f47368e40b5c3abd97dc33714
Parents: a1e6ae4
Author: Gabriel Reid 
Authored: Tue Feb 23 08:42:34 2016 +0100
Committer: Gabriel Reid 
Committed: Tue Feb 23 08:43:02 2016 +0100

--
 .../phoenix/coprocessor/UngroupedAggregateRegionObserver.java| 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/da51f46b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
index 7c98be0..91ec20e 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
@@ -152,7 +152,7 @@ public class UngroupedAggregateRegionObserver extends 
BaseScannerRegionObserver
   }
   Mutation[] mutationArray = new Mutation[mutations.size()];
   // TODO: should we use the one that is all or none?
-  logger.warn("Committing bactch of " + mutations.size() + " mutations for 
" + region.getRegionInfo().getTable().getNameAsString());
+  logger.debug("Committing bactch of " + mutations.size() + " mutations 
for " + region.getRegionInfo().getTable().getNameAsString());
   region.batchMutate(mutations.toArray(mutationArray), 
HConstants.NO_NONCE, HConstants.NO_NONCE);
 }
 
@@ -209,7 +209,7 @@ public class UngroupedAggregateRegionObserver extends 
BaseScannerRegionObserver
 byte[] descRowKeyTableBytes = scan.getAttribute(UPGRADE_DESC_ROW_KEY);
 boolean isDescRowKeyOrderUpgrade = descRowKeyTableBytes != null;
 if (isDescRowKeyOrderUpgrade) {
-logger.warn("Upgrading row key for " + 
region.getRegionInfo().getTable().getNameAsString());
+logger.debug("Upgrading row key for " + 
region.getRegionInfo().getTable().getNameAsString());
 projectedTable = deserializeTable(descRowKeyTableBytes);
 try {
 writeToTable = PTableImpl.makePTable(projectedTable, true);
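A side note on the change above: even after the downgrade, the message string is still concatenated on every call unless the statement is guarded. A minimal sketch of the usual guard idiom, shown with an SLF4J-style logger (the logger API actually used in this class may differ):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class GuardedDebugLogging {
        private static final Logger logger = LoggerFactory.getLogger(GuardedDebugLogging.class);

        void commitBatch(int batchSize, String tableName) {
            // The guard skips building the message entirely when DEBUG is off.
            if (logger.isDebugEnabled()) {
                logger.debug("Committing batch of " + batchSize + " mutations for " + tableName);
            }
        }
    }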



phoenix git commit: LP-2692 Config setting for disabling stats

2016-02-19 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/master 28a8b802c -> c2cc1be60


LP-2692 Config setting for disabling stats

Add configuration setting to allow disabling stats collection, for
environments where it is not desired or is causing issues.
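Judging by the new files in this change, the switch is applied through a factory that hands out either the existing default collector or a new no-op implementation. A toy sketch of that pattern; the interface and method names below are simplified stand-ins, not Phoenix's actual signatures:

    // Simplified stand-ins for StatisticsCollector and its implementations.
    interface StatsCollector {
        void collectStatistics(byte[] rowKey);
    }

    class DefaultStatsCollector implements StatsCollector {
        public void collectStatistics(byte[] rowKey) { /* gather guidepost stats */ }
    }

    class NoOpStatsCollector implements StatsCollector {
        public void collectStatistics(byte[] rowKey) { /* intentionally does nothing */ }
    }

    class StatsCollectorFactorySketch {
        // Callers always receive a collector; when stats are disabled it is a no-op,
        // so no call sites need to change.
        static StatsCollector create(boolean statsEnabled) {
            return statsEnabled ? new DefaultStatsCollector() : new NoOpStatsCollector();
        }
    }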


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c2cc1be6
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c2cc1be6
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c2cc1be6

Branch: refs/heads/master
Commit: c2cc1be60492844779ab713d5cd84d37a17e6651
Parents: 28a8b80
Author: Gabriel Reid 
Authored: Thu Feb 18 10:20:36 2016 +0100
Committer: Gabriel Reid 
Committed: Fri Feb 19 15:22:53 2016 +0100

--
 .../end2end/StatsCollectionDisabledIT.java  |  70 ++
 .../UngroupedAggregateRegionObserver.java   |  12 +-
 .../org/apache/phoenix/query/QueryServices.java |   1 +
 .../stats/DefaultStatisticsCollector.java   | 223 +++
 .../schema/stats/NoOpStatisticsCollector.java   |  72 ++
 .../phoenix/schema/stats/PTableStats.java   |   2 +-
 .../schema/stats/StatisticsCollector.java   | 213 +++---
 .../stats/StatisticsCollectorFactory.java   |  63 ++
 .../phoenix/schema/stats/StatisticsScanner.java |   2 +-
 .../phoenix/schema/stats/StatisticsWriter.java  |   6 +-
 10 files changed, 471 insertions(+), 193 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c2cc1be6/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectionDisabledIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectionDisabledIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectionDisabledIT.java
new file mode 100644
index 000..a92a665
--- /dev/null
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectionDisabledIT.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.end2end;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Map;
+import java.util.Properties;
+
+import com.google.common.collect.Maps;
+import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.ReadOnlyProps;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.assertFalse;
+
+/**
+ * Verifies that statistics are not collected if they are disabled via a 
setting
+ */
+public class StatsCollectionDisabledIT extends StatsCollectorAbstractIT {
+
+@BeforeClass
+public static void doSetup() throws Exception {
+Map<String, String> props = Maps.newHashMapWithExpectedSize(3);
+// Must update config before starting server
+props.put(QueryServices.STATS_GUIDEPOST_WIDTH_BYTES_ATTRIB, 
Long.toString(20));
+props.put(QueryServices.STATS_ENABLED_ATTRIB, Boolean.toString(false));
+setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
+}
+
+@Test
+public void testStatisticsAreNotWritten() throws SQLException {
+Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+Connection conn = DriverManager.getConnection(getUrl(), props);
+Statement stmt = conn.createStatement();
+stmt.execute("CREATE TABLE T1 (ID INTEGER NOT NULL PRIMARY KEY, NAME 
VARCHAR)");
+stmt.execute("UPSERT INTO T1 VALUES (1, 'NAME1')");
+stmt.execute("UPSERT INTO T1 VALUES (2, 'NAME2')");
+stmt.execute("UPSERT INTO T1 VALUES (3, 'NAME3')");
+conn.commit();
+stmt.execute("UPDATE STATISTICS T1");
+ResultSet rs = stmt.executeQuery("SELECT * FROM SYSTEM.STATS");
+assertFalse(rs.next());
+rs.close();
+

phoenix git commit: LP-2692 Config setting for disabling stats

2016-02-19 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 972ceb5c3 -> 508751847


LP-2692 Config setting for disabling stats

Add configuration setting to allow disabling stats collection, for
environments where it is not desired or is causing issues.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/50875184
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/50875184
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/50875184

Branch: refs/heads/4.x-HBase-0.98
Commit: 5087518478be465a7d10eb9d7f7981dd4d49eed2
Parents: 972ceb5
Author: Gabriel Reid 
Authored: Fri Feb 19 14:37:27 2016 +0100
Committer: Gabriel Reid 
Committed: Fri Feb 19 14:44:45 2016 +0100

--
 .../end2end/StatsCollectionDisabledIT.java  |  70 ++
 .../UngroupedAggregateRegionObserver.java   |  12 +-
 .../org/apache/phoenix/query/QueryServices.java |   1 +
 .../stats/DefaultStatisticsCollector.java   | 221 +++
 .../schema/stats/NoOpStatisticsCollector.java   |  71 ++
 .../schema/stats/StatisticsCollector.java   | 212 +++---
 .../stats/StatisticsCollectorFactory.java   |  63 ++
 .../phoenix/schema/stats/StatisticsScanner.java |   2 +-
 8 files changed, 463 insertions(+), 189 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/50875184/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectionDisabledIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectionDisabledIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectionDisabledIT.java
new file mode 100644
index 000..a92a665
--- /dev/null
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectionDisabledIT.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.end2end;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Map;
+import java.util.Properties;
+
+import com.google.common.collect.Maps;
+import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.ReadOnlyProps;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.assertFalse;
+
+/**
+ * Verifies that statistics are not collected if they are disabled via a 
setting
+ */
+public class StatsCollectionDisabledIT extends StatsCollectorAbstractIT {
+
+@BeforeClass
+public static void doSetup() throws Exception {
+Map<String, String> props = Maps.newHashMapWithExpectedSize(3);
+// Must update config before starting server
+props.put(QueryServices.STATS_GUIDEPOST_WIDTH_BYTES_ATTRIB, 
Long.toString(20));
+props.put(QueryServices.STATS_ENABLED_ATTRIB, Boolean.toString(false));
+setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
+}
+
+@Test
+public void testStatisticsAreNotWritten() throws SQLException {
+Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+Connection conn = DriverManager.getConnection(getUrl(), props);
+Statement stmt = conn.createStatement();
+stmt.execute("CREATE TABLE T1 (ID INTEGER NOT NULL PRIMARY KEY, NAME 
VARCHAR)");
+stmt.execute("UPSERT INTO T1 VALUES (1, 'NAME1')");
+stmt.execute("UPSERT INTO T1 VALUES (2, 'NAME2')");
+stmt.execute("UPSERT INTO T1 VALUES (3, 'NAME3')");
+conn.commit();
+stmt.execute("UPDATE STATISTICS T1");
+ResultSet rs = stmt.executeQuery("SELECT * FROM SYSTEM.STATS");
+assertFalse(rs.next());
+rs.close();
+stmt.close();
+conn.close();
+}
+}


phoenix git commit: LP-2692 Config setting for disabling stats

2016-02-19 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.0 aa362e744 -> f88b76662


LP-2692 Config setting for disabling stats

Add configuration setting to allow disabling stats collection, for
environments where it is not desired or is causing issues.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f88b7666
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f88b7666
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f88b7666

Branch: refs/heads/4.x-HBase-1.0
Commit: f88b766628d75e25e3d9509a6f40f367b54b8021
Parents: aa362e7
Author: Gabriel Reid 
Authored: Thu Feb 18 10:20:36 2016 +0100
Committer: Gabriel Reid 
Committed: Fri Feb 19 14:35:03 2016 +0100

--
 .../end2end/StatsCollectionDisabledIT.java  |  70 ++
 .../UngroupedAggregateRegionObserver.java   |  12 +-
 .../org/apache/phoenix/query/QueryServices.java |   1 +
 .../stats/DefaultStatisticsCollector.java   | 222 +++
 .../schema/stats/NoOpStatisticsCollector.java   |  71 ++
 .../phoenix/schema/stats/PTableStats.java   |   2 +-
 .../schema/stats/StatisticsCollector.java   | 213 +++---
 .../stats/StatisticsCollectorFactory.java   |  63 ++
 .../phoenix/schema/stats/StatisticsScanner.java |   2 +-
 .../phoenix/schema/stats/StatisticsWriter.java  |   6 +-
 10 files changed, 468 insertions(+), 194 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f88b7666/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectionDisabledIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectionDisabledIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectionDisabledIT.java
new file mode 100644
index 000..a92a665
--- /dev/null
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectionDisabledIT.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.end2end;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Map;
+import java.util.Properties;
+
+import com.google.common.collect.Maps;
+import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.ReadOnlyProps;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.assertFalse;
+
+/**
+ * Verifies that statistics are not collected if they are disabled via a 
setting
+ */
+public class StatsCollectionDisabledIT extends StatsCollectorAbstractIT {
+
+@BeforeClass
+public static void doSetup() throws Exception {
+Map<String, String> props = Maps.newHashMapWithExpectedSize(3);
+// Must update config before starting server
+props.put(QueryServices.STATS_GUIDEPOST_WIDTH_BYTES_ATTRIB, 
Long.toString(20));
+props.put(QueryServices.STATS_ENABLED_ATTRIB, Boolean.toString(false));
+setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
+}
+
+@Test
+public void testStatisticsAreNotWritten() throws SQLException {
+Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+Connection conn = DriverManager.getConnection(getUrl(), props);
+Statement stmt = conn.createStatement();
+stmt.execute("CREATE TABLE T1 (ID INTEGER NOT NULL PRIMARY KEY, NAME 
VARCHAR)");
+stmt.execute("UPSERT INTO T1 VALUES (1, 'NAME1')");
+stmt.execute("UPSERT INTO T1 VALUES (2, 'NAME2')");
+stmt.execute("UPSERT INTO T1 VALUES (3, 'NAME3')");
+conn.commit();
+stmt.execute("UPDATE STATISTICS T1");
+ResultSet rs = stmt.executeQuery("SELECT * FROM SYSTEM.STATS");
+assertFalse(rs.next());
+

svn commit: r1727741 - in /phoenix/site: publish/bulk_dataload.html source/src/site/markdown/bulk_dataload.md

2016-01-30 Thread greid
Author: greid
Date: Sat Jan 30 18:09:43 2016
New Revision: 1727741

URL: http://svn.apache.org/viewvc?rev=1727741&view=rev
Log:
Add information about working around permissions issues when
running the bulk loader.

Modified:
phoenix/site/publish/bulk_dataload.html
phoenix/site/source/src/site/markdown/bulk_dataload.md

Modified: phoenix/site/publish/bulk_dataload.html
URL: http://svn.apache.org/viewvc/phoenix/site/publish/bulk_dataload.html?rev=1727741&r1=1727740&r2=1727741&view=diff
==
--- phoenix/site/publish/bulk_dataload.html (original)
+++ phoenix/site/publish/bulk_dataload.html Sat Jan 30 18:09:43 2016
@@ -217,17 +217,17 @@
  For higher-throughput loading distributed over the cluster, the MapReduce 
loader can be used. This loader first converts all data into HFiles, and then 
provides the created HFiles to HBase after the HFile creation is complete.  
  The MapReduce loader is launched using the hadoop command with 
the Phoenix client jar, as follows: 
   
-  hadoop jar phoenix-3.0.0-incubating-client.jar 
org.apache.phoenix.mapreduce.CsvBulkLoadTool --table EXAMPLE --input 
/data/example.csv
+  hadoop jar phoenix-<version>-client.jar org.apache.phoenix.mapreduce.CsvBulkLoadTool --table EXAMPLE --input /data/example.csv
  
   
 When using Phoenix 4.0 and above, there is a known HBase issue (“Notice to Mapreduce users of HBase 0.96.1 and above”, https://hbase.apache.org/book.html), so you should use the following command: 
   
-  HADOOP_CLASSPATH=$(hbase mapredcp):/path/to/hbase/conf hadoop jar 
phoenix-4.0.0-incubating-client.jar 
org.apache.phoenix.mapreduce.CsvBulkLoadTool --table EXAMPLE --input 
/data/example.csv
+  HADOOP_CLASSPATH=$(hbase mapredcp):/path/to/hbase/conf hadoop jar phoenix-<version>-client.jar org.apache.phoenix.mapreduce.CsvBulkLoadTool --table EXAMPLE --input /data/example.csv
  
   
  OR 
   
-  HADOOP_CLASSPATH=/path/to/hbase-protocol.jar:/path/to/hbase/conf hadoop 
jar phoenix-4.0.0-incubating-client.jar 
org.apache.phoenix.mapreduce.CsvBulkLoadTool --table EXAMPLE --input 
/data/example.csv
+  HADOOP_CLASSPATH=/path/to/hbase-protocol.jar:/path/to/hbase/conf hadoop jar phoenix-<version>-client.jar org.apache.phoenix.mapreduce.CsvBulkLoadTool --table EXAMPLE --input /data/example.csv
  
   
  The input file must be present on HDFS (not the local filesystem where the 
command is being run).  
@@ -285,31 +285,46 @@
   
   Notes on the MapReduce 
importer 
   The current MR-based bulk loader will run one MR job to load your data 
table and one MR per index table to populate your indexes. Use the -it option 
to only load one of your index tables. 
-  
- 
- 
- Loading array data 
- Both the PSQL loader and MapReduce loader support loading array values 
with the -a flag. Arrays in a CSV file are represented by a field that 
uses a different delimiter than the main CSV delimiter. For example, the 
following file would represent an id field and an array of integers: 
-  
-  1,2:3:4
+   
+   Permissions issues when 
uploading HFiles 
+   There can be permissions issues in the final stage of a bulk load, when 
the created HFiles are handed over to HBase. HBase needs to be able to move the 
created HFiles, which means that it needs to have write access to the 
directories where the files have been written. If this is not the case, the 
uploading of HFiles will hang for a very long time before finally failing. 
+   There are two main workarounds for this issue: running the bulk load process as the hbase user, or creating the output files as readable for all users. 
+   The first option can be done by simply starting the hadoop command with 
sudo -u hbase, i.e.  
+
+sudo -u hbase hadoop jar phoenix-<version>-client.jar org.apache.phoenix.mapreduce.CsvBulkLoadTool --table EXAMPLE --input /data/example.csv
+ 
+
+   Creating the output files as readable by all can be done by setting the 
fs.permissions.umask-mode configuration setting to “000”. This can 
be set in the hadoop configuration on the machine being used to submit the job, 
or can be set for the job only during submission on the command line as 
follows: 
+
+hadoop jar phoenix-<version>-client.jar org.apache.phoenix.mapreduce.CsvBulkLoadTool -Dfs.permissions.umask-mode=000 --table EXAMPLE --input /data/example.csv
+ 
+
+   
+   
+   Loading array data 
+   Both the PSQL loader and MapReduce loader support loading array values 
with the -a flag. Arrays in a CSV file are represented by a field that 
uses a different delimiter than the main CSV delimiter. For example, the 
following file would represent an id field and an array of integers: 
+
+1,2:3:4
 2,3:4,5
  
-  
- To load this file, the default delimiter (comma) would be used, and the 
array delimiter (colon) would be supplied with the parameter -a 
':'. 
- 
- 
- A note on separator characters 
- The defa

phoenix git commit: PHOENIX-2434 Improve booleans in CSV import

2015-12-14 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 265e51cbc -> 0989a80b4


PHOENIX-2434 Improve booleans in CSV import

Do not automatically treat any unrecognized input as being false,
but instead fail on it.

Also, recognize 1/0 and t/f as true/false.
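Concretely, only the six tokens listed below are accepted (case-insensitively); any other value now raises an error instead of being treated as false. A stand-alone sketch mirroring the switch added in the patch:

    public class BooleanTokenDemo {
        static Boolean parseBoolean(String input) {
            switch (input.toLowerCase()) {
                case "true": case "t": case "1": return Boolean.TRUE;
                case "false": case "f": case "0": return Boolean.FALSE;
                default:
                    throw new RuntimeException("Invalid boolean value: '" + input
                            + "', must be one of ['true','t','1','false','f','0']");
            }
        }

        public static void main(String[] args) {
            System.out.println(parseBoolean("T"));  // TRUE
            System.out.println(parseBoolean("0"));  // FALSE
            parseBoolean("yes");                    // throws: no longer silently false
        }
    }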


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/0989a80b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/0989a80b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/0989a80b

Branch: refs/heads/4.x-HBase-0.98
Commit: 0989a80b4a06781d53fd51e1df35d2ffd274bbc2
Parents: 265e51c
Author: Bruno Dumon 
Authored: Sun Dec 13 16:04:07 2015 +0100
Committer: Gabriel Reid 
Committed: Mon Dec 14 08:47:42 2015 +0100

--
 .../phoenix/util/csv/CsvUpsertExecutor.java   | 15 +++
 .../phoenix/util/AbstractUpsertExecutorTest.java  | 18 +-
 .../phoenix/util/csv/CsvUpsertExecutorTest.java   | 17 +++--
 3 files changed, 43 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/0989a80b/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java
index 4a3af21..cddafc6 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java
@@ -28,6 +28,7 @@ import javax.annotation.Nullable;
 import org.apache.commons.csv.CSVRecord;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.query.QueryServicesOptions;
+import org.apache.phoenix.schema.types.PBoolean;
 import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.schema.types.PTimestamp;
 import org.apache.phoenix.util.ColumnInfo;
@@ -154,6 +155,20 @@ public class CsvUpsertExecutor extends UpsertExecutor<CSVRecord, String> {
 byte[] byteValue = new byte[dataType.getByteSize()];
 dataType.getCodec().encodeLong(epochTime, byteValue, 0);
 return dataType.toObject(byteValue);
+} else if (dataType == PBoolean.INSTANCE) {
+switch (input.toLowerCase()) {
+case "true":
+case "t":
+case "1":
+return Boolean.TRUE;
+case "false":
+case "f":
+case "0":
+return Boolean.FALSE;
+default:
+throw new RuntimeException("Invalid boolean value: '" 
+ input
++ "', must be one of 
['true','t','1','false','f','0']");
+}
 }
 return dataType.toObject(input);
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/0989a80b/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java
 
b/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java
index b614312..61b03fb 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java
@@ -58,7 +58,8 @@ public abstract class AbstractUpsertExecutorTest 
extends BaseConnectionles
 new ColumnInfo("ID", Types.BIGINT),
 new ColumnInfo("NAME", Types.VARCHAR),
 new ColumnInfo("AGE", Types.INTEGER),
-new ColumnInfo("VALUES", PIntegerArray.INSTANCE.getSqlType()));
+new ColumnInfo("VALUES", PIntegerArray.INSTANCE.getSqlType()),
+new ColumnInfo("BEARD", Types.BOOLEAN));
 
 preparedStatement = mock(PreparedStatement.class);
 upsertListener = mock(UpsertExecutor.UpsertListener.class);
@@ -72,7 +73,8 @@ public abstract class AbstractUpsertExecutorTest 
extends BaseConnectionles
 
 @Test
 public void testExecute() throws Exception {
-getUpsertExecutor().execute(createRecord(123L, "NameValue", 42, 
Arrays.asList(1, 2, 3)));
+getUpsertExecutor().execute(createRecord(123L, "NameValue", 42,
+Arrays.asList(1, 2, 3), true));
 
 verify(upsertListener).upsertDone(1L);
 verifyNoMoreInteractions(upsertListener);
@@ -81,6 +83,7 @@ public abstract class AbstractUpsertExecutorTest 
extends BaseConnectionles
  

phoenix git commit: PHOENIX-2434 Improve booleans in CSV import

2015-12-14 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.0 be0ccb4c0 -> b06a20c55


PHOENIX-2434 Improve booleans in CSV import

Do not automatically treat any unrecognized input as being false,
but instead fail on it.

Also, recognize 1/0 and t/f as true/false.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b06a20c5
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b06a20c5
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b06a20c5

Branch: refs/heads/4.x-HBase-1.0
Commit: b06a20c55a61a14ea886f20106bb6a55640742e2
Parents: be0ccb4
Author: Bruno Dumon 
Authored: Sun Dec 13 16:04:07 2015 +0100
Committer: Gabriel Reid 
Committed: Mon Dec 14 08:42:56 2015 +0100

--
 .../phoenix/util/csv/CsvUpsertExecutor.java   | 15 +++
 .../phoenix/util/AbstractUpsertExecutorTest.java  | 18 +-
 .../phoenix/util/csv/CsvUpsertExecutorTest.java   | 17 +++--
 3 files changed, 43 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b06a20c5/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java
index 4a3af21..cddafc6 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java
@@ -28,6 +28,7 @@ import javax.annotation.Nullable;
 import org.apache.commons.csv.CSVRecord;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.query.QueryServicesOptions;
+import org.apache.phoenix.schema.types.PBoolean;
 import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.schema.types.PTimestamp;
 import org.apache.phoenix.util.ColumnInfo;
@@ -154,6 +155,20 @@ public class CsvUpsertExecutor extends UpsertExecutor<CSVRecord, String> {
 byte[] byteValue = new byte[dataType.getByteSize()];
 dataType.getCodec().encodeLong(epochTime, byteValue, 0);
 return dataType.toObject(byteValue);
+} else if (dataType == PBoolean.INSTANCE) {
+switch (input.toLowerCase()) {
+case "true":
+case "t":
+case "1":
+return Boolean.TRUE;
+case "false":
+case "f":
+case "0":
+return Boolean.FALSE;
+default:
+throw new RuntimeException("Invalid boolean value: '" 
+ input
++ "', must be one of 
['true','t','1','false','f','0']");
+}
 }
 return dataType.toObject(input);
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b06a20c5/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java
 
b/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java
index b614312..61b03fb 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java
@@ -58,7 +58,8 @@ public abstract class AbstractUpsertExecutorTest 
extends BaseConnectionles
 new ColumnInfo("ID", Types.BIGINT),
 new ColumnInfo("NAME", Types.VARCHAR),
 new ColumnInfo("AGE", Types.INTEGER),
-new ColumnInfo("VALUES", PIntegerArray.INSTANCE.getSqlType()));
+new ColumnInfo("VALUES", PIntegerArray.INSTANCE.getSqlType()),
+new ColumnInfo("BEARD", Types.BOOLEAN));
 
 preparedStatement = mock(PreparedStatement.class);
 upsertListener = mock(UpsertExecutor.UpsertListener.class);
@@ -72,7 +73,8 @@ public abstract class AbstractUpsertExecutorTest 
extends BaseConnectionles
 
 @Test
 public void testExecute() throws Exception {
-getUpsertExecutor().execute(createRecord(123L, "NameValue", 42, 
Arrays.asList(1, 2, 3)));
+getUpsertExecutor().execute(createRecord(123L, "NameValue", 42,
+Arrays.asList(1, 2, 3), true));
 
 verify(upsertListener).upsertDone(1L);
 verifyNoMoreInteractions(upsertListener);
@@ -81,6 +83,7 @@ public abstract class AbstractUpsertExecutorTest 
extends BaseConnectionles

phoenix git commit: PHOENIX-2434 Improve booleans in CSV import

2015-12-14 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/master afa9dee2b -> e8a7feedb


PHOENIX-2434 Improve booleans in CSV import

Do not automatically treat any unrecognized input as being false,
but instead fail on it.

Also, recognize 1/0 and t/f as true/false.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/e8a7feed
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/e8a7feed
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/e8a7feed

Branch: refs/heads/master
Commit: e8a7feedb260bbb4c5abc88ab5e3325f98176ee9
Parents: afa9dee
Author: Bruno Dumon 
Authored: Sun Dec 13 16:04:07 2015 +0100
Committer: Gabriel Reid 
Committed: Mon Dec 14 08:37:48 2015 +0100

--
 .../phoenix/util/csv/CsvUpsertExecutor.java   | 15 +++
 .../phoenix/util/AbstractUpsertExecutorTest.java  | 18 +-
 .../phoenix/util/csv/CsvUpsertExecutorTest.java   | 17 +++--
 3 files changed, 43 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/e8a7feed/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java
index 4a3af21..cddafc6 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java
@@ -28,6 +28,7 @@ import javax.annotation.Nullable;
 import org.apache.commons.csv.CSVRecord;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.query.QueryServicesOptions;
+import org.apache.phoenix.schema.types.PBoolean;
 import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.schema.types.PTimestamp;
 import org.apache.phoenix.util.ColumnInfo;
@@ -154,6 +155,20 @@ public class CsvUpsertExecutor extends UpsertExecutor<CSVRecord, String> {
 byte[] byteValue = new byte[dataType.getByteSize()];
 dataType.getCodec().encodeLong(epochTime, byteValue, 0);
 return dataType.toObject(byteValue);
+} else if (dataType == PBoolean.INSTANCE) {
+switch (input.toLowerCase()) {
+case "true":
+case "t":
+case "1":
+return Boolean.TRUE;
+case "false":
+case "f":
+case "0":
+return Boolean.FALSE;
+default:
+throw new RuntimeException("Invalid boolean value: '" 
+ input
++ "', must be one of 
['true','t','1','false','f','0']");
+}
 }
 return dataType.toObject(input);
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/e8a7feed/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java
 
b/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java
index b614312..61b03fb 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/util/AbstractUpsertExecutorTest.java
@@ -58,7 +58,8 @@ public abstract class AbstractUpsertExecutorTest 
extends BaseConnectionles
 new ColumnInfo("ID", Types.BIGINT),
 new ColumnInfo("NAME", Types.VARCHAR),
 new ColumnInfo("AGE", Types.INTEGER),
-new ColumnInfo("VALUES", PIntegerArray.INSTANCE.getSqlType()));
+new ColumnInfo("VALUES", PIntegerArray.INSTANCE.getSqlType()),
+new ColumnInfo("BEARD", Types.BOOLEAN));
 
 preparedStatement = mock(PreparedStatement.class);
 upsertListener = mock(UpsertExecutor.UpsertListener.class);
@@ -72,7 +73,8 @@ public abstract class AbstractUpsertExecutorTest 
extends BaseConnectionles
 
 @Test
 public void testExecute() throws Exception {
-getUpsertExecutor().execute(createRecord(123L, "NameValue", 42, 
Arrays.asList(1, 2, 3)));
+getUpsertExecutor().execute(createRecord(123L, "NameValue", 42,
+Arrays.asList(1, 2, 3), true));
 
 verify(upsertListener).upsertDone(1L);
 verifyNoMoreInteractions(upsertListener);
@@ -81,6 +83,7 @@ public abstract class AbstractUpsertExecutorTest 
extends BaseConnectionles
 

phoenix git commit: PHOENIX-2484 Exclude logback impl from Tephra

2015-12-12 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 b24fdc053 -> aec4587fd


PHOENIX-2484 Exclude logback impl from Tephra

Exclude the logback dependency that is pulled in by Tephra,
allowing Tephra's SLF4J logging to be delegated into log4j,
and having everything configured via a single log4j.properties
config.
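
For context: SLF4J binds to whichever logging backend it finds on the classpath, so excluding the logback artifacts lets a log4j binding such as slf4j-log4j12 win, and Tephra's log output then honors the same log4j.properties as the rest of Phoenix. A minimal sketch of the resulting behavior (the presence of the slf4j-log4j12 binding is assumed):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggingDelegationDemo {
        private static final Logger LOG = LoggerFactory.getLogger(LoggingDelegationDemo.class);

        public static void main(String[] args) {
            // With logback excluded and slf4j-log4j12 on the classpath, this call
            // is routed to log4j and configured via log4j.properties.
            LOG.info("Routed through SLF4J into log4j");
        }
    }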


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/aec4587f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/aec4587f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/aec4587f

Branch: refs/heads/4.x-HBase-0.98
Commit: aec4587fdf73708d01011fba84c9d8d96ef805e0
Parents: b24fdc0
Author: Gabriel Reid 
Authored: Fri Dec 11 13:21:41 2015 +0100
Committer: Gabriel Reid 
Committed: Sun Dec 13 06:23:41 2015 +0100

--
 phoenix-core/pom.xml   |  4 
 phoenix-flume/pom.xml  |  1 -
 phoenix-pherf/pom.xml  |  9 -
 phoenix-pig/pom.xml|  1 -
 phoenix-server/pom.xml |  1 -
 phoenix-spark/pom.xml  |  1 -
 pom.xml| 44 
 7 files changed, 48 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/aec4587f/phoenix-core/pom.xml
--
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
index 143b3d5..2208b94 100644
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@ -226,24 +226,20 @@
     <dependency>
       <groupId>co.cask.tephra</groupId>
       <artifactId>tephra-api</artifactId>
-      <version>${tephra.version}</version>
     </dependency>
     <dependency>
       <groupId>co.cask.tephra</groupId>
       <artifactId>tephra-core</artifactId>
-      <version>${tephra.version}</version>
     </dependency>
     <dependency>
       <groupId>co.cask.tephra</groupId>
       <artifactId>tephra-core</artifactId>
       <type>test-jar</type>
-      <version>${tephra.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>co.cask.tephra</groupId>
       <artifactId>tephra-hbase-compat-0.98</artifactId>
-      <version>${tephra.version}</version>
     </dependency>
   </dependencies>
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/aec4587f/phoenix-flume/pom.xml
--
diff --git a/phoenix-flume/pom.xml b/phoenix-flume/pom.xml
index b17ef5c..fc77645 100644
--- a/phoenix-flume/pom.xml
+++ b/phoenix-flume/pom.xml
@@ -177,7 +177,6 @@
       <groupId>co.cask.tephra</groupId>
       <artifactId>tephra-core</artifactId>
       <type>test-jar</type>
-      <version>${tephra.version}</version>
       <scope>test</scope>
     </dependency>
   </dependencies>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/aec4587f/phoenix-pherf/pom.xml
--
diff --git a/phoenix-pherf/pom.xml b/phoenix-pherf/pom.xml
index 50cc134..ec3b199 100644
--- a/phoenix-pherf/pom.xml
+++ b/phoenix-pherf/pom.xml
@@ -100,11 +100,10 @@
        <scope>test</scope>
      </dependency>
      <dependency>
-       <groupId>co.cask.tephra</groupId>
-       <artifactId>tephra-core</artifactId>
-       <type>test-jar</type>
-       <version>${tephra.version}</version>
-       <scope>test</scope>
+       <groupId>co.cask.tephra</groupId>
+       <artifactId>tephra-core</artifactId>
+       <type>test-jar</type>
+       <scope>test</scope>
      </dependency>
    </dependencies>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/aec4587f/phoenix-pig/pom.xml
--
diff --git a/phoenix-pig/pom.xml b/phoenix-pig/pom.xml
index e27ccc1..1b78c28 100644
--- a/phoenix-pig/pom.xml
+++ b/phoenix-pig/pom.xml
@@ -151,7 +151,6 @@
       <groupId>co.cask.tephra</groupId>
       <artifactId>tephra-core</artifactId>
       <type>test-jar</type>
-      <version>${tephra.version}</version>
       <scope>test</scope>
     </dependency>
   </dependencies>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/aec4587f/phoenix-server/pom.xml
--
diff --git a/phoenix-server/pom.xml b/phoenix-server/pom.xml
index 7ce13f0..3a19f87 100644
--- a/phoenix-server/pom.xml
+++ b/phoenix-server/pom.xml
@@ -110,7 +110,6 @@
       <groupId>co.cask.tephra</groupId>
       <artifactId>tephra-core</artifactId>
       <type>test-jar</type>
-      <version>${tephra.version}</version>
       <scope>test</scope>
     </dependency>
   </dependencies>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/aec4587f/phoenix-spark/pom.xml
--
diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml
index bc77a26..b42fded 100644
--- a/phoenix-spark/pom.xml
+++ b/phoenix-spark/pom.xml
@@ -475,7 +475,6 @@
     <groupId>co.cask.tephra</groupId>
     <artifactId>tephra-core</artifactId>
     <type>test-jar</type>
-    <version>${tephra.version}</version>
     <scope>test</scope>
   </dependency>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/aec4587f/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 7ca319f..d379bfa 100644
--- a/pom.xml
+++ b/pom.xml
@@ -591,6 +591,50 @@
         <version>${calcite.version}</version>
       </dependency>

+
+      <dependency>
+        <groupId>co.cask.tephra</groupId>
+        <artifactId>tephra-api</artifactId>
+        <version>${tephra.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>co.cask.tephra</groupId>
+        <artifactId>tephra-core</artifactId>
+        <version>${tephra.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-core</artifactId>
+          </exclusion>

phoenix git commit: PHOENIX-2484 Exclude logback impl from Tephra

2015-12-12 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.0 deffe071c -> f15790679


PHOENIX-2484 Exclude logback impl from Tephra

Exclude the logback dependency that is pulled in by Tephra,
allowing Tephra's SLF4J logging to be delegated into log4j,
and having everything configured via a single log4j.properties
config.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f1579067
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f1579067
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f1579067

Branch: refs/heads/4.x-HBase-1.0
Commit: f15790679f8f9cb95b28562266cc198f0e4309fc
Parents: deffe07
Author: Gabriel Reid 
Authored: Fri Dec 11 13:21:41 2015 +0100
Committer: Gabriel Reid 
Committed: Sun Dec 13 06:24:48 2015 +0100

--
 phoenix-core/pom.xml   |  4 
 phoenix-flume/pom.xml  |  1 -
 phoenix-pherf/pom.xml  |  9 -
 phoenix-pig/pom.xml|  1 -
 phoenix-server/pom.xml |  1 -
 phoenix-spark/pom.xml  |  1 -
 pom.xml| 44 
 7 files changed, 48 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f1579067/phoenix-core/pom.xml
--
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
index f504a8f..73b4795 100644
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@ -226,24 +226,20 @@
     <dependency>
       <groupId>co.cask.tephra</groupId>
       <artifactId>tephra-api</artifactId>
-      <version>${tephra.version}</version>
     </dependency>
     <dependency>
       <groupId>co.cask.tephra</groupId>
       <artifactId>tephra-core</artifactId>
-      <version>${tephra.version}</version>
     </dependency>
     <dependency>
       <groupId>co.cask.tephra</groupId>
       <artifactId>tephra-core</artifactId>
       <type>test-jar</type>
-      <version>${tephra.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>co.cask.tephra</groupId>
       <artifactId>tephra-hbase-compat-1.0</artifactId>
-      <version>${tephra.version}</version>
     </dependency>
   </dependencies>
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f1579067/phoenix-flume/pom.xml
--
diff --git a/phoenix-flume/pom.xml b/phoenix-flume/pom.xml
index b2b131e..35db13b 100644
--- a/phoenix-flume/pom.xml
+++ b/phoenix-flume/pom.xml
@@ -177,7 +177,6 @@
       <groupId>co.cask.tephra</groupId>
       <artifactId>tephra-core</artifactId>
       <type>test-jar</type>
-      <version>${tephra.version}</version>
       <scope>test</scope>
     </dependency>
   </dependencies>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f1579067/phoenix-pherf/pom.xml
--
diff --git a/phoenix-pherf/pom.xml b/phoenix-pherf/pom.xml
index de3a2e3..8dc3977 100644
--- a/phoenix-pherf/pom.xml
+++ b/phoenix-pherf/pom.xml
@@ -100,11 +100,10 @@
        <scope>test</scope>
      </dependency>
      <dependency>
-       <groupId>co.cask.tephra</groupId>
-       <artifactId>tephra-core</artifactId>
-       <type>test-jar</type>
-       <version>${tephra.version}</version>
-       <scope>test</scope>
+       <groupId>co.cask.tephra</groupId>
+       <artifactId>tephra-core</artifactId>
+       <type>test-jar</type>
+       <scope>test</scope>
      </dependency>
    </dependencies>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f1579067/phoenix-pig/pom.xml
--
diff --git a/phoenix-pig/pom.xml b/phoenix-pig/pom.xml
index 8d6cb7e..29ceb86 100644
--- a/phoenix-pig/pom.xml
+++ b/phoenix-pig/pom.xml
@@ -151,7 +151,6 @@
       <groupId>co.cask.tephra</groupId>
       <artifactId>tephra-core</artifactId>
       <type>test-jar</type>
-      <version>${tephra.version}</version>
       <scope>test</scope>
     </dependency>
   </dependencies>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f1579067/phoenix-server/pom.xml
--
diff --git a/phoenix-server/pom.xml b/phoenix-server/pom.xml
index b128d6a..c9b7b17 100644
--- a/phoenix-server/pom.xml
+++ b/phoenix-server/pom.xml
@@ -110,7 +110,6 @@
       <groupId>co.cask.tephra</groupId>
       <artifactId>tephra-core</artifactId>
       <type>test-jar</type>
-      <version>${tephra.version}</version>
       <scope>test</scope>
     </dependency>
   </dependencies>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f1579067/phoenix-spark/pom.xml
--
diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml
index a0f6abd..af37b1e 100644
--- a/phoenix-spark/pom.xml
+++ b/phoenix-spark/pom.xml
@@ -475,7 +475,6 @@
     <groupId>co.cask.tephra</groupId>
     <artifactId>tephra-core</artifactId>
     <type>test-jar</type>
-    <version>${tephra.version}</version>
     <scope>test</scope>
   </dependency>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f1579067/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 97f9f90..0b65a66 100644
--- a/pom.xml
+++ b/pom.xml
@@ -590,6 +590,50 @@
         <version>${calcite.version}</version>
       </dependency>

+
+      <dependency>
+        <groupId>co.cask.tephra</groupId>
+        <artifactId>tephra-api</artifactId>
+        <version>${tephra.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>co.cask.tephra</groupId>
+        <artifactId>tephra-core</artifactId>
+        <version>${tephra.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-core</artifactId>
+          </exclusion>

phoenix git commit: PHOENIX-2515 Fix error if Hadoop not installed

2015-12-12 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 aec4587fd -> 265e51cbc


PHOENIX-2515 Fix error if Hadoop not installed

Don't log an error in bin scripts when looking for the
Hadoop classpath if Hadoop isn't installed.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/265e51cb
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/265e51cb
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/265e51cb

Branch: refs/heads/4.x-HBase-0.98
Commit: 265e51cbc4fe2cf36f8b440981187fc60b4662b0
Parents: aec4587
Author: Gabriel Reid 
Authored: Fri Dec 11 13:50:47 2015 +0100
Committer: Gabriel Reid 
Committed: Sun Dec 13 07:05:10 2015 +0100

--
 bin/phoenix_utils.py | 15 +--
 1 file changed, 9 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/265e51cb/bin/phoenix_utils.py
--
diff --git a/bin/phoenix_utils.py b/bin/phoenix_utils.py
index 5fb4f08..6b00d5f 100755
--- a/bin/phoenix_utils.py
+++ b/bin/phoenix_utils.py
@@ -53,15 +53,18 @@ def findFileInPathWithoutRecursion(pattern, path):
 
 return ""
 
-def which(file):
+def which(command):
 for path in os.environ["PATH"].split(os.pathsep):
-if os.path.exists(os.path.join(path, file)):
-return os.path.join(path, file)
+if os.path.exists(os.path.join(path, command)):
+return os.path.join(path, command)
 return None
 
-def findClasspath(file):
-aPath = which(file)
-command = "%s%s" %(aPath, ' classpath')
+def findClasspath(command_name):
+command_path = which(command_name)
+if command_path is None:
+# We don't have this command, so we can't get its classpath
+return ''
+command = "%s%s" %(command_path, ' classpath')
 return subprocess.Popen(command, shell=True, 
stdout=subprocess.PIPE).stdout.read()
 
 def setPath():



phoenix git commit: PHOENIX-2515 Fix error if Hadoop not installed

2015-12-12 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.0 f15790679 -> be0ccb4c0


PHOENIX-2515 Fix error if Hadoop not installed

Don't log an error in bin scripts when looking for the
Hadoop classpath if Hadoop isn't installed.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/be0ccb4c
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/be0ccb4c
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/be0ccb4c

Branch: refs/heads/4.x-HBase-1.0
Commit: be0ccb4c0de9fc3bde18afce3f06f1f6eb664057
Parents: f157906
Author: Gabriel Reid 
Authored: Fri Dec 11 13:50:47 2015 +0100
Committer: Gabriel Reid 
Committed: Sun Dec 13 07:05:19 2015 +0100

--
 bin/phoenix_utils.py | 15 +--
 1 file changed, 9 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/be0ccb4c/bin/phoenix_utils.py
--
diff --git a/bin/phoenix_utils.py b/bin/phoenix_utils.py
index 5fb4f08..6b00d5f 100755
--- a/bin/phoenix_utils.py
+++ b/bin/phoenix_utils.py
@@ -53,15 +53,18 @@ def findFileInPathWithoutRecursion(pattern, path):
 
 return ""
 
-def which(file):
+def which(command):
 for path in os.environ["PATH"].split(os.pathsep):
-if os.path.exists(os.path.join(path, file)):
-return os.path.join(path, file)
+if os.path.exists(os.path.join(path, command)):
+return os.path.join(path, command)
 return None
 
-def findClasspath(file):
-aPath = which(file)
-command = "%s%s" %(aPath, ' classpath')
+def findClasspath(command_name):
+command_path = which(command_name)
+if command_path is None:
+# We don't have this command, so we can't get its classpath
+return ''
+command = "%s%s" %(command_path, ' classpath')
 return subprocess.Popen(command, shell=True, 
stdout=subprocess.PIPE).stdout.read()
 
 def setPath():



phoenix git commit: PHOENIX-2515 Fix error if Hadoop not installed

2015-12-12 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/master 6d68a511f -> afa9dee2b


PHOENIX-2515 Fix error if Hadoop not installed

Don't log an error in bin scripts when looking for the
Hadoop classpath if Hadoop isn't installed.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/afa9dee2
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/afa9dee2
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/afa9dee2

Branch: refs/heads/master
Commit: afa9dee2be618cca8bd43a11311aea2062fa551c
Parents: 6d68a51
Author: Gabriel Reid 
Authored: Fri Dec 11 13:50:47 2015 +0100
Committer: Gabriel Reid 
Committed: Sun Dec 13 07:05:01 2015 +0100

--
 bin/phoenix_utils.py | 15 +--
 1 file changed, 9 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/afa9dee2/bin/phoenix_utils.py
--
diff --git a/bin/phoenix_utils.py b/bin/phoenix_utils.py
index 5fb4f08..6b00d5f 100755
--- a/bin/phoenix_utils.py
+++ b/bin/phoenix_utils.py
@@ -53,15 +53,18 @@ def findFileInPathWithoutRecursion(pattern, path):
 
 return ""
 
-def which(file):
+def which(command):
 for path in os.environ["PATH"].split(os.pathsep):
-if os.path.exists(os.path.join(path, file)):
-return os.path.join(path, file)
+if os.path.exists(os.path.join(path, command)):
+return os.path.join(path, command)
 return None
 
-def findClasspath(file):
-aPath = which(file)
-command = "%s%s" %(aPath, ' classpath')
+def findClasspath(command_name):
+command_path = which(command_name)
+if command_path is None:
+# We don't have this command, so we can't get its classpath
+return ''
+command = "%s%s" %(command_path, ' classpath')
 return subprocess.Popen(command, shell=True, 
stdout=subprocess.PIPE).stdout.read()
 
 def setPath():



phoenix git commit: PHOENIX-2387 Exclude sandbox from binary dist

2015-11-21 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/master d59846bf5 -> 76a8d1804


PHOENIX-2387 Exclude sandbox from binary dist

The phoenix_sandbox script can only be run in a source
distribution (by making use of maven to download all integration
test dependencies for HBase, HDFS, etc). Making it work in the
binary dist would bloat and complicate the binary distribution
quite a bit, so we just exclude it.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/76a8d180
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/76a8d180
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/76a8d180

Branch: refs/heads/master
Commit: 76a8d18043b5903253d5fff83609731e7e9f283d
Parents: d59846b
Author: Gabriel Reid 
Authored: Tue Nov 17 09:28:39 2015 +0100
Committer: Gabriel Reid 
Committed: Sat Nov 21 20:34:04 2015 +0100

--
 .../src/build/components/all-common-files.xml | 10 ++
 1 file changed, 10 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/76a8d180/phoenix-assembly/src/build/components/all-common-files.xml
--
diff --git a/phoenix-assembly/src/build/components/all-common-files.xml 
b/phoenix-assembly/src/build/components/all-common-files.xml
index cd5260f..af7888d 100644
--- a/phoenix-assembly/src/build/components/all-common-files.xml
+++ b/phoenix-assembly/src/build/components/all-common-files.xml
@@ -40,6 +40,12 @@
       <include>*.py</include>
       <include>*.sh</include>
     </includes>
+    <excludes>
+      <exclude>*sandbox*</exclude>
+    </excludes>
   </fileSet>

@@ -50,6 +56,10 @@
     <includes>
       <include>*.py*</include>
       <include>*.sh*</include>
     </includes>
+    <excludes>
+      <exclude>*sandbox*</exclude>
+    </excludes>
 
 



phoenix git commit: PHOENIX-2387 Exclude sandbox from binary dist

2015-11-21 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.0 e4cf8256c -> 6d9372dfd


PHOENIX-2387 Exclude sandbox from binary dist

The phoenix_sandbox script can only be run in a source
distribution (by making use of maven to download all integration
test dependencies for HBase, HDFS, etc). Making it work in the
binary dist would bloat and complicate the binary distribution
quite a bit, so we just exclude it.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6d9372df
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6d9372df
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6d9372df

Branch: refs/heads/4.x-HBase-1.0
Commit: 6d9372dfda3e0da70c6b83236610aa487a62c949
Parents: e4cf825
Author: Gabriel Reid 
Authored: Tue Nov 17 09:28:39 2015 +0100
Committer: Gabriel Reid 
Committed: Sat Nov 21 20:34:39 2015 +0100

--
 .../src/build/components/all-common-files.xml | 10 ++
 1 file changed, 10 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6d9372df/phoenix-assembly/src/build/components/all-common-files.xml
--
diff --git a/phoenix-assembly/src/build/components/all-common-files.xml 
b/phoenix-assembly/src/build/components/all-common-files.xml
index cd5260f..af7888d 100644
--- a/phoenix-assembly/src/build/components/all-common-files.xml
+++ b/phoenix-assembly/src/build/components/all-common-files.xml
@@ -40,6 +40,12 @@
         <include>*.py</include>
         <include>*.sh</include>
       </includes>
+      <excludes>
+        <exclude>*sandbox*</exclude>
+      </excludes>
 
 
 
@@ -50,6 +56,10 @@
       <excludes>
         <exclude>*.py*</exclude>
         <exclude>*.sh*</exclude>
+        <exclude>*sandbox*</exclude>
       </excludes>
 
 



phoenix git commit: PHOENIX-2387 Exclude sandbox from binary dist

2015-11-21 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 d075f1cc9 -> 27e7590f7


PHOENIX-2387 Exclude sandbox from binary dist

The phoenix_sandbox script can only be run in a source
distribution (by making use of maven to download all integration
test dependencies for HBase, HDFS, etc). Making it work in the
binary dist would bloat and complicate the binary distribution
quite a bit, so we just exclude it.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/27e7590f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/27e7590f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/27e7590f

Branch: refs/heads/4.x-HBase-0.98
Commit: 27e7590f795325a66251cc453781a3212485bf5b
Parents: d075f1c
Author: Gabriel Reid 
Authored: Tue Nov 17 09:28:39 2015 +0100
Committer: Gabriel Reid 
Committed: Sat Nov 21 20:34:26 2015 +0100

--
 .../src/build/components/all-common-files.xml | 10 ++
 1 file changed, 10 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/27e7590f/phoenix-assembly/src/build/components/all-common-files.xml
--
diff --git a/phoenix-assembly/src/build/components/all-common-files.xml 
b/phoenix-assembly/src/build/components/all-common-files.xml
index cd5260f..af7888d 100644
--- a/phoenix-assembly/src/build/components/all-common-files.xml
+++ b/phoenix-assembly/src/build/components/all-common-files.xml
@@ -40,6 +40,12 @@
         <include>*.py</include>
         <include>*.sh</include>
       </includes>
+      <excludes>
+        <exclude>*sandbox*</exclude>
+      </excludes>
 
 
 
@@ -50,6 +56,10 @@
       <excludes>
         <exclude>*.py*</exclude>
         <exclude>*.sh*</exclude>
+        <exclude>*sandbox*</exclude>
       </excludes>
 
 



phoenix git commit: PHOENIX-2387 Fix sandbox error printing

2015-11-07 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.0 3e798008d -> 79a36c16e


PHOENIX-2387 Fix sandbox error printing

Use the correct sys field (stderr) to write out the error message
if the project has not been built before starting the sandbox.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/79a36c16
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/79a36c16
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/79a36c16

Branch: refs/heads/4.x-HBase-1.0
Commit: 79a36c16eb229e67c7dd73c47cbf9748c96357e1
Parents: 3e79800
Author: Gabriel Reid 
Authored: Sat Nov 7 09:07:22 2015 +0100
Committer: Gabriel Reid 
Committed: Sat Nov 7 18:07:02 2015 +0100

--
 bin/phoenix_sandbox.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/79a36c16/bin/phoenix_sandbox.py
--
diff --git a/bin/phoenix_sandbox.py b/bin/phoenix_sandbox.py
index 7523ef7..433bc98 100755
--- a/bin/phoenix_sandbox.py
+++ b/bin/phoenix_sandbox.py
@@ -33,8 +33,8 @@ cp_file_path = os.path.join(phoenix_target_dir, 'cached_classpath.txt')
 
 
 if not os.path.exists(cp_file_path):
-    sys.err.write("cached_classpath.txt is not present under "
-            + "phoenix-core/target, please rebuild the project first")
+    sys.stderr.write("cached_classpath.txt is not present under "
+            + "phoenix-core/target, please rebuild the project first\n")
     sys.exit(1)
 
 logging_config = os.path.join(base_dir, 'bin', 'sandbox-log4j.properties')



phoenix git commit: PHOENIX-2387 Fix sandbox error printing

2015-11-07 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 e7cdb9a5d -> 9ce74ca18


PHOENIX-2387 Fix sandbox error printing

Use the correct sys field (stderr) to write out the error message
if the project has not been built before starting the sandbox.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9ce74ca1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9ce74ca1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9ce74ca1

Branch: refs/heads/4.x-HBase-0.98
Commit: 9ce74ca18ea9d54c8d2a9eaeeae84f2ee9e6df18
Parents: e7cdb9a
Author: Gabriel Reid 
Authored: Sat Nov 7 09:07:22 2015 +0100
Committer: Gabriel Reid 
Committed: Sat Nov 7 18:06:24 2015 +0100

--
 bin/phoenix_sandbox.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/9ce74ca1/bin/phoenix_sandbox.py
--
diff --git a/bin/phoenix_sandbox.py b/bin/phoenix_sandbox.py
index 7523ef7..433bc98 100755
--- a/bin/phoenix_sandbox.py
+++ b/bin/phoenix_sandbox.py
@@ -33,8 +33,8 @@ cp_file_path = os.path.join(phoenix_target_dir, 'cached_classpath.txt')
 
 
 if not os.path.exists(cp_file_path):
-    sys.err.write("cached_classpath.txt is not present under "
-            + "phoenix-core/target, please rebuild the project first")
+    sys.stderr.write("cached_classpath.txt is not present under "
+            + "phoenix-core/target, please rebuild the project first\n")
     sys.exit(1)
 
 logging_config = os.path.join(base_dir, 'bin', 'sandbox-log4j.properties')



phoenix git commit: PHOENIX-2387 Fix sandbox error printing

2015-11-07 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/master a9a9b24c9 -> ee435fb18


PHOENIX-2387 Fix sandbox error printing

Use the correct sys field (stderr) to write out the error message
if the project has not been built before starting the sandbox.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ee435fb1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ee435fb1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ee435fb1

Branch: refs/heads/master
Commit: ee435fb182065c10bd52591289b5f20595498197
Parents: a9a9b24
Author: Gabriel Reid 
Authored: Sat Nov 7 09:07:22 2015 +0100
Committer: Gabriel Reid 
Committed: Sat Nov 7 18:05:53 2015 +0100

--
 bin/phoenix_sandbox.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ee435fb1/bin/phoenix_sandbox.py
--
diff --git a/bin/phoenix_sandbox.py b/bin/phoenix_sandbox.py
index 7523ef7..433bc98 100755
--- a/bin/phoenix_sandbox.py
+++ b/bin/phoenix_sandbox.py
@@ -33,8 +33,8 @@ cp_file_path = os.path.join(phoenix_target_dir, 'cached_classpath.txt')
 
 
 if not os.path.exists(cp_file_path):
-    sys.err.write("cached_classpath.txt is not present under "
-            + "phoenix-core/target, please rebuild the project first")
+    sys.stderr.write("cached_classpath.txt is not present under "
+            + "phoenix-core/target, please rebuild the project first\n")
     sys.exit(1)
 
 logging_config = os.path.join(base_dir, 'bin', 'sandbox-log4j.properties')



phoenix git commit: PHOENIX-2353 Return exit status from bulk load

2015-10-29 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/master 72633b9d4 -> c26cce503


PHOENIX-2353 Return exit status from bulk load

Exit with the actual exit code from the ToolRunner when running
the CsvBulkLoadTool.

Contributed by Matt Kowalczyk.
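
A minimal sketch of the Tool pattern being applied here, using a hypothetical
ExitStatusDemoTool rather than the Phoenix class: ToolRunner.run returns the
tool's exit code, and without the System.exit call the JVM exits 0 even when
the job failed, so shell scripts checking $? never see the failure.

    import org.apache.hadoop.conf.Configured;
    import org.apache.hadoop.util.Tool;
    import org.apache.hadoop.util.ToolRunner;

    public class ExitStatusDemoTool extends Configured implements Tool {

        @Override
        public int run(String[] args) throws Exception {
            // A real tool would launch its job here and return its
            // success or failure as an int exit code.
            return args.length == 0 ? -1 : 0;
        }

        public static void main(String[] args) throws Exception {
            // Propagate the tool's return value as the process exit status.
            int exitStatus = ToolRunner.run(new ExitStatusDemoTool(), args);
            System.exit(exitStatus);
        }
    }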


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c26cce50
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c26cce50
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c26cce50

Branch: refs/heads/master
Commit: c26cce5038971a01a4864e473ec249f78ebfa892
Parents: 72633b9
Author: Gabriel Reid 
Authored: Thu Oct 29 15:51:43 2015 +0100
Committer: Gabriel Reid 
Committed: Thu Oct 29 15:54:52 2015 +0100

--
 .../main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java   | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c26cce50/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
index 022487e..20f05ff 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
@@ -96,7 +96,8 @@ public class CsvBulkLoadTool extends Configured implements Tool {
     static final Option HELP_OPT = new Option("h", "help", false, "Show this help and quit");
 
     public static void main(String[] args) throws Exception {
-        ToolRunner.run(new CsvBulkLoadTool(), args);
+        int exitStatus = ToolRunner.run(new CsvBulkLoadTool(), args);
+        System.exit(exitStatus);
     }
 
 /**



phoenix git commit: PHOENIX-2353 Return exit status from bulk load

2015-10-29 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.0 e796d88c3 -> b8c1d6cea


PHOENIX-2353 Return exit status from bulk load

Exit with the actual exit code from the ToolRunner when running
the CsvBulkLoadTool.

Contributed by Matt Kowalczyk.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b8c1d6ce
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b8c1d6ce
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b8c1d6ce

Branch: refs/heads/4.x-HBase-1.0
Commit: b8c1d6cea77b62841b147e6e92aa083c4bedcf23
Parents: e796d88
Author: Gabriel Reid 
Authored: Thu Oct 29 15:51:43 2015 +0100
Committer: Gabriel Reid 
Committed: Thu Oct 29 15:54:02 2015 +0100

--
 .../main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java   | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b8c1d6ce/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
index 022487e..20f05ff 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
@@ -96,7 +96,8 @@ public class CsvBulkLoadTool extends Configured implements Tool {
     static final Option HELP_OPT = new Option("h", "help", false, "Show this help and quit");
 
     public static void main(String[] args) throws Exception {
-        ToolRunner.run(new CsvBulkLoadTool(), args);
+        int exitStatus = ToolRunner.run(new CsvBulkLoadTool(), args);
+        System.exit(exitStatus);
     }
 
 /**



phoenix git commit: PHOENIX-2353 Return exit status from bulk load

2015-10-29 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 69032041f -> b8f7fac10


PHOENIX-2353 Return exit status from bulk load

Exit with the actual exit code from the ToolRunner when running
the CsvBulkLoadTool.

Contributed by Matt Kowalczyk.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b8f7fac1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b8f7fac1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b8f7fac1

Branch: refs/heads/4.x-HBase-0.98
Commit: b8f7fac10534302f89b6780192a377a065ae24d7
Parents: 6903204
Author: Gabriel Reid 
Authored: Thu Oct 29 15:51:43 2015 +0100
Committer: Gabriel Reid 
Committed: Thu Oct 29 15:53:10 2015 +0100

--
 .../main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java   | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b8f7fac1/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
index 022487e..20f05ff 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java
@@ -96,7 +96,8 @@ public class CsvBulkLoadTool extends Configured implements Tool {
     static final Option HELP_OPT = new Option("h", "help", false, "Show this help and quit");
 
     public static void main(String[] args) throws Exception {
-        ToolRunner.run(new CsvBulkLoadTool(), args);
+        int exitStatus = ToolRunner.run(new CsvBulkLoadTool(), args);
+        System.exit(exitStatus);
     }
 
 /**



phoenix git commit: PHOENIX-2289 Avoid Class.getClassLoader calls

2015-09-24 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/master d74a37fef -> 5ecd4967f


PHOENIX-2289 Avoid Class.getClassLoader calls

Cache the class loader from the PhoenixContextExecutor class
to avoid relatively expensive calls to Class.getClassLoader.
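
The pattern, as a standalone sketch with illustrative names (not the actual
Phoenix source): resolve the class loader once into a static final field,
then reuse it each time the thread's context class loader is swapped around
a callable.

    import java.util.concurrent.Callable;

    public final class ContextClassLoaderDemo {

        // Resolved once at class initialization instead of on every call.
        private static final ClassLoader CACHED_CLASSLOADER =
                ContextClassLoaderDemo.class.getClassLoader();

        public static <T> T call(Callable<T> target) throws Exception {
            ClassLoader saved = Thread.currentThread().getContextClassLoader();
            try {
                Thread.currentThread().setContextClassLoader(CACHED_CLASSLOADER);
                return target.call();
            } finally {
                // Always restore the caller's context class loader.
                Thread.currentThread().setContextClassLoader(saved);
            }
        }

        public static void main(String[] args) throws Exception {
            System.out.println(call(() -> "ran with "
                    + Thread.currentThread().getContextClassLoader()));
        }
    }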


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/5ecd4967
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/5ecd4967
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/5ecd4967

Branch: refs/heads/master
Commit: 5ecd4967f6f9ee8ae90ea1ea7421b43fcba67d14
Parents: d74a37f
Author: Gabriel Reid 
Authored: Thu Sep 24 08:56:52 2015 +0200
Committer: Gabriel Reid 
Committed: Thu Sep 24 12:03:54 2015 +0200

--
 .../org/apache/phoenix/util/PhoenixContextExecutor.java | 9 +
 1 file changed, 5 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/5ecd4967/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixContextExecutor.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixContextExecutor.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixContextExecutor.java
index 9106a5d..2da3249 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixContextExecutor.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixContextExecutor.java
@@ -36,14 +36,16 @@ import com.google.common.base.Throwables;
  */
 public class PhoenixContextExecutor {
 
+    // We cache the class loader because calls to Class.getClassLoader are relatively expensive
+    private static final ClassLoader CACHED_CLASSLOADER =
+            PhoenixContextExecutor.class.getClassLoader();
+
     private static class CurrentContextWrapper implements CallWrapper {
         private ClassLoader saveCcl;
 
         @Override
         public void before() {
             saveCcl = Thread.currentThread().getContextClassLoader();
-            Thread.currentThread().setContextClassLoader(
-                    PhoenixContextExecutor.class.getClassLoader());
+            Thread.currentThread().setContextClassLoader(CACHED_CLASSLOADER);
         }
 
         @Override
@@ -72,8 +74,7 @@ public class PhoenixContextExecutor {
     public static <T> T call(Callable<T> target) throws Exception {
         ClassLoader saveCcl = Thread.currentThread().getContextClassLoader();
         try {
-            Thread.currentThread().setContextClassLoader(
-                    PhoenixContextExecutor.class.getClassLoader());
+            Thread.currentThread().setContextClassLoader(CACHED_CLASSLOADER);
             return target.call();
         } finally {
             Thread.currentThread().setContextClassLoader(saveCcl);



phoenix git commit: PHOENIX-2289 Avoid Class.getClassLoader calls

2015-09-24 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.0 c51c9fac6 -> 2b3dedde9


PHOENIX-2289 Avoid Class.getClassLoader calls

Cache the class loader from the PhoenixContextExecutor class
to avoid relatively expensive calls to Class.getClassLoader.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/2b3dedde
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/2b3dedde
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/2b3dedde

Branch: refs/heads/4.x-HBase-1.0
Commit: 2b3dedde9fd4ae2a2f2abfacb13ef9bc3f71e59d
Parents: c51c9fa
Author: Gabriel Reid 
Authored: Thu Sep 24 08:56:52 2015 +0200
Committer: Gabriel Reid 
Committed: Thu Sep 24 12:04:25 2015 +0200

--
 .../org/apache/phoenix/util/PhoenixContextExecutor.java | 9 +
 1 file changed, 5 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/2b3dedde/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixContextExecutor.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixContextExecutor.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixContextExecutor.java
index 9106a5d..2da3249 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixContextExecutor.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixContextExecutor.java
@@ -36,14 +36,16 @@ import com.google.common.base.Throwables;
  */
 public class PhoenixContextExecutor {
 
+    // We cache the class loader because calls to Class.getClassLoader are relatively expensive
+    private static final ClassLoader CACHED_CLASSLOADER =
+            PhoenixContextExecutor.class.getClassLoader();
+
     private static class CurrentContextWrapper implements CallWrapper {
         private ClassLoader saveCcl;
 
         @Override
         public void before() {
             saveCcl = Thread.currentThread().getContextClassLoader();
-            Thread.currentThread().setContextClassLoader(
-                    PhoenixContextExecutor.class.getClassLoader());
+            Thread.currentThread().setContextClassLoader(CACHED_CLASSLOADER);
         }
 
         @Override
@@ -72,8 +74,7 @@ public class PhoenixContextExecutor {
     public static <T> T call(Callable<T> target) throws Exception {
         ClassLoader saveCcl = Thread.currentThread().getContextClassLoader();
         try {
-            Thread.currentThread().setContextClassLoader(
-                    PhoenixContextExecutor.class.getClassLoader());
+            Thread.currentThread().setContextClassLoader(CACHED_CLASSLOADER);
             return target.call();
         } finally {
             Thread.currentThread().setContextClassLoader(saveCcl);



phoenix git commit: PHOENIX-2289 Avoid Class.getClassLoader calls

2015-09-24 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 284c30f69 -> 9af1327b1


PHOENIX-2289 Avoid Class.getClassLoader calls

Cache the class loader from the PhoenixContextExecutor class
to avoid relatively expensive calls to Class.getClassLoader.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9af1327b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9af1327b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9af1327b

Branch: refs/heads/4.x-HBase-0.98
Commit: 9af1327b150300390d525c3d1063c7d2f5ed8663
Parents: 284c30f
Author: Gabriel Reid 
Authored: Thu Sep 24 08:56:52 2015 +0200
Committer: Gabriel Reid 
Committed: Thu Sep 24 12:04:09 2015 +0200

--
 .../org/apache/phoenix/util/PhoenixContextExecutor.java | 9 +
 1 file changed, 5 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/9af1327b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixContextExecutor.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixContextExecutor.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixContextExecutor.java
index 9106a5d..2da3249 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixContextExecutor.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixContextExecutor.java
@@ -36,14 +36,16 @@ import com.google.common.base.Throwables;
  */
 public class PhoenixContextExecutor {
 
+    // We cache the class loader because calls to Class.getClassLoader are relatively expensive
+    private static final ClassLoader CACHED_CLASSLOADER =
+            PhoenixContextExecutor.class.getClassLoader();
+
     private static class CurrentContextWrapper implements CallWrapper {
         private ClassLoader saveCcl;
 
         @Override
         public void before() {
             saveCcl = Thread.currentThread().getContextClassLoader();
-            Thread.currentThread().setContextClassLoader(
-                    PhoenixContextExecutor.class.getClassLoader());
+            Thread.currentThread().setContextClassLoader(CACHED_CLASSLOADER);
         }
 
         @Override
@@ -72,8 +74,7 @@ public class PhoenixContextExecutor {
     public static <T> T call(Callable<T> target) throws Exception {
         ClassLoader saveCcl = Thread.currentThread().getContextClassLoader();
         try {
-            Thread.currentThread().setContextClassLoader(
-                    PhoenixContextExecutor.class.getClassLoader());
+            Thread.currentThread().setContextClassLoader(CACHED_CLASSLOADER);
             return target.call();
         } finally {
             Thread.currentThread().setContextClassLoader(saveCcl);



phoenix git commit: PHOENIX-2238 Support non-printable delimiters

2015-09-19 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.5-HBase-0.98 2310efc89 -> c1fadb10c


PHOENIX-2238 Support non-printable delimiters

Work around serialization issues for non-printable characters
in Hadoop Configuration objects by base64-encoding the delimiter
characters for CSV bulk load.
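
The round trip, sketched with java.util.Base64 and a Properties object
standing in for Hadoop's Configuration (the actual change uses HBase's
Base64 helper): once encoded, a non-printable delimiter such as Ctrl-A
survives being serialized as plain text.

    import java.util.Base64;
    import java.util.Properties;

    public class DelimiterCodecDemo {

        static void setChar(Properties conf, String key, char value) {
            // Store the single character as base64 text.
            conf.setProperty(key, Base64.getEncoder()
                    .encodeToString(Character.toString(value).getBytes()));
        }

        static Character getChar(Properties conf, String key) {
            String encoded = conf.getProperty(key);
            if (encoded == null) {
                return null;
            }
            return new String(Base64.getDecoder().decode(encoded)).charAt(0);
        }

        public static void main(String[] args) {
            Properties conf = new Properties();
            setChar(conf, "field.delimiter", '\001'); // non-printable Ctrl-A
            System.out.println("stored as: " + conf.getProperty("field.delimiter"));
            System.out.println("round trip ok: "
                    + (getChar(conf, "field.delimiter") == '\001'));
        }
    }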


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c1fadb10
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c1fadb10
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c1fadb10

Branch: refs/heads/4.5-HBase-0.98
Commit: c1fadb10ce33a08349b6ec93dc27b740aa8ba2cb
Parents: 2310efc
Author: Gabriel Reid 
Authored: Fri Sep 11 22:50:01 2015 +0200
Committer: Gabriel Reid 
Committed: Sat Sep 19 20:31:10 2015 +0200

--
 .../phoenix/mapreduce/CsvBulkImportUtil.java| 22 ++--
 .../phoenix/mapreduce/CsvToKeyValueMapper.java  |  6 ++-
 .../mapreduce/CsvBulkImportUtilTest.java| 57 
 3 files changed, 71 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c1fadb10/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
index 8f0f7d5..6d77cd5 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
@@ -19,7 +19,9 @@ package org.apache.phoenix.mapreduce;
 
 import java.util.List;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.util.Base64;
 import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
 import org.apache.phoenix.util.ColumnInfo;
 
@@ -49,9 +51,9 @@ public class CsvBulkImportUtil {
         Preconditions.checkNotNull(columnInfoList);
         Preconditions.checkArgument(!columnInfoList.isEmpty(), "Column info list is empty");
         conf.set(CsvToKeyValueMapper.TABLE_NAME_CONFKEY, tableName);
-        conf.set(CsvToKeyValueMapper.FIELD_DELIMITER_CONFKEY, String.valueOf(fieldDelimiter));
-        conf.set(CsvToKeyValueMapper.QUOTE_CHAR_CONFKEY, String.valueOf(quoteChar));
-        conf.set(CsvToKeyValueMapper.ESCAPE_CHAR_CONFKEY, String.valueOf(escapeChar));
+        setChar(conf, CsvToKeyValueMapper.FIELD_DELIMITER_CONFKEY, fieldDelimiter);
+        setChar(conf, CsvToKeyValueMapper.QUOTE_CHAR_CONFKEY, quoteChar);
+        setChar(conf, CsvToKeyValueMapper.ESCAPE_CHAR_CONFKEY, escapeChar);
         if (arrayDelimiter != null) {
             conf.set(CsvToKeyValueMapper.ARRAY_DELIMITER_CONFKEY, arrayDelimiter);
         }
@@ -70,4 +72,18 @@
         conf.setClass(PhoenixConfigurationUtil.UPSERT_HOOK_CLASS_CONFKEY, processorClass,
                 ImportPreUpsertKeyValueProcessor.class);
     }
+
+    @VisibleForTesting
+    static void setChar(Configuration conf, String confKey, char charValue) {
+        conf.set(confKey, Base64.encodeBytes(Character.toString(charValue).getBytes()));
+    }
+
+    @VisibleForTesting
+    static Character getCharacter(Configuration conf, String confKey) {
+        String strValue = conf.get(confKey);
+        if (strValue == null) {
+            return null;
+        }
+        return new String(Base64.decode(strValue)).charAt(0);
+    }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c1fadb10/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
index c0328bd..87420c8 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
@@ -124,8 +124,10 @@ public class CsvToKeyValueMapper extends Mapper

[2/2] phoenix git commit: PHOENIX-2238 Support non-printable delimiters

2015-09-19 Thread greid
PHOENIX-2238 Support non-printable delimiters

Work around serialization issues for non-printable characters
in Hadoop Configuration objects by base64-encoding the delimiter
characters for CSV bulk load.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6dcc8826
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6dcc8826
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6dcc8826

Branch: refs/heads/master
Commit: 6dcc88262386ce6340c1a60a9f0b5e4cb6c787ed
Parents: 8864837
Author: Gabriel Reid 
Authored: Fri Sep 11 22:50:01 2015 +0200
Committer: Gabriel Reid 
Committed: Sat Sep 19 20:30:33 2015 +0200

--
 .../phoenix/mapreduce/CsvBulkImportUtil.java| 22 ++--
 .../phoenix/mapreduce/CsvToKeyValueMapper.java  |  6 ++-
 .../mapreduce/CsvBulkImportUtilTest.java| 57 
 3 files changed, 71 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6dcc8826/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
index 8f0f7d5..6d77cd5 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
@@ -19,7 +19,9 @@ package org.apache.phoenix.mapreduce;
 
 import java.util.List;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.util.Base64;
 import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
 import org.apache.phoenix.util.ColumnInfo;
 
@@ -49,9 +51,9 @@ public class CsvBulkImportUtil {
         Preconditions.checkNotNull(columnInfoList);
         Preconditions.checkArgument(!columnInfoList.isEmpty(), "Column info list is empty");
         conf.set(CsvToKeyValueMapper.TABLE_NAME_CONFKEY, tableName);
-        conf.set(CsvToKeyValueMapper.FIELD_DELIMITER_CONFKEY, String.valueOf(fieldDelimiter));
-        conf.set(CsvToKeyValueMapper.QUOTE_CHAR_CONFKEY, String.valueOf(quoteChar));
-        conf.set(CsvToKeyValueMapper.ESCAPE_CHAR_CONFKEY, String.valueOf(escapeChar));
+        setChar(conf, CsvToKeyValueMapper.FIELD_DELIMITER_CONFKEY, fieldDelimiter);
+        setChar(conf, CsvToKeyValueMapper.QUOTE_CHAR_CONFKEY, quoteChar);
+        setChar(conf, CsvToKeyValueMapper.ESCAPE_CHAR_CONFKEY, escapeChar);
         if (arrayDelimiter != null) {
             conf.set(CsvToKeyValueMapper.ARRAY_DELIMITER_CONFKEY, arrayDelimiter);
         }
@@ -70,4 +72,18 @@
         conf.setClass(PhoenixConfigurationUtil.UPSERT_HOOK_CLASS_CONFKEY, processorClass,
                 ImportPreUpsertKeyValueProcessor.class);
     }
+
+    @VisibleForTesting
+    static void setChar(Configuration conf, String confKey, char charValue) {
+        conf.set(confKey, Base64.encodeBytes(Character.toString(charValue).getBytes()));
+    }
+
+    @VisibleForTesting
+    static Character getCharacter(Configuration conf, String confKey) {
+        String strValue = conf.get(confKey);
+        if (strValue == null) {
+            return null;
+        }
+        return new String(Base64.decode(strValue)).charAt(0);
+    }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/6dcc8826/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
index 68270d4..2e69048 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
@@ -125,8 +125,10 @@ public class CsvToKeyValueMapper extends Mapper

[1/2] phoenix git commit: PHOENIX-2239 Improve strict mode in psql CSV load

2015-09-19 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/master 29535f342 -> 6dcc88262


PHOENIX-2239 Improve strict mode in psql CSV load

Enforce strict mode when individual rows can't be parsed or
upserted.
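
In outline, with hypothetical names rather than the CSVCommonsLoader
internals: strict mode rethrows on the first row that fails to parse or
upsert, while lenient mode logs the row and carries on.

    import java.util.Arrays;
    import java.util.List;

    public class StrictModeDemo {

        static int load(List<String> rows, boolean strict) {
            int loaded = 0;
            for (String row : rows) {
                try {
                    Long.parseLong(row); // stands in for parsing/upserting one row
                    loaded++;
                } catch (NumberFormatException e) {
                    if (strict) {
                        // Strict: abort the whole import on the first bad row.
                        throw new IllegalArgumentException("Error on row '" + row + "'", e);
                    }
                    // Lenient: report the row and continue with the rest.
                    System.err.println("Skipping bad row: " + row);
                }
            }
            return loaded;
        }

        public static void main(String[] args) {
            List<String> rows = Arrays.asList("1", "two", "3");
            System.out.println("lenient loaded " + load(rows, false) + " rows");
            load(rows, true); // throws when it reaches "two"
        }
    }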


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/88648372
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/88648372
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/88648372

Branch: refs/heads/master
Commit: 886483728f0376dea748258348273b214382b535
Parents: 29535f3
Author: Gabriel Reid 
Authored: Wed Sep 9 11:08:42 2015 +0200
Committer: Gabriel Reid 
Committed: Sat Sep 19 18:30:36 2015 +0200

--
 .../phoenix/end2end/CSVCommonsLoaderIT.java | 40 +++-
 .../phoenix/mapreduce/CsvToKeyValueMapper.java  |  8 ++--
 .../apache/phoenix/util/CSVCommonsLoader.java   | 15 ++--
 .../phoenix/util/csv/CsvUpsertExecutor.java |  4 +-
 .../phoenix/util/csv/CsvUpsertExecutorTest.java |  5 ++-
 5 files changed, 58 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/88648372/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
index c7287ea..b78bb63 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
@@ -32,10 +32,12 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.Properties;
 
+import com.google.common.collect.ImmutableList;
 import org.apache.commons.csv.CSVParser;
 import org.apache.commons.csv.CSVRecord;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixTestDriver;
+import org.apache.phoenix.schema.IllegalDataException;
 import org.apache.phoenix.schema.types.PInteger;
 import org.apache.phoenix.schema.types.PArrayDataType;
 import org.apache.phoenix.util.CSVCommonsLoader;
@@ -403,6 +405,40 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
         }
     }
 
+    // Ensure that strict mode also causes the import to stop if a data type on a single
+    // row is not correct
+    @Test
+    public void testCSVUpsertWithInvalidNumericalData_StrictMode() throws Exception {
+        CSVParser parser = null;
+        PhoenixConnection conn = null;
+        try {
+            // Create table
+            String statements = "CREATE TABLE IF NOT EXISTS " + STOCK_TABLE
+                    + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY_ID BIGINT);";
+            conn = DriverManager.getConnection(getUrl())
+                    .unwrap(PhoenixConnection.class);
+            PhoenixRuntime.executeStatements(conn,
+                    new StringReader(statements), null);
+
+            // Upsert CSV file in strict mode
+            CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, STOCK_TABLE,
+                    Arrays.asList("SYMBOL", "COMPANY_ID"), true);
+            try {
+                csvUtil.upsert(new StringReader(STOCK_CSV_VALUES));
+                fail("Running an upsert with data that can't be upserted in strict mode "
+                        + "should throw an exception");
+            } catch (IllegalDataException e) {
+                // Expected
+            }
+
+        } finally {
+            if (parser != null)
+                parser.close();
+            if (conn != null)
+                conn.close();
+        }
+    }
+
     @Test
     public void testCSVUpsertWithAllColumn() throws Exception {
         CSVParser parser = null;
@@ -631,7 +667,7 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
 
         // Upsert CSV file
         CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, "ARRAY_TABLE",
-                null, true, ',', '"', null, "!");
+                ImmutableList.<String>of(), true, ',', '"', null, "!");
         csvUtil.upsert(
                 new StringReader("ID,VALARRAY\n"
                         + "1,2!3!4\n"));
@@ -670,7 +706,7 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
 
         // Upsert CSV file
         CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, "TS_TABLE",
-                null, true, ',', '"', null, "!");
+                ImmutableList.<String>of(), true, ',', '"', null, "!");
         csvUtil.upsert(
                 new StringReader("ID,TS\n"
                         + "1,1970-01-01 00:00:10\n"


phoenix git commit: PHOENIX-2238 Support non-printable delimiters

2015-09-19 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.5-HBase-1.0 b2cb0005b -> c35c47571


PHOENIX-2238 Support non-printable delimiters

Work around serialization issues for non-printable characters
in Hadoop Configuration objects by base64-encoding the delimiter
characters for CSV bulk load.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c35c4757
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c35c4757
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c35c4757

Branch: refs/heads/4.5-HBase-1.0
Commit: c35c475711593b2dfc1f8a9ec05c09688cca3e7c
Parents: b2cb000
Author: Gabriel Reid 
Authored: Fri Sep 11 22:50:01 2015 +0200
Committer: Gabriel Reid 
Committed: Sat Sep 19 20:31:17 2015 +0200

--
 .../phoenix/mapreduce/CsvBulkImportUtil.java| 22 ++--
 .../phoenix/mapreduce/CsvToKeyValueMapper.java  |  6 ++-
 .../mapreduce/CsvBulkImportUtilTest.java| 57 
 3 files changed, 71 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c35c4757/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
index 8f0f7d5..6d77cd5 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
@@ -19,7 +19,9 @@ package org.apache.phoenix.mapreduce;
 
 import java.util.List;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.util.Base64;
 import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
 import org.apache.phoenix.util.ColumnInfo;
 
@@ -49,9 +51,9 @@ public class CsvBulkImportUtil {
         Preconditions.checkNotNull(columnInfoList);
         Preconditions.checkArgument(!columnInfoList.isEmpty(), "Column info list is empty");
         conf.set(CsvToKeyValueMapper.TABLE_NAME_CONFKEY, tableName);
-        conf.set(CsvToKeyValueMapper.FIELD_DELIMITER_CONFKEY, String.valueOf(fieldDelimiter));
-        conf.set(CsvToKeyValueMapper.QUOTE_CHAR_CONFKEY, String.valueOf(quoteChar));
-        conf.set(CsvToKeyValueMapper.ESCAPE_CHAR_CONFKEY, String.valueOf(escapeChar));
+        setChar(conf, CsvToKeyValueMapper.FIELD_DELIMITER_CONFKEY, fieldDelimiter);
+        setChar(conf, CsvToKeyValueMapper.QUOTE_CHAR_CONFKEY, quoteChar);
+        setChar(conf, CsvToKeyValueMapper.ESCAPE_CHAR_CONFKEY, escapeChar);
         if (arrayDelimiter != null) {
             conf.set(CsvToKeyValueMapper.ARRAY_DELIMITER_CONFKEY, arrayDelimiter);
         }
@@ -70,4 +72,18 @@
         conf.setClass(PhoenixConfigurationUtil.UPSERT_HOOK_CLASS_CONFKEY, processorClass,
                 ImportPreUpsertKeyValueProcessor.class);
     }
+
+    @VisibleForTesting
+    static void setChar(Configuration conf, String confKey, char charValue) {
+        conf.set(confKey, Base64.encodeBytes(Character.toString(charValue).getBytes()));
+    }
+
+    @VisibleForTesting
+    static Character getCharacter(Configuration conf, String confKey) {
+        String strValue = conf.get(confKey);
+        if (strValue == null) {
+            return null;
+        }
+        return new String(Base64.decode(strValue)).charAt(0);
+    }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c35c4757/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
index c0328bd..87420c8 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
@@ -124,8 +124,10 @@ public class CsvToKeyValueMapper extends Mapper

[1/2] phoenix git commit: PHOENIX-2239 Improve strict mode in psql CSV load

2015-09-19 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 2af5f8cb4 -> 6cd79361c


PHOENIX-2239 Improve strict mode in psql CSV load

Enforce strict mode when individual rows can't be parsed or
upserted.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a1f61448
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a1f61448
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a1f61448

Branch: refs/heads/4.x-HBase-0.98
Commit: a1f61448a6334fe16a9f3b48c897d1a501ce748c
Parents: 2af5f8c
Author: Gabriel Reid 
Authored: Wed Sep 9 11:08:42 2015 +0200
Committer: Gabriel Reid 
Committed: Sat Sep 19 15:36:46 2015 +0200

--
 .../phoenix/end2end/CSVCommonsLoaderIT.java | 40 +++-
 .../phoenix/mapreduce/CsvToKeyValueMapper.java  |  8 ++--
 .../apache/phoenix/util/CSVCommonsLoader.java   | 15 ++--
 .../phoenix/util/csv/CsvUpsertExecutor.java |  4 +-
 .../phoenix/util/csv/CsvUpsertExecutorTest.java |  5 ++-
 5 files changed, 58 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a1f61448/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
index 8f5ec89..906bc2f 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
@@ -32,10 +32,12 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.Properties;
 
+import com.google.common.collect.ImmutableList;
 import org.apache.commons.csv.CSVParser;
 import org.apache.commons.csv.CSVRecord;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixTestDriver;
+import org.apache.phoenix.schema.IllegalDataException;
 import org.apache.phoenix.schema.types.PInteger;
 import org.apache.phoenix.schema.types.PArrayDataType;
 import org.apache.phoenix.util.CSVCommonsLoader;
@@ -403,6 +405,40 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
         }
     }
 
+    // Ensure that strict mode also causes the import to stop if a data type on a single
+    // row is not correct
+    @Test
+    public void testCSVUpsertWithInvalidNumericalData_StrictMode() throws Exception {
+        CSVParser parser = null;
+        PhoenixConnection conn = null;
+        try {
+            // Create table
+            String statements = "CREATE TABLE IF NOT EXISTS " + STOCK_TABLE
+                    + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY_ID BIGINT);";
+            conn = DriverManager.getConnection(getUrl())
+                    .unwrap(PhoenixConnection.class);
+            PhoenixRuntime.executeStatements(conn,
+                    new StringReader(statements), null);
+
+            // Upsert CSV file in strict mode
+            CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, STOCK_TABLE,
+                    Arrays.asList("SYMBOL", "COMPANY_ID"), true);
+            try {
+                csvUtil.upsert(new StringReader(STOCK_CSV_VALUES));
+                fail("Running an upsert with data that can't be upserted in strict mode "
+                        + "should throw an exception");
+            } catch (IllegalDataException e) {
+                // Expected
+            }
+
+        } finally {
+            if (parser != null)
+                parser.close();
+            if (conn != null)
+                conn.close();
+        }
+    }
+
     @Test
     public void testCSVUpsertWithAllColumn() throws Exception {
         CSVParser parser = null;
@@ -631,7 +667,7 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
 
         // Upsert CSV file
         CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, "ARRAY_TABLE",
-                null, true, ',', '"', null, "!");
+                ImmutableList.<String>of(), true, ',', '"', null, "!");
         csvUtil.upsert(
                 new StringReader("ID,VALARRAY\n"
                         + "1,2!3!4\n"));
@@ -670,7 +706,7 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
 
         // Upsert CSV file
         CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, "TS_TABLE",
-                null, true, ',', '"', null, "!");
+                ImmutableList.<String>of(), true, ',', '"', null, "!");
         csvUtil.upsert(
                 new StringReader("ID,TS\n"
                         + "1,1970-01-01 00:00:10\n"


[1/2] phoenix git commit: PHOENIX-2239 Improve strict mode in psql CSV load

2015-09-19 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.0 4530613ee -> 687c85f0c


PHOENIX-2239 Improve strict mode in psql CSV load

Enforce strict mode when individual rows can't be parsed or
upserted.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ca341c49
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ca341c49
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ca341c49

Branch: refs/heads/4.x-HBase-1.0
Commit: ca341c49dc4d0ab577d4b45967da97c6bf743c84
Parents: 4530613
Author: Gabriel Reid 
Authored: Wed Sep 9 11:08:42 2015 +0200
Committer: Gabriel Reid 
Committed: Sat Sep 19 18:30:24 2015 +0200

--
 .../phoenix/end2end/CSVCommonsLoaderIT.java | 40 +++-
 .../phoenix/mapreduce/CsvToKeyValueMapper.java  |  8 ++--
 .../apache/phoenix/util/CSVCommonsLoader.java   | 15 ++--
 .../phoenix/util/csv/CsvUpsertExecutor.java |  4 +-
 .../phoenix/util/csv/CsvUpsertExecutorTest.java |  5 ++-
 5 files changed, 58 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ca341c49/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
index c7287ea..b78bb63 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
@@ -32,10 +32,12 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.Properties;
 
+import com.google.common.collect.ImmutableList;
 import org.apache.commons.csv.CSVParser;
 import org.apache.commons.csv.CSVRecord;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixTestDriver;
+import org.apache.phoenix.schema.IllegalDataException;
 import org.apache.phoenix.schema.types.PInteger;
 import org.apache.phoenix.schema.types.PArrayDataType;
 import org.apache.phoenix.util.CSVCommonsLoader;
@@ -403,6 +405,40 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
         }
     }
 
+    // Ensure that strict mode also causes the import to stop if a data type on a single
+    // row is not correct
+    @Test
+    public void testCSVUpsertWithInvalidNumericalData_StrictMode() throws Exception {
+        CSVParser parser = null;
+        PhoenixConnection conn = null;
+        try {
+            // Create table
+            String statements = "CREATE TABLE IF NOT EXISTS " + STOCK_TABLE
+                    + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY_ID BIGINT);";
+            conn = DriverManager.getConnection(getUrl())
+                    .unwrap(PhoenixConnection.class);
+            PhoenixRuntime.executeStatements(conn,
+                    new StringReader(statements), null);
+
+            // Upsert CSV file in strict mode
+            CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, STOCK_TABLE,
+                    Arrays.asList("SYMBOL", "COMPANY_ID"), true);
+            try {
+                csvUtil.upsert(new StringReader(STOCK_CSV_VALUES));
+                fail("Running an upsert with data that can't be upserted in strict mode "
+                        + "should throw an exception");
+            } catch (IllegalDataException e) {
+                // Expected
+            }
+
+        } finally {
+            if (parser != null)
+                parser.close();
+            if (conn != null)
+                conn.close();
+        }
+    }
+
     @Test
     public void testCSVUpsertWithAllColumn() throws Exception {
         CSVParser parser = null;
@@ -631,7 +667,7 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
 
         // Upsert CSV file
         CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, "ARRAY_TABLE",
-                null, true, ',', '"', null, "!");
+                ImmutableList.<String>of(), true, ',', '"', null, "!");
         csvUtil.upsert(
                 new StringReader("ID,VALARRAY\n"
                         + "1,2!3!4\n"));
@@ -670,7 +706,7 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
 
         // Upsert CSV file
         CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, "TS_TABLE",
-                null, true, ',', '"', null, "!");
+                ImmutableList.<String>of(), true, ',', '"', null, "!");
         csvUtil.upsert(
                 new StringReader("ID,TS\n"
                         + "1,1970-01-01 00:00:10\n"


phoenix git commit: PHOENIX-2238 Support non-printable delimiters

2015-09-19 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.5-HBase-1.1 47e26a402 -> 2ab807d1e


PHOENIX-2238 Support non-printable delimiters

Work around serialization issues for non-printable characters
in Hadoop Configuration objects by base64-encoding the delimiter
characters for CSV bulk load.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/2ab807d1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/2ab807d1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/2ab807d1

Branch: refs/heads/4.5-HBase-1.1
Commit: 2ab807d1ef8cb9c1cc06bfd53e8a89ce7379c57f
Parents: 47e26a4
Author: Gabriel Reid 
Authored: Fri Sep 11 22:50:01 2015 +0200
Committer: Gabriel Reid 
Committed: Sat Sep 19 20:31:21 2015 +0200

--
 .../phoenix/mapreduce/CsvBulkImportUtil.java| 22 ++--
 .../phoenix/mapreduce/CsvToKeyValueMapper.java  |  6 ++-
 .../mapreduce/CsvBulkImportUtilTest.java| 57 
 3 files changed, 71 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/2ab807d1/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
index 8f0f7d5..6d77cd5 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
@@ -19,7 +19,9 @@ package org.apache.phoenix.mapreduce;
 
 import java.util.List;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.util.Base64;
 import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
 import org.apache.phoenix.util.ColumnInfo;
 
@@ -49,9 +51,9 @@ public class CsvBulkImportUtil {
 Preconditions.checkNotNull(columnInfoList);
 Preconditions.checkArgument(!columnInfoList.isEmpty(), "Column info 
list is empty");
 conf.set(CsvToKeyValueMapper.TABLE_NAME_CONFKEY, tableName);
-conf.set(CsvToKeyValueMapper.FIELD_DELIMITER_CONFKEY, 
String.valueOf(fieldDelimiter));
-conf.set(CsvToKeyValueMapper.QUOTE_CHAR_CONFKEY, 
String.valueOf(quoteChar));
-conf.set(CsvToKeyValueMapper.ESCAPE_CHAR_CONFKEY, 
String.valueOf(escapeChar));
+setChar(conf, CsvToKeyValueMapper.FIELD_DELIMITER_CONFKEY, 
fieldDelimiter);
+setChar(conf, CsvToKeyValueMapper.QUOTE_CHAR_CONFKEY, quoteChar);
+setChar(conf, CsvToKeyValueMapper.ESCAPE_CHAR_CONFKEY, escapeChar);
 if (arrayDelimiter != null) {
 conf.set(CsvToKeyValueMapper.ARRAY_DELIMITER_CONFKEY, 
arrayDelimiter);
 }
@@ -70,4 +72,18 @@ public class CsvBulkImportUtil {
 conf.setClass(PhoenixConfigurationUtil.UPSERT_HOOK_CLASS_CONFKEY, 
processorClass,
 ImportPreUpsertKeyValueProcessor.class);
 }
+
+@VisibleForTesting
+static void setChar(Configuration conf, String confKey, char charValue) {
+conf.set(confKey, 
Base64.encodeBytes(Character.toString(charValue).getBytes()));
+}
+
+@VisibleForTesting
+static Character getCharacter(Configuration conf, String confKey) {
+String strValue = conf.get(confKey);
+if (strValue == null) {
+return null;
+}
+return new String(Base64.decode(strValue)).charAt(0);
+}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/2ab807d1/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
index c0328bd..87420c8 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
@@ -124,8 +124,10 @@ public class CsvToKeyValueMapper extends Mapper

[2/2] phoenix git commit: PHOENIX-2238 Support non-printable delimiters

2015-09-19 Thread greid
PHOENIX-2238 Support non-printable delimiters

Work around serialization issues for non-printable characters
in Hadoop Configuration objects by base64-encoding the delimiter
characters for CSV bulk load.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/687c85f0
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/687c85f0
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/687c85f0

Branch: refs/heads/4.x-HBase-1.0
Commit: 687c85f0cf17dd6371361bbb5f5bdfb2a3974782
Parents: ca341c4
Author: Gabriel Reid 
Authored: Fri Sep 11 22:50:01 2015 +0200
Committer: Gabriel Reid 
Committed: Sat Sep 19 20:30:53 2015 +0200

--
 .../phoenix/mapreduce/CsvBulkImportUtil.java| 22 ++--
 .../phoenix/mapreduce/CsvToKeyValueMapper.java  |  6 ++-
 .../mapreduce/CsvBulkImportUtilTest.java| 57 
 3 files changed, 71 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/687c85f0/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
index 8f0f7d5..6d77cd5 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
@@ -19,7 +19,9 @@ package org.apache.phoenix.mapreduce;
 
 import java.util.List;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.util.Base64;
 import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
 import org.apache.phoenix.util.ColumnInfo;
 
@@ -49,9 +51,9 @@ public class CsvBulkImportUtil {
         Preconditions.checkNotNull(columnInfoList);
         Preconditions.checkArgument(!columnInfoList.isEmpty(), "Column info list is empty");
         conf.set(CsvToKeyValueMapper.TABLE_NAME_CONFKEY, tableName);
-        conf.set(CsvToKeyValueMapper.FIELD_DELIMITER_CONFKEY, String.valueOf(fieldDelimiter));
-        conf.set(CsvToKeyValueMapper.QUOTE_CHAR_CONFKEY, String.valueOf(quoteChar));
-        conf.set(CsvToKeyValueMapper.ESCAPE_CHAR_CONFKEY, String.valueOf(escapeChar));
+        setChar(conf, CsvToKeyValueMapper.FIELD_DELIMITER_CONFKEY, fieldDelimiter);
+        setChar(conf, CsvToKeyValueMapper.QUOTE_CHAR_CONFKEY, quoteChar);
+        setChar(conf, CsvToKeyValueMapper.ESCAPE_CHAR_CONFKEY, escapeChar);
         if (arrayDelimiter != null) {
             conf.set(CsvToKeyValueMapper.ARRAY_DELIMITER_CONFKEY, arrayDelimiter);
         }
@@ -70,4 +72,18 @@
         conf.setClass(PhoenixConfigurationUtil.UPSERT_HOOK_CLASS_CONFKEY, processorClass,
                 ImportPreUpsertKeyValueProcessor.class);
     }
+
+    @VisibleForTesting
+    static void setChar(Configuration conf, String confKey, char charValue) {
+        conf.set(confKey, Base64.encodeBytes(Character.toString(charValue).getBytes()));
+    }
+
+    @VisibleForTesting
+    static Character getCharacter(Configuration conf, String confKey) {
+        String strValue = conf.get(confKey);
+        if (strValue == null) {
+            return null;
+        }
+        return new String(Base64.decode(strValue)).charAt(0);
+    }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/687c85f0/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
index 68270d4..2e69048 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
@@ -125,8 +125,10 @@ public class CsvToKeyValueMapper extends Mapper

[2/2] phoenix git commit: PHOENIX-2238 Support non-printable delimiters

2015-09-19 Thread greid
PHOENIX-2238 Support non-printable delimiters

Work around serialization issues for non-printable characters
in Hadoop Configuration objects by base64-encoding the delimiter
characters for CSV bulk load.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6cd79361
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6cd79361
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6cd79361

Branch: refs/heads/4.x-HBase-0.98
Commit: 6cd79361cb1a694b4c040bbd2cf332be0e925673
Parents: a1f6144
Author: Gabriel Reid 
Authored: Fri Sep 11 22:50:01 2015 +0200
Committer: Gabriel Reid 
Committed: Sat Sep 19 20:30:46 2015 +0200

--
 .../phoenix/mapreduce/CsvBulkImportUtil.java| 22 ++--
 .../phoenix/mapreduce/CsvToKeyValueMapper.java  |  6 ++-
 .../mapreduce/CsvBulkImportUtilTest.java| 57 
 3 files changed, 71 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6cd79361/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
index 8f0f7d5..6d77cd5 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkImportUtil.java
@@ -19,7 +19,9 @@ package org.apache.phoenix.mapreduce;
 
 import java.util.List;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.util.Base64;
 import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
 import org.apache.phoenix.util.ColumnInfo;
 
@@ -49,9 +51,9 @@ public class CsvBulkImportUtil {
         Preconditions.checkNotNull(columnInfoList);
         Preconditions.checkArgument(!columnInfoList.isEmpty(), "Column info list is empty");
         conf.set(CsvToKeyValueMapper.TABLE_NAME_CONFKEY, tableName);
-        conf.set(CsvToKeyValueMapper.FIELD_DELIMITER_CONFKEY, String.valueOf(fieldDelimiter));
-        conf.set(CsvToKeyValueMapper.QUOTE_CHAR_CONFKEY, String.valueOf(quoteChar));
-        conf.set(CsvToKeyValueMapper.ESCAPE_CHAR_CONFKEY, String.valueOf(escapeChar));
+        setChar(conf, CsvToKeyValueMapper.FIELD_DELIMITER_CONFKEY, fieldDelimiter);
+        setChar(conf, CsvToKeyValueMapper.QUOTE_CHAR_CONFKEY, quoteChar);
+        setChar(conf, CsvToKeyValueMapper.ESCAPE_CHAR_CONFKEY, escapeChar);
         if (arrayDelimiter != null) {
             conf.set(CsvToKeyValueMapper.ARRAY_DELIMITER_CONFKEY, arrayDelimiter);
         }
@@ -70,4 +72,18 @@
         conf.setClass(PhoenixConfigurationUtil.UPSERT_HOOK_CLASS_CONFKEY, processorClass,
                 ImportPreUpsertKeyValueProcessor.class);
     }
+
+    @VisibleForTesting
+    static void setChar(Configuration conf, String confKey, char charValue) {
+        conf.set(confKey, Base64.encodeBytes(Character.toString(charValue).getBytes()));
+    }
+
+    @VisibleForTesting
+    static Character getCharacter(Configuration conf, String confKey) {
+        String strValue = conf.get(confKey);
+        if (strValue == null) {
+            return null;
+        }
+        return new String(Base64.decode(strValue)).charAt(0);
+    }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/6cd79361/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
index 68270d4..2e69048 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvToKeyValueMapper.java
@@ -125,8 +125,10 @@ public class CsvToKeyValueMapper extends Mapper

Git Push Summary

2015-07-20 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.1 [deleted] 954a4a1b6


phoenix git commit: PHOENIX-2131 Closing paren in CastParseNode SQL

2015-07-20 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.4-HBase-1.1 eeefd6d12 -> b07f68a60


PHOENIX-2131 Closing paren in CastParseNode SQL

Add a missing closing parenthesis in CastParseNode.toSQL.
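
A cut-down sketch of the bug class (the method shape is illustrative, not
the actual CastParseNode code): every "CAST(" that toSQL opens must be
matched by exactly one ')', on the array path as well as the scalar one.

    public class CastSqlDemo {

        static void appendCastSql(String operand, String type, boolean isArray,
                StringBuilder buf) {
            buf.append(" CAST(");
            buf.append(operand);
            buf.append(" AS ");
            buf.append(type);
            if (isArray) {
                buf.append(' ');
                buf.append("ARRAY"); // array type suffix
            }
            // The fix: close the parenthesis on both paths.
            buf.append(')');
        }

        public static void main(String[] args) {
            StringBuilder buf = new StringBuilder();
            appendCastSql("TABLE1.V", "BIGINT", false, buf);
            System.out.println(buf); // prints " CAST(TABLE1.V AS BIGINT)"
        }
    }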


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b07f68a6
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b07f68a6
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b07f68a6

Branch: refs/heads/4.4-HBase-1.1
Commit: b07f68a603dca48a53828f8c20c5f92478bf69e3
Parents: eeefd6d
Author: Gabriel Reid gr...@apache.org
Authored: Sun Jul 19 17:46:48 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jul 20 15:24:26 2015 +0200

--
 .../org/apache/phoenix/parse/CastParseNode.java |  2 +-
 .../apache/phoenix/parse/CastParseNodeTest.java | 57 
 2 files changed, 58 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b07f68a6/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java 
b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
index 78be616..3e03613 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
@@ -133,7 +133,7 @@ public class CastParseNode extends UnaryParseNode {
         if (isArray) {
             buf.append(' ');
             buf.append(PDataType.ARRAY_TYPE_SUFFIX);
-            buf.append(' ');
         }
+        buf.append(")");
     }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b07f68a6/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
new file mode 100644
index 000..b62d9a9
--- /dev/null
+++ b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.phoenix.parse;
+
+import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.schema.types.PDecimal;
+import org.apache.phoenix.schema.types.PDouble;
+import org.apache.phoenix.schema.types.PLong;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class CastParseNodeTest {
+
+    @Test
+    public void testToSQL() {
+        ColumnParseNode columnParseNode = new ColumnParseNode(TableName.create("SCHEMA1", "TABLE1"), "V");
+        CastParseNode castParseNode = new CastParseNode(columnParseNode, PLong.INSTANCE, null, null, false);
+        StringBuilder stringBuilder = new StringBuilder();
+        castParseNode.toSQL(null, stringBuilder);
+        assertEquals(" CAST(TABLE1.V AS BIGINT)", stringBuilder.toString());
+    }
+
+    @Test
+    public void testToSQL_WithLengthAndScale() {
+        ColumnParseNode columnParseNode = new ColumnParseNode(TableName.create("SCHEMA1", "TABLE1"), "V");
+        CastParseNode castParseNode = new CastParseNode(columnParseNode, PDecimal.INSTANCE, 5, 3, false);
+        StringBuilder stringBuilder = new StringBuilder();
+        castParseNode.toSQL(null, stringBuilder);
+        assertEquals(" CAST(TABLE1.V AS DECIMAL(5,3))", stringBuilder.toString());
+    }
+
+    @Test
+    public void testToSQL_ArrayType() {
+        ColumnParseNode columnParseNode = new ColumnParseNode(TableName.create("SCHEMA1", "TABLE1"), "V");
+        CastParseNode castParseNode = new CastParseNode(columnParseNode, PLong.INSTANCE, null, null, true);
+        StringBuilder stringBuilder = new StringBuilder();
+        castParseNode.toSQL(null, stringBuilder);
+        assertEquals(" CAST(TABLE1.V AS BIGINT ARRAY)", stringBuilder.toString());
+    }
+}
\ No newline at end of file
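
For illustration only (not Phoenix code): a hypothetical reduction of the toSQL() tail patched above, showing why the appended parenthesis matters. Before the change the closing parenthesis was never emitted, so the generated CAST expression was left unbalanced; the class name and literals below are placeholders.

public class CastSqlSketch {
    public static void main(String[] args) {
        StringBuilder buf = new StringBuilder("CAST(TABLE1.V AS BIGINT");
        boolean isArray = false;      // non-array cast
        if (isArray) {
            buf.append(' ');
            buf.append("ARRAY");      // stand-in for PDataType.ARRAY_TYPE_SUFFIX
        }
        buf.append(')');              // the fix: the closing paren is now always emitted
        System.out.println(buf);      // prints: CAST(TABLE1.V AS BIGINT)
    }
}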



phoenix git commit: PHOENIX-2131 Closing paren in CastParseNode SQL

2015-07-20 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.0 6d322103e -> 04e9d58d1


PHOENIX-2131 Closing paren in CastParseNode SQL

Add a missing closing parenthesis in CastParseNode.toSQL.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/04e9d58d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/04e9d58d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/04e9d58d

Branch: refs/heads/4.x-HBase-1.0
Commit: 04e9d58d173e5bed05ab893237a60214e1756520
Parents: 6d32210
Author: Gabriel Reid gr...@apache.org
Authored: Sun Jul 19 17:46:48 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jul 20 15:24:46 2015 +0200

--
 .../org/apache/phoenix/parse/CastParseNode.java |  2 +-
 .../apache/phoenix/parse/CastParseNodeTest.java | 57 
 2 files changed, 58 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/04e9d58d/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java 
b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
index 78be616..3e03613 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
@@ -133,7 +133,7 @@ public class CastParseNode extends UnaryParseNode {
         if (isArray) {
             buf.append(' ');
             buf.append(PDataType.ARRAY_TYPE_SUFFIX);
-            buf.append(' ');
         }
+        buf.append(")");
     }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/04e9d58d/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
new file mode 100644
index 000..b62d9a9
--- /dev/null
+++ b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.phoenix.parse;
+
+import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.schema.types.PDecimal;
+import org.apache.phoenix.schema.types.PDouble;
+import org.apache.phoenix.schema.types.PLong;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class CastParseNodeTest {
+
+    @Test
+    public void testToSQL() {
+        ColumnParseNode columnParseNode = new ColumnParseNode(TableName.create("SCHEMA1", "TABLE1"), "V");
+        CastParseNode castParseNode = new CastParseNode(columnParseNode, PLong.INSTANCE, null, null, false);
+        StringBuilder stringBuilder = new StringBuilder();
+        castParseNode.toSQL(null, stringBuilder);
+        assertEquals(" CAST(TABLE1.V AS BIGINT)", stringBuilder.toString());
+    }
+
+    @Test
+    public void testToSQL_WithLengthAndScale() {
+        ColumnParseNode columnParseNode = new ColumnParseNode(TableName.create("SCHEMA1", "TABLE1"), "V");
+        CastParseNode castParseNode = new CastParseNode(columnParseNode, PDecimal.INSTANCE, 5, 3, false);
+        StringBuilder stringBuilder = new StringBuilder();
+        castParseNode.toSQL(null, stringBuilder);
+        assertEquals(" CAST(TABLE1.V AS DECIMAL(5,3))", stringBuilder.toString());
+    }
+
+    @Test
+    public void testToSQL_ArrayType() {
+        ColumnParseNode columnParseNode = new ColumnParseNode(TableName.create("SCHEMA1", "TABLE1"), "V");
+        CastParseNode castParseNode = new CastParseNode(columnParseNode, PLong.INSTANCE, null, null, true);
+        StringBuilder stringBuilder = new StringBuilder();
+        castParseNode.toSQL(null, stringBuilder);
+        assertEquals(" CAST(TABLE1.V AS BIGINT ARRAY)", stringBuilder.toString());
+    }
+}
\ No newline at end of file



[28/34] phoenix git commit: PHOENIX-2029 Queries are making two rpc calls for getTable

2015-07-20 Thread greid
PHOENIX-2029 Queries are making two rpc calls for getTable


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/52f5b046
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/52f5b046
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/52f5b046

Branch: refs/heads/4.x-HBase-1.1
Commit: 52f5b04643914f33c2d00a1157ca767a32f1adb8
Parents: 43c722c
Author: Thomas D'Silva twdsi...@gmail.com
Authored: Mon Jun 8 15:30:40 2015 -0700
Committer: Thomas D'Silva tdsi...@salesforce.com
Committed: Wed Jun 17 11:16:48 2015 -0700

--
 .../org/apache/phoenix/rpc/UpdateCacheIT.java   | 139 +++
 .../apache/phoenix/compile/QueryCompiler.java   |   2 +-
 .../coprocessor/MetaDataEndpointImpl.java   |   6 +-
 .../apache/phoenix/schema/MetaDataClient.java   |  26 ++--
 4 files changed, 156 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/52f5b046/phoenix-core/src/it/java/org/apache/phoenix/rpc/UpdateCacheIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/rpc/UpdateCacheIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/rpc/UpdateCacheIT.java
new file mode 100644
index 000..c657e41
--- /dev/null
+++ b/phoenix-core/src/it/java/org/apache/phoenix/rpc/UpdateCacheIT.java
@@ -0,0 +1,139 @@
+package org.apache.phoenix.rpc;
+
+import static org.apache.phoenix.util.TestUtil.INDEX_DATA_SCHEMA;
+import static org.apache.phoenix.util.TestUtil.MUTABLE_INDEX_DATA_TABLE;
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.anyLong;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Matchers.isNull;
+import static org.mockito.Mockito.reset;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+import java.math.BigDecimal;
+import java.sql.Connection;
+import java.sql.Date;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT;
+import org.apache.phoenix.end2end.Shadower;
+import org.apache.phoenix.jdbc.PhoenixEmbeddedDriver;
+import org.apache.phoenix.query.ConnectionQueryServices;
+import org.apache.phoenix.query.QueryConstants;
+import org.apache.phoenix.schema.MetaDataClient;
+import org.apache.phoenix.schema.PName;
+import org.apache.phoenix.schema.types.PVarchar;
+import org.apache.phoenix.util.DateUtil;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.ReadOnlyProps;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import com.google.common.collect.Maps;
+
+/**
+ * Verifies the number of RPC calls from {@link MetaDataClient} updateCache()
+ * for transactional and non-transactional tables.
+ */
+public class UpdateCacheIT extends BaseHBaseManagedTimeIT {
+
+    public static final int NUM_MILLIS_IN_DAY = 86400000;
+
+    @Before
+    public void setUp() throws SQLException {
+        ensureTableCreated(getUrl(), MUTABLE_INDEX_DATA_TABLE);
+    }
+
+    @BeforeClass
+    @Shadower(classBeingShadowed = BaseHBaseManagedTimeIT.class)
+    public static void doSetup() throws Exception {
+        Map<String,String> props = Maps.newHashMapWithExpectedSize(3);
+        setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
+    }
+
+    public static void validateRowKeyColumns(ResultSet rs, int i) throws SQLException {
+        assertTrue(rs.next());
+        assertEquals(rs.getString(1), "varchar" + String.valueOf(i));
+        assertEquals(rs.getString(2), "char" + String.valueOf(i));
+        assertEquals(rs.getInt(3), i);
+        assertEquals(rs.getInt(4), i);
+        assertEquals(rs.getBigDecimal(5), new BigDecimal(i*0.5d));
+        Date date = new Date(DateUtil.parseDate("2015-01-01 00:00:00").getTime() + (i - 1) * NUM_MILLIS_IN_DAY);
+        assertEquals(rs.getDate(6), date);
+    }
+
+    public static void setRowKeyColumns(PreparedStatement stmt, int i) throws SQLException {
+        // insert row
+        stmt.setString(1, "varchar" + String.valueOf(i));
+        stmt.setString(2, "char" + String.valueOf(i));
+        stmt.setInt(3, i);
+        stmt.setLong(4, i);
+        stmt.setBigDecimal(5, new BigDecimal(i*0.5d));
+        Date date = new Date(DateUtil.parseDate("2015-01-01 00:00:00").getTime() + (i - 1) * NUM_MILLIS_IN_DAY);
+        stmt.setDate(6, date);
+    }
+
+    @Test
+    public 

phoenix git commit: PHOENIX-2131 Closing paren in CastParseNode SQL

2015-07-20 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.4-HBase-1.0 d226c6a63 -> 2501ecf74


PHOENIX-2131 Closing paren in CastParseNode SQL

Add a missing closing parenthesis in CastParseNode.toSQL.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/2501ecf7
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/2501ecf7
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/2501ecf7

Branch: refs/heads/4.4-HBase-1.0
Commit: 2501ecf744a5b7046997317a6a2f360abffece32
Parents: d226c6a
Author: Gabriel Reid gr...@apache.org
Authored: Sun Jul 19 17:46:48 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jul 20 15:24:14 2015 +0200

--
 .../org/apache/phoenix/parse/CastParseNode.java |  2 +-
 .../apache/phoenix/parse/CastParseNodeTest.java | 57 
 2 files changed, 58 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/2501ecf7/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java 
b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
index 78be616..3e03613 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
@@ -133,7 +133,7 @@ public class CastParseNode extends UnaryParseNode {
         if (isArray) {
             buf.append(' ');
             buf.append(PDataType.ARRAY_TYPE_SUFFIX);
-            buf.append(' ');
         }
+        buf.append(")");
     }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/2501ecf7/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
new file mode 100644
index 000..b62d9a9
--- /dev/null
+++ b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.phoenix.parse;
+
+import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.schema.types.PDecimal;
+import org.apache.phoenix.schema.types.PDouble;
+import org.apache.phoenix.schema.types.PLong;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class CastParseNodeTest {
+
+    @Test
+    public void testToSQL() {
+        ColumnParseNode columnParseNode = new ColumnParseNode(TableName.create("SCHEMA1", "TABLE1"), "V");
+        CastParseNode castParseNode = new CastParseNode(columnParseNode, PLong.INSTANCE, null, null, false);
+        StringBuilder stringBuilder = new StringBuilder();
+        castParseNode.toSQL(null, stringBuilder);
+        assertEquals(" CAST(TABLE1.V AS BIGINT)", stringBuilder.toString());
+    }
+
+    @Test
+    public void testToSQL_WithLengthAndScale() {
+        ColumnParseNode columnParseNode = new ColumnParseNode(TableName.create("SCHEMA1", "TABLE1"), "V");
+        CastParseNode castParseNode = new CastParseNode(columnParseNode, PDecimal.INSTANCE, 5, 3, false);
+        StringBuilder stringBuilder = new StringBuilder();
+        castParseNode.toSQL(null, stringBuilder);
+        assertEquals(" CAST(TABLE1.V AS DECIMAL(5,3))", stringBuilder.toString());
+    }
+
+    @Test
+    public void testToSQL_ArrayType() {
+        ColumnParseNode columnParseNode = new ColumnParseNode(TableName.create("SCHEMA1", "TABLE1"), "V");
+        CastParseNode castParseNode = new CastParseNode(columnParseNode, PLong.INSTANCE, null, null, true);
+        StringBuilder stringBuilder = new StringBuilder();
+        castParseNode.toSQL(null, stringBuilder);
+        assertEquals(" CAST(TABLE1.V AS BIGINT ARRAY)", stringBuilder.toString());
+    }
+}
\ No newline at end of file



[01/34] phoenix git commit: PHOENIX-1681 Use the new Region Interface (Andrew Purtell)

2015-07-20 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.1 [created] 954a4a1b6


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ea622d5f/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsCollector.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsCollector.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsCollector.java
index 272cac6..e7e1dd7 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsCollector.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsCollector.java
@@ -31,8 +31,8 @@ import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
+import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.phoenix.coprocessor.MetaDataProtocol;
@@ -111,7 +111,7 @@ public class StatisticsCollector {
         this.statsTable.close();
     }
 
-    public void updateStatistic(HRegion region) {
+    public void updateStatistic(Region region) {
         try {
             ArrayList<Mutation> mutations = new ArrayList<Mutation>();
             writeStatsToStatsTable(region, true, mutations, TimeKeeper.SYSTEM.getCurrentTime());
@@ -126,7 +126,7 @@ public class StatisticsCollector {
         }
     }
 
-    private void writeStatsToStatsTable(final HRegion region,
+    private void writeStatsToStatsTable(final Region region,
             boolean delete, List<Mutation> mutations, long currentTime) throws IOException {
         try {
             // update the statistics table
@@ -215,7 +215,7 @@ public class StatisticsCollector {
         }
     }
 
-    public InternalScanner createCompactionScanner(HRegion region, Store store, InternalScanner s) throws IOException {
+    public InternalScanner createCompactionScanner(Region region, Store store, InternalScanner s) throws IOException {
         // See if this is for Major compaction
         if (logger.isDebugEnabled()) {
             logger.debug("Compaction scanner created for stats");
@@ -224,13 +224,13 @@ public class StatisticsCollector {
         return getInternalScanner(region, store, s, cfKey);
     }
 
-    public void splitStats(HRegion parent, HRegion left, HRegion right) {
+    public void splitStats(Region parent, Region left, Region right) {
         try {
             if (logger.isDebugEnabled()) {
                 logger.debug("Collecting stats for split of " + parent.getRegionInfo() + " into " + left.getRegionInfo() + " and " + right.getRegionInfo());
             }
             List<Mutation> mutations = Lists.newArrayListWithExpectedSize(3);
-            for (byte[] fam : parent.getStores().keySet()) {
+            for (byte[] fam : parent.getTableDesc().getFamiliesKeys()) {
                 statsTable.splitStats(parent, left, right, this, new ImmutableBytesPtr(fam), mutations);
             }
             if (logger.isDebugEnabled()) {
@@ -243,7 +243,7 @@ public class StatisticsCollector {
         }
     }
 
-    protected InternalScanner getInternalScanner(HRegion region, Store store,
+    protected InternalScanner getInternalScanner(Region region, Store store,
             InternalScanner internalScan, ImmutableBytesPtr family) {
         return new StatisticsScanner(this, statsTable, region, internalScan, family);
     }
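
Illustrative fragment (not from the commit): with the public Region interface the per-family store map is no longer exposed, so family names are enumerated via the table descriptor, as the splitStats() change above does. Region.getTableDesc() and HTableDescriptor.getFamiliesKeys() come from HBase 1.1; the class name is a placeholder.

import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.util.Bytes;

class FamilyKeysSketch {
    // Enumerate column-family names from the table descriptor instead of
    // region.getStores().keySet(), which the Region interface does not expose.
    static void printFamilies(Region region) {
        for (byte[] family : region.getTableDesc().getFamiliesKeys()) {
            System.out.println(Bytes.toString(family));
        }
    }
}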

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ea622d5f/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsScanner.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsScanner.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsScanner.java
index 0e50923..582c4de 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsScanner.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/stats/StatisticsScanner.java
@@ -26,9 +26,9 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Mutation;
-import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.ScannerContext;
+import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 
 /**
@@ -38,11 +38,11 @@ public class StatisticsScanner implements InternalScanner {
 private static final Log LOG = 

[07/34] phoenix git commit: PHOENIX-2005 Connection utilities omit zk client port, parent znode

2015-07-20 Thread greid
PHOENIX-2005 Connection utilities omit zk client port, parent znode


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c6b37b97
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c6b37b97
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c6b37b97

Branch: refs/heads/4.x-HBase-1.1
Commit: c6b37b979da1b514bcb9257c7e095e39b0c2c215
Parents: 3cdc323
Author: Nick Dimiduk ndimi...@apache.org
Authored: Tue May 26 11:11:48 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Tue May 26 13:27:03 2015 -0700

--
 .../phoenix/jdbc/PhoenixEmbeddedDriver.java | 28 --
 .../phoenix/mapreduce/CsvBulkLoadTool.java  | 93 ++--
 .../phoenix/mapreduce/CsvToKeyValueMapper.java  | 26 +-
 .../query/ConnectionQueryServicesImpl.java  |  4 +-
 .../java/org/apache/phoenix/util/QueryUtil.java | 45 --
 .../phoenix/jdbc/PhoenixEmbeddedDriverTest.java | 14 ++-
 .../phoenix/mapreduce/CsvBulkLoadToolTest.java  | 11 ---
 .../mapreduce/CsvToKeyValueMapperTest.java  | 15 
 .../org/apache/phoenix/util/QueryUtilTest.java  | 33 ---
 9 files changed, 139 insertions(+), 130 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c6b37b97/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
index 9e95667..2451603 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
@@ -31,6 +31,7 @@ import java.util.logging.Logger;
 
 import javax.annotation.concurrent.Immutable;
 
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.phoenix.coprocessor.MetaDataProtocol;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.exception.SQLExceptionInfo;
@@ -174,10 +175,10 @@ public abstract class PhoenixEmbeddedDriver implements 
Driver, org.apache.phoeni
 }
 
 /**
- * 
+ *
  * Class to encapsulate connection info for HBase
  *
- * 
+ *
  * @since 0.1.1
  */
 public static class ConnectionInfo {
@@ -204,12 +205,18 @@ public abstract class PhoenixEmbeddedDriver implements 
Driver, org.apache.phoeni
 return false;
 }
 
-        protected static ConnectionInfo create(String url) throws SQLException {
-            StringTokenizer tokenizer = new StringTokenizer(url == null ? "" : url.substring(PhoenixRuntime.JDBC_PROTOCOL.length()), DELIMITERS, true);
+        public static ConnectionInfo create(String url) throws SQLException {
+            url = url == null ? "" : url;
+            url = url.startsWith(PhoenixRuntime.JDBC_PROTOCOL)
+                    ? url.substring(PhoenixRuntime.JDBC_PROTOCOL.length())
+                    : url;
+            StringTokenizer tokenizer = new StringTokenizer(url, DELIMITERS, true);
             int nTokens = 0;
             String[] tokens = new String[5];
             String token = null;
-            while (tokenizer.hasMoreTokens() && !(token=tokenizer.nextToken()).equals(TERMINATOR) && tokenizer.hasMoreTokens() && nTokens < tokens.length) {
+            while (tokenizer.hasMoreTokens() &&
+                    !(token=tokenizer.nextToken()).equals(TERMINATOR) &&
+                    tokenizer.hasMoreTokens() && nTokens < tokens.length) {
                 token = tokenizer.nextToken();
                 // This would mean we have an empty string for a token which is illegal
                 if (DELIMITERS.contains(token)) {
@@ -316,8 +323,7 @@ public abstract class PhoenixEmbeddedDriver implements 
Driver, org.apache.phoeni
 private final String principal;
 private final String keytab;
 
-// used for testing
-ConnectionInfo(String zookeeperQuorum, Integer port, String rootNode, 
String principal, String keytab) {
+public ConnectionInfo(String zookeeperQuorum, Integer port, String 
rootNode, String principal, String keytab) {
 this.zookeeperQuorum = zookeeperQuorum;
 this.port = port;
 this.rootNode = rootNode;
@@ -326,8 +332,7 @@ public abstract class PhoenixEmbeddedDriver implements 
Driver, org.apache.phoeni
 this.keytab = keytab;
 }
 
-// used for testing
-ConnectionInfo(String zookeeperQuorum, Integer port, String rootNode) {
+public ConnectionInfo(String zookeeperQuorum, Integer port, String 
rootNode) {
this(zookeeperQuorum, port, rootNode, null, null);
 }
 
@@ -417,6 +422,11 @@ 
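
For reference, a hypothetical usage sketch of the parsing logic above (an assumption drawn from the diff, not text from the commit): ConnectionInfo.create() tokenizes a quorum[:port[:rootNode[:principal:keytab]]] string, and after this change accepts it with or without the jdbc:phoenix: prefix. Host names and the znode path below are placeholders.

import org.apache.phoenix.jdbc.PhoenixEmbeddedDriver.ConnectionInfo;

class ConnectionInfoSketch {
    static void demo() throws java.sql.SQLException {
        // Quorum, client port and root znode, with the standard prefix:
        ConnectionInfo a = ConnectionInfo.create("jdbc:phoenix:zk1,zk2,zk3:2181:/hbase");
        // The prefix may now be omitted; both calls parse the same triple:
        ConnectionInfo b = ConnectionInfo.create("zk1,zk2,zk3:2181:/hbase");
    }
}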

[29/34] phoenix git commit: PHOENIX-1941 Phoenix tests are failing in linux env with missing class: StaticMapping (Alicia Ying Shu)

2015-07-20 Thread greid
PHOENIX-1941 Phoenix tests are failing in linux env with missing class: 
StaticMapping (Alicia Ying Shu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/329d7494
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/329d7494
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/329d7494

Branch: refs/heads/4.x-HBase-1.1
Commit: 329d74948521ed974593e455369a27d9cd705249
Parents: 52f5b04
Author: Nick Dimiduk ndimi...@apache.org
Authored: Wed Jun 17 12:17:33 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Wed Jun 17 12:23:47 2015 -0700

--
 .../phoenix/end2end/End2EndTestDriver.java   | 19 +++
 1 file changed, 15 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/329d7494/phoenix-core/src/it/java/org/apache/phoenix/end2end/End2EndTestDriver.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/End2EndTestDriver.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/End2EndTestDriver.java
index 26d18cf..743f729 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/End2EndTestDriver.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/End2EndTestDriver.java
@@ -21,6 +21,7 @@ package org.apache.phoenix.end2end;
 
 import java.io.IOException;
 import java.io.PrintStream;
+import java.lang.annotation.Annotation;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
@@ -79,10 +80,20 @@ public class End2EndTestDriver extends AbstractHBaseTool {
 
       @Override
       public boolean isCandidateClass(Class<?> c) {
-        return testFilterRe.matcher(c.getName()).find() &&
-          // Our pattern will match the below NON-IntegrationTest. Rather than
-          // do exotic regex, just filter it out here
-          super.isCandidateClass(c);
+          Annotation[] annotations = c.getAnnotations();
+          for (Annotation curAnnotation : annotations) {
+              if (curAnnotation.toString().contains("NeedsOwnMiniClusterTest")) {
+                  /* Skip tests that aren't designed to run against a live cluster.
+                   * For a live cluster, we cannot bring it up and down as required
+                   * for these tests to run.
+                   */
+                  return false;
+              }
+          }
+          return testFilterRe.matcher(c.getName()).find() &&
+              // Our pattern will match the below NON-IntegrationTest. Rather than
+              // do exotic regex, just filter it out here
+              super.isCandidateClass(c);
       }
     }



[12/34] phoenix git commit: PHOENIX-1939 Test are failing with DoNotRetryIOException: ATABLE: null (Alicia Ying Shu)

2015-07-20 Thread greid
PHOENIX-1939 Test are failing with DoNotRetryIOException: ATABLE: null (Alicia 
Ying Shu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a600cc4d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a600cc4d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a600cc4d

Branch: refs/heads/4.x-HBase-1.1
Commit: a600cc4d7acc2c828ae7782e59d094f99e5631f0
Parents: c95e28d
Author: Nick Dimiduk ndimi...@apache.org
Authored: Fri May 29 17:12:25 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Fri May 29 17:13:08 2015 -0700

--
 .../src/main/java/org/apache/phoenix/schema/PTableImpl.java  | 4 ++--
 .../src/test/java/org/apache/phoenix/query/BaseTest.java | 3 ++-
 2 files changed, 4 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a600cc4d/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
index bf4420c..bdc95b8 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
@@ -88,8 +88,8 @@ public class PTableImpl implements PTable {
 
 private PTableKey key;
 private PName name;
-private PName schemaName;
-private PName tableName;
+private PName schemaName = PName.EMPTY_NAME;
+private PName tableName = PName.EMPTY_NAME;
 private PName tenantId;
 private PTableType type;
 private PIndexState state;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a600cc4d/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
--
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
index 54ae670..b0574c3 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
@@ -121,7 +121,6 @@ import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.ipc.PhoenixRpcSchedulerFactory;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
 import org.apache.hadoop.hbase.ipc.controller.ServerRpcControllerFactory;
-import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.LocalIndexMerger;
 import org.apache.hadoop.hbase.regionserver.RSRpcServices;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -829,6 +828,7 @@ public abstract class BaseTest {
                     logger.info("Table " + fullTableName + " is already deleted.");
                 }
             }
+            rs.close();
             if (lastTenantId != null) {
                 conn.close();
             }
@@ -860,6 +860,7 @@ public abstract class BaseTest {
                 logger.info("DROP SEQUENCE STATEMENT: DROP SEQUENCE " + SchemaUtil.getEscapedTableName(rs.getString(2), rs.getString(3)));
                 conn.createStatement().execute("DROP SEQUENCE " + SchemaUtil.getEscapedTableName(rs.getString(2), rs.getString(3)));
             }
+            rs.close();
         }
 
 protected static void initSumDoubleValues(byte[][] splits, String url) 
throws Exception {



[19/34] phoenix git commit: PHOENIX-2012 RowKeyComparisonFilter logs unencoded data at DEBUG level

2015-07-20 Thread greid
PHOENIX-2012 RowKeyComparisonFilter logs unencoded data at DEBUG level


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9c5f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9c5f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9c5f

Branch: refs/heads/4.x-HBase-1.1
Commit: 9c5fae456f3a0934e43e02af0ef5188b9337
Parents: 29ea503
Author: Nick Dimiduk ndimi...@apache.org
Authored: Wed May 27 15:58:32 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Mon Jun 1 15:57:15 2015 -0700

--
 .../java/org/apache/phoenix/filter/RowKeyComparisonFilter.java  | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/9c5f/phoenix-core/src/main/java/org/apache/phoenix/filter/RowKeyComparisonFilter.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/filter/RowKeyComparisonFilter.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/filter/RowKeyComparisonFilter.java
index 2e2037b..b7de7ac 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/filter/RowKeyComparisonFilter.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/filter/RowKeyComparisonFilter.java
@@ -73,8 +73,9 @@ public class RowKeyComparisonFilter extends 
BooleanExpressionFilter {
         if (evaluate) {
             inputTuple.setKey(v.getRowArray(), v.getRowOffset(), v.getRowLength());
             this.keepRow = Boolean.TRUE.equals(evaluate(inputTuple));
-            if (logger.isDebugEnabled()) {
-                logger.debug("RowKeyComparisonFilter: " + (this.keepRow ? "KEEP" : "FILTER") + " row " + inputTuple);
+            if (logger.isTraceEnabled()) {
+                logger.trace("RowKeyComparisonFilter: " + (this.keepRow ? "KEEP" : "FILTER")
+                        + " row " + inputTuple);
             }
             evaluate = false;
         }



[02/34] phoenix git commit: PHOENIX-1681 Use the new Region Interface (Andrew Purtell)

2015-07-20 Thread greid
PHOENIX-1681 Use the new Region Interface (Andrew Purtell)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ea622d5f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ea622d5f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ea622d5f

Branch: refs/heads/4.x-HBase-1.1
Commit: ea622d5f7ab5c37d2ecf8be6054e5ed42f36a035
Parents: 98271b8
Author: Enis Soztutar e...@apache.org
Authored: Thu May 21 23:22:54 2015 -0700
Committer: Enis Soztutar e...@apache.org
Committed: Fri May 22 00:30:56 2015 -0700

--
 ...ReplayWithIndexWritesAndCompressedWALIT.java |  4 +-
 .../EndToEndCoveredColumnsIndexBuilderIT.java   |  4 +-
 .../IndexHalfStoreFileReaderGenerator.java  |  9 +-
 .../regionserver/IndexSplitTransaction.java | 65 +-
 .../hbase/regionserver/LocalIndexMerger.java| 16 ++--
 .../hbase/regionserver/LocalIndexSplitter.java  | 11 +--
 .../coprocessor/BaseScannerRegionObserver.java  | 26 +++---
 .../GroupedAggregateRegionObserver.java | 13 +--
 .../coprocessor/MetaDataEndpointImpl.java   | 94 ++--
 .../phoenix/coprocessor/ScanRegionObserver.java | 17 ++--
 .../coprocessor/SequenceRegionObserver.java | 16 ++--
 .../UngroupedAggregateRegionObserver.java   | 29 +++---
 .../hbase/index/covered/data/LocalTable.java|  5 +-
 .../write/ParallelWriterIndexCommitter.java |  8 +-
 .../recovery/PerRegionIndexWriteCache.java  | 10 +--
 .../recovery/StoreFailuresInCachePolicy.java|  4 +-
 .../TrackingParallelWriterIndexCommitter.java   |  8 +-
 .../phoenix/index/PhoenixIndexBuilder.java  |  4 +-
 .../apache/phoenix/index/PhoenixIndexCodec.java | 14 ++-
 .../schema/stats/StatisticsCollector.java   | 14 +--
 .../phoenix/schema/stats/StatisticsScanner.java | 16 ++--
 .../phoenix/schema/stats/StatisticsWriter.java  | 16 ++--
 .../java/org/apache/phoenix/util/IndexUtil.java | 38 
 .../index/covered/TestLocalTableState.java  |  8 +-
 .../index/write/TestWALRecoveryCaching.java | 17 ++--
 .../recovery/TestPerRegionIndexWriteCache.java  |  6 +-
 26 files changed, 230 insertions(+), 242 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ea622d5f/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
 
b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
index 3b8ff29..611ba68 100644
--- 
a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
@@ -159,7 +159,7 @@ public class WALReplayWithIndexWritesAndCompressedWALIT {
   }
 
   /**
-   * Test writing edits into an HRegion, closing it, splitting logs, opening Region again. Verify
+   * Test writing edits into a region, closing it, splitting logs, opening Region again. Verify
* seqids.
* @throws Exception on failure
*/
@@ -183,7 +183,7 @@ public class WALReplayWithIndexWritesAndCompressedWALIT {
 builder.build(htd);
 
 // create the region + its WAL
-HRegion region0 = HRegion.createHRegion(hri, hbaseRootDir, this.conf, htd);
+HRegion region0 = HRegion.createHRegion(hri, hbaseRootDir, this.conf, 
htd); // FIXME: Uses private type
 region0.close();
 region0.getWAL().close();
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ea622d5f/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
index d90733f..6b2309e 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.util.EnvironmentEdge;
 import 

[31/34] phoenix git commit: PHOENIX-1975 Detect and use HBASE_HOME when set

2015-07-20 Thread greid
PHOENIX-1975 Detect and use HBASE_HOME when set


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/980d29c5
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/980d29c5
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/980d29c5

Branch: refs/heads/4.x-HBase-1.1
Commit: 980d29c5acf785dc90ece1a7f047711e8d522a2e
Parents: 05b1b8b
Author: Nick Dimiduk ndimi...@apache.org
Authored: Wed Jun 24 13:59:00 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Wed Jun 24 13:59:28 2015 -0700

--
 bin/phoenix_utils.py | 10 +-
 1 file changed, 9 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/980d29c5/bin/phoenix_utils.py
--
diff --git a/bin/phoenix_utils.py b/bin/phoenix_utils.py
index 383e0e1..bfb4737 100755
--- a/bin/phoenix_utils.py
+++ b/bin/phoenix_utils.py
@@ -65,7 +65,15 @@ def setPath():
 phoenix_class_path = os.getenv('PHOENIX_CLASS_PATH','')
 
 global hbase_conf_dir
-    hbase_conf_dir = os.getenv('HBASE_CONF_DIR', os.getenv('HBASE_CONF_PATH', '.'))
+    # if HBASE_CONF_DIR set explicitly, use that
+    hbase_conf_dir = os.getenv('HBASE_CONF_DIR', os.getenv('HBASE_CONF_PATH'))
+    if not hbase_conf_dir:
+        # else fall back to HBASE_HOME
+        if os.getenv('HBASE_HOME'):
+            hbase_conf_dir = os.path.join(os.getenv('HBASE_HOME'), 'conf')
+        else:
+            # default to pwd
+            hbase_conf_dir = '.'
 global hbase_conf_path # keep conf_path around for backward compatibility
 hbase_conf_path = hbase_conf_dir
 



[14/34] phoenix git commit: PHOENIX-2022 Make BaseRegionScanner.next abstract

2015-07-20 Thread greid
PHOENIX-2022 Make BaseRegionScanner.next abstract

Avoid infinite recursion by removing a recursive call within
BaseRegionScanner.next, which was already being used as an
abstract method.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c1882ee2
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c1882ee2
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c1882ee2

Branch: refs/heads/4.x-HBase-1.1
Commit: c1882ee279293b560fda9beb10ac50b8d3ead589
Parents: b2c0cb9
Author: Gabriel Reid gabri...@ngdata.com
Authored: Mon Jun 1 08:57:22 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jun 1 17:22:49 2015 +0200

--
 .../java/org/apache/phoenix/coprocessor/BaseRegionScanner.java   | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c1882ee2/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
index 828f776..3f73048 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
@@ -33,9 +33,7 @@ public abstract class BaseRegionScanner implements RegionScanner {
     }
 
     @Override
-    public boolean next(List<Cell> results) throws IOException {
-        return next(results);
-    }
+    public abstract boolean next(List<Cell> results) throws IOException;
 
     @Override
     public boolean next(List<Cell> result, ScannerContext scannerContext) throws IOException {
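
As an aside (illustration only, not Phoenix code): the removed body is a textbook unintended self-call. The call was meant to reach a different next(...) overload, but it resolves to the same method, so every invocation recurses until StackOverflowError. A minimal, hypothetical reproduction:

import java.util.List;

abstract class RecursionSketch {
    // BUG (as removed above): intended to delegate, but next(results)
    // resolves to this very method, so it recurses forever.
    public boolean next(List<String> results) {
        return next(results);
    }
}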



[16/34] phoenix git commit: PHOENIX-2016 Some Phoenix tests failed with NPE(Alicia Ying Shu)

2015-07-20 Thread greid
PHOENIX-2016 Some Phoenix tests failed with NPE(Alicia Ying Shu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/dc46b144
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/dc46b144
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/dc46b144

Branch: refs/heads/4.x-HBase-1.1
Commit: dc46b144aa9eaf315c3969669dab7f0a50d94281
Parents: eb9452d
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Mon Jun 1 21:34:16 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Mon Jun 1 21:34:16 2015 +0530

--
 phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/dc46b144/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
--
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
index b0574c3..fa78656 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
@@ -1627,6 +1627,7 @@ public abstract class BaseTest {
  * Disable and drop all the tables except SYSTEM.CATALOG and 
SYSTEM.SEQUENCE
  */
 private static void disableAndDropNonSystemTables() throws Exception {
+if (driver == null) return;
 HBaseAdmin admin = driver.getConnectionQueryServices(null, 
null).getAdmin();
 try {
 HTableDescriptor[] tables = admin.listTables();



[30/34] phoenix git commit: PHOENIX-1935 org.apache.phoenix.end2end.ArithmeticQueryIT tests are failing (Alicia Ying Shu)

2015-07-20 Thread greid
PHOENIX-1935 org.apache.phoenix.end2end.ArithmeticQueryIT tests are failing 
(Alicia Ying Shu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/05b1b8b1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/05b1b8b1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/05b1b8b1

Branch: refs/heads/4.x-HBase-1.1
Commit: 05b1b8b13f4137602567f67642946c883646d4d8
Parents: 329d749
Author: Nick Dimiduk ndimi...@apache.org
Authored: Wed Jun 17 12:28:35 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Wed Jun 17 12:31:28 2015 -0700

--
 .../src/it/java/org/apache/phoenix/end2end/BaseViewIT.java  | 2 ++
 phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java | 3 +++
 .../src/test/java/org/apache/phoenix/query/BaseTest.java| 5 -
 3 files changed, 9 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/05b1b8b1/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
index b9d7180..3140077 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
@@ -98,6 +98,7 @@ public abstract class BaseViewIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 assertEquals(1, rs.getInt(1));
 assertEquals(121, rs.getInt(2));
 assertFalse(rs.next());
+conn.close();
 }
 
 protected void testUpdatableViewIndex(Integer saltBuckets) throws 
Exception {
@@ -179,6 +180,7 @@ public abstract class BaseViewIT extends 
BaseOwnClusterHBaseManagedTimeIT {
 + CLIENT MERGE SORT,
 QueryUtil.getExplainPlan(rs));
 }
+conn.close();
 }
 
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/05b1b8b1/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
index 266438d..fb58a8f 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
@@ -92,8 +92,11 @@ public class ViewIT extends BaseViewIT {
             fail();
         } catch (ReadOnlyTableException e) {
             
+        } finally {
+            conn.close();
         }
 
+        conn = DriverManager.getConnection(getUrl());
         int count = 0;
         ResultSet rs = conn.createStatement().executeQuery("SELECT k FROM v2");
         while (rs.next()) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/05b1b8b1/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
--
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
index fa78656..3f09518 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
@@ -115,6 +115,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.IntegrationTestingUtility;
+import org.apache.hadoop.hbase.TableNotEnabledException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
@@ -1634,7 +1635,9 @@ public abstract class BaseTest {
 for (HTableDescriptor table : tables) {
 String schemaName = 
SchemaUtil.getSchemaNameFromFullName(table.getName());
 if (!QueryConstants.SYSTEM_SCHEMA_NAME.equals(schemaName)) {
-admin.disableTable(table.getName());
+try{
+admin.disableTable(table.getName());
+} catch (TableNotEnabledException ignored){}
 admin.deleteTable(table.getName());
 }
 }



[04/34] phoenix git commit: PHOENIX-1763 Support building with HBase-1.1.0

2015-07-20 Thread greid
PHOENIX-1763 Support building with HBase-1.1.0


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/98271b88
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/98271b88
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/98271b88

Branch: refs/heads/4.x-HBase-1.1
Commit: 98271b888c113f10e174205434e05d3b36b7eb67
Parents: bf01eb2
Author: Enis Soztutar e...@apache.org
Authored: Thu May 21 23:08:26 2015 -0700
Committer: Enis Soztutar e...@apache.org
Committed: Fri May 22 00:30:56 2015 -0700

--
 phoenix-core/pom.xml| 17 +++--
 .../regionserver/IndexHalfStoreFileReader.java  | 31 ++--
 .../regionserver/IndexSplitTransaction.java | 39 --
 .../hbase/regionserver/LocalIndexMerger.java|  3 +-
 .../cache/aggcache/SpillableGroupByCache.java   | 13 +++-
 .../phoenix/coprocessor/BaseRegionScanner.java  | 12 +--
 .../coprocessor/BaseScannerRegionObserver.java  | 77 +++-
 .../coprocessor/DelegateRegionScanner.java  | 23 --
 .../GroupedAggregateRegionObserver.java | 53 --
 .../coprocessor/HashJoinRegionScanner.java  | 60 ---
 .../coprocessor/MetaDataRegionObserver.java | 23 +++---
 .../phoenix/coprocessor/ScanRegionObserver.java | 11 ++-
 .../UngroupedAggregateRegionObserver.java   | 55 +++---
 .../hbase/index/covered/data/LocalTable.java|  2 +-
 .../index/covered/filter/FamilyOnlyFilter.java  |  6 +-
 .../index/scanner/FilteredKeyValueScanner.java  |  2 +-
 .../phoenix/index/PhoenixIndexBuilder.java  |  6 +-
 .../iterate/RegionScannerResultIterator.java|  9 ++-
 .../phoenix/schema/stats/StatisticsScanner.java | 10 ++-
 .../hbase/ipc/PhoenixIndexRpcSchedulerTest.java |  6 +-
 .../index/covered/TestLocalTableState.java  |  1 -
 .../covered/filter/TestFamilyOnlyFilter.java| 12 +--
 .../index/write/TestWALRecoveryCaching.java |  4 +-
 phoenix-flume/pom.xml   |  9 ---
 phoenix-pig/pom.xml | 31 +---
 phoenix-spark/pom.xml   |  7 ++
 pom.xml | 41 ++-
 27 files changed, 361 insertions(+), 202 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/98271b88/phoenix-core/pom.xml
--
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
index 45b8d73..22e6b60 100644
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@ -350,16 +350,25 @@
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-it/artifactId
-  version${hbase.version}/version
   typetest-jar/type
   scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
+  artifactIdhbase-annotations/artifactId
+/dependency
+dependency
+  groupIdorg.apache.hbase/groupId
   artifactIdhbase-common/artifactId
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
+  artifactIdhbase-common/artifactId
+  scopetest/scope
+  typetest-jar/type
+/dependency
+dependency
+  groupIdorg.apache.hbase/groupId
   artifactIdhbase-protocol/artifactId
 /dependency
 dependency
@@ -369,18 +378,16 @@
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-server/artifactId
-  version${hbase.version}/version
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-server/artifactId
-  version${hbase.version}/version
   typetest-jar/type
+  scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-hadoop-compat/artifactId
-  scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
@@ -391,13 +398,11 @@
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-hadoop2-compat/artifactId
-  version${hbase.version}/version
   scopetest/scope
 /dependency
 dependency
   groupIdorg.apache.hbase/groupId
   artifactIdhbase-hadoop2-compat/artifactId
-  version${hbase.version}/version
   typetest-jar/type
   scopetest/scope
 /dependency

http://git-wip-us.apache.org/repos/asf/phoenix/blob/98271b88/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexHalfStoreFileReader.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexHalfStoreFileReader.java
 
b/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexHalfStoreFileReader.java
index 49e2022..9befc8c 100644
--- 

[05/34] phoenix git commit: PHOENIX-2008 Integration tests are failing with HBase-1.1.0 because HBASE-13756(Rajeshbabu)

2015-07-20 Thread greid
PHOENIX-2008 Integration tests are failing with HBase-1.1.0 because 
HBASE-13756(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/56e1c0a1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/56e1c0a1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/56e1c0a1

Branch: refs/heads/4.x-HBase-1.1
Commit: 56e1c0a1f348572fb73e9d0b8bbfb053df7f8710
Parents: ea622d5
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Sat May 23 23:29:31 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Sat May 23 23:29:31 2015 +0530

--
 phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/56e1c0a1/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
--
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
index 54ae670..4aa28c4 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
@@ -620,6 +620,8 @@ public abstract class BaseTest {
         }
         //no point doing sanity checks when running tests.
         conf.setBoolean("hbase.table.sanity.checks", false);
+        // Remove this configuration once hbase has HBASE-13756 fix.
+        conf.set("hbase.regionserver.msginterval", "30");
         // set the server rpc controller and rpc scheduler factory, used to configure the cluster
         conf.set(RpcControllerFactory.CUSTOM_CONTROLLER_CONF_KEY, DEFAULT_SERVER_RPC_CONTROLLER_FACTORY);
         conf.set(RSRpcServices.REGION_SERVER_RPC_SCHEDULER_FACTORY_CLASS, DEFAULT_RPC_SCHEDULER_FACTORY);



[23/34] phoenix git commit: PHOENIX-2027 Subqueries with no data are raising IllegalStateException(Alicia Ying Shu)

2015-07-20 Thread greid
PHOENIX-2027 Subqueries with no data are raising IllegalStateException(Alicia 
Ying Shu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/bfd860ff
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/bfd860ff
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/bfd860ff

Branch: refs/heads/4.x-HBase-1.1
Commit: bfd860ffec62a784f1229997cf98892ea3c0592d
Parents: 18b9e72
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Wed Jun 10 01:01:29 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Wed Jun 10 01:01:29 2015 +0530

--
 .../apache/phoenix/end2end/SortMergeJoinIT.java | 54 
 .../phoenix/execute/SortMergeJoinPlan.java  |  4 +-
 2 files changed, 56 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/bfd860ff/phoenix-core/src/it/java/org/apache/phoenix/end2end/SortMergeJoinIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SortMergeJoinIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SortMergeJoinIT.java
index 6f14a45..8b65ab3 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SortMergeJoinIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SortMergeJoinIT.java
@@ -2658,5 +2658,59 @@ public class SortMergeJoinIT extends BaseHBaseManagedTimeIT {
 }
 }
 
+    @Test
+    public void testSubqueryWithoutData() throws Exception {
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        conn.setAutoCommit(false);
+
+        try {
+            String GRAMMAR_TABLE = "CREATE TABLE IF NOT EXISTS GRAMMAR_TABLE (ID INTEGER PRIMARY KEY, " +
+                    "unsig_id UNSIGNED_INT, big_id BIGINT, unsig_long_id UNSIGNED_LONG, tiny_id TINYINT," +
+                    "unsig_tiny_id UNSIGNED_TINYINT, small_id SMALLINT, unsig_small_id UNSIGNED_SMALLINT," +
+                    "float_id FLOAT, unsig_float_id UNSIGNED_FLOAT, double_id DOUBLE, unsig_double_id UNSIGNED_DOUBLE," +
+                    "decimal_id DECIMAL, boolean_id BOOLEAN, time_id TIME, date_id DATE, timestamp_id TIMESTAMP," +
+                    "unsig_time_id TIME, unsig_date_id DATE, unsig_timestamp_id TIMESTAMP, varchar_id VARCHAR (30)," +
+                    "char_id CHAR (30), binary_id BINARY (100), varbinary_id VARBINARY (100))";
+
+            String LARGE_TABLE = "CREATE TABLE IF NOT EXISTS LARGE_TABLE (ID INTEGER PRIMARY KEY, " +
+                    "unsig_id UNSIGNED_INT, big_id BIGINT, unsig_long_id UNSIGNED_LONG, tiny_id TINYINT," +
+                    "unsig_tiny_id UNSIGNED_TINYINT, small_id SMALLINT, unsig_small_id UNSIGNED_SMALLINT," +
+                    "float_id FLOAT, unsig_float_id UNSIGNED_FLOAT, double_id DOUBLE, unsig_double_id UNSIGNED_DOUBLE," +
+                    "decimal_id DECIMAL, boolean_id BOOLEAN, time_id TIME, date_id DATE, timestamp_id TIMESTAMP," +
+                    "unsig_time_id TIME, unsig_date_id DATE, unsig_timestamp_id TIMESTAMP, varchar_id VARCHAR (30)," +
+                    "char_id CHAR (30), binary_id BINARY (100), varbinary_id VARBINARY (100))";
+
+            String SECONDARY_LARGE_TABLE = "CREATE TABLE IF NOT EXISTS SECONDARY_LARGE_TABLE (SEC_ID INTEGER PRIMARY KEY," +
+                    "sec_unsig_id UNSIGNED_INT, sec_big_id BIGINT, sec_usnig_long_id UNSIGNED_LONG, sec_tiny_id TINYINT," +
+                    "sec_unsig_tiny_id UNSIGNED_TINYINT, sec_small_id SMALLINT, sec_unsig_small_id UNSIGNED_SMALLINT," +
+                    "sec_float_id FLOAT, sec_unsig_float_id UNSIGNED_FLOAT, sec_double_id DOUBLE, sec_unsig_double_id UNSIGNED_DOUBLE," +
+                    "sec_decimal_id DECIMAL, sec_boolean_id BOOLEAN, sec_time_id TIME, sec_date_id DATE," +
+                    "sec_timestamp_id TIMESTAMP, sec_unsig_time_id TIME, sec_unsig_date_id DATE, sec_unsig_timestamp_id TIMESTAMP," +
+                    "sec_varchar_id VARCHAR (30), sec_char_id CHAR (30), sec_binary_id BINARY (100), sec_varbinary_id VARBINARY (100))";
+            createTestTable(getUrl(), GRAMMAR_TABLE);
+            createTestTable(getUrl(), LARGE_TABLE);
+            createTestTable(getUrl(), SECONDARY_LARGE_TABLE);
+
+            String ddl = "SELECT /*+USE_SORT_MERGE_JOIN*/ * FROM (SELECT ID, BIG_ID, DATE_ID FROM LARGE_TABLE AS A WHERE (A.ID % 5) = 0) AS A " +
+                    "INNER JOIN (SELECT SEC_ID, SEC_TINY_ID, SEC_UNSIG_FLOAT_ID FROM SECONDARY_LARGE_TABLE AS B WHERE (B.SEC_ID % 5) = 0) AS B " +
+                    "ON A.ID=B.SEC_ID WHERE A.DATE_ID > ALL (SELECT SEC_DATE_ID FROM SECONDARY_LARGE_TABLE LIMIT 100) " +
+                    "AND
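The listing is cut off above by the archive. As a minimal sketch of the scenario this commit addresses (table and column names here are hypothetical, not from the patch):

    // A sort-merge join whose comparison subquery returns no rows.
    Connection conn = DriverManager.getConnection(getUrl());
    Statement stmt = conn.createStatement();
    ResultSet rs = stmt.executeQuery(
            "SELECT /*+USE_SORT_MERGE_JOIN*/ A.ID FROM T1 AS A " +
            "INNER JOIN T2 AS B ON A.ID = B.SEC_ID " +
            "WHERE A.DATE_ID > ALL (SELECT SEC_DATE_ID FROM T2 WHERE 1 = 0)");
    // Before this fix, the empty subquery could surface an IllegalStateException
    // from SortMergeJoinPlan; afterwards the query simply returns no rows.
    assertFalse(rs.next());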

[25/34] phoenix git commit: PHOENIX-2033 PQS log environment details on launch

2015-07-20 Thread greid
PHOENIX-2033 PQS log environment details on launch


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/e64f61ba
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/e64f61ba
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/e64f61ba

Branch: refs/heads/4.x-HBase-1.1
Commit: e64f61ba431b8db938bf60992bbde56f4c540946
Parents: f7d7349
Author: Nick Dimiduk ndimi...@apache.org
Authored: Tue Jun 9 17:12:21 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Fri Jun 12 09:38:42 2015 -0700

--
 .../apache/phoenix/queryserver/server/Main.java | 69 
 1 file changed, 69 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/e64f61ba/phoenix-server/src/main/java/org/apache/phoenix/queryserver/server/Main.java
--
diff --git a/phoenix-server/src/main/java/org/apache/phoenix/queryserver/server/Main.java b/phoenix-server/src/main/java/org/apache/phoenix/queryserver/server/Main.java
index 55febc5..9f9bfc7 100644
--- a/phoenix-server/src/main/java/org/apache/phoenix/queryserver/server/Main.java
+++ b/phoenix-server/src/main/java/org/apache/phoenix/queryserver/server/Main.java
@@ -34,7 +34,12 @@ import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
+import java.lang.management.ManagementFactory;
+import java.lang.management.RuntimeMXBean;
 import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 
@@ -50,6 +55,11 @@ public final class Main extends Configured implements Tool, Runnable {
       "phoenix.queryserver.http.port";
   public static final int DEFAULT_HTTP_PORT = 8765;
 
+  public static final String QUERY_SERVER_ENV_LOGGING_KEY =
+      "phoenix.queryserver.envvars.logging.disabled";
+  public static final String QUERY_SERVER_ENV_LOGGING_SKIPWORDS_KEY =
+      "phoenix.queryserver.envvars.logging.skipwords";
+
   public static final String KEYTAB_FILENAME_KEY = "phoenix.queryserver.keytab.file";
   public static final String KERBEROS_PRINCIPAL_KEY = "phoenix.queryserver.kerberos.principal";
   public static final String DNS_NAMESERVER_KEY = "phoenix.queryserver.dns.nameserver";
@@ -58,12 +68,70 @@ public final class Main extends Configured implements Tool, Runnable {
 
   protected static final Log LOG = LogFactory.getLog(Main.class);
 
+  @SuppressWarnings("serial")
+  private static final Set<String> DEFAULT_SKIP_WORDS = new HashSet<String>() {
+    {
+      add("secret");
+      add("passwd");
+      add("password");
+      add("credential");
+    }
+  };
+
   private final String[] argv;
   private final CountDownLatch runningLatch = new CountDownLatch(1);
   private HttpServer server = null;
   private int retCode = 0;
   private Throwable t = null;
 
+  /**
+   * Log information about the currently running JVM.
+   */
+  public static void logJVMInfo() {
+    // Print out vm stats before starting up.
+    RuntimeMXBean runtime = ManagementFactory.getRuntimeMXBean();
+    if (runtime != null) {
+      LOG.info("vmName=" + runtime.getVmName() + ", vmVendor=" +
+          runtime.getVmVendor() + ", vmVersion=" + runtime.getVmVersion());
+      LOG.info("vmInputArguments=" + runtime.getInputArguments());
+    }
+  }
+
+  /**
+   * Logs information about the currently running JVM process including
+   * the environment variables. Logging of env vars can be disabled by
+   * setting {@code phoenix.envvars.logging.disabled} to {@code true}.
+   * <p>If enabled, you can also exclude environment variables containing
+   * certain substrings by setting {@code phoenix.envvars.logging.skipwords}
+   * to comma separated list of such substrings.
+   */
+  public static void logProcessInfo(Configuration conf) {
+    // log environment variables unless asked not to
+    if (conf == null || !conf.getBoolean(QUERY_SERVER_ENV_LOGGING_KEY, false)) {
+      Set<String> skipWords = new HashSet<String>(DEFAULT_SKIP_WORDS);
+      if (conf != null) {
+        String[] confSkipWords = conf.getStrings(QUERY_SERVER_ENV_LOGGING_SKIPWORDS_KEY);
+        if (confSkipWords != null) {
+          skipWords.addAll(Arrays.asList(confSkipWords));
+        }
+      }
+
+      nextEnv:
+      for (Map.Entry<String, String> entry : System.getenv().entrySet()) {
+        String key = entry.getKey().toLowerCase();
+        String value = entry.getValue().toLowerCase();
+        // exclude variables which may contain skip words
+        for(String skipWord : skipWords) {
+          if (key.contains(skipWord) || value.contains(skipWord))
+            continue nextEnv;
+        }
+        LOG.info("env:" + entry);
+      }
+    }
+    // and
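As a minimal sketch of how an operator drives these knobs (the key names are the constants added above; the extra skip words and the Configuration source are illustrative):

    Configuration conf = HBaseConfiguration.create();
    // Keep env-var logging enabled, but also mask anything mentioning these words.
    conf.setBoolean("phoenix.queryserver.envvars.logging.disabled", false);
    conf.setStrings("phoenix.queryserver.envvars.logging.skipwords", "token", "apikey");
    Main.logProcessInfo(conf);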

phoenix git commit: PHOENIX-2131 Closing paren in CastParseNode SQL

2015-07-20 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/master b329e85b6 -> b38a62431


PHOENIX-2131 Closing paren in CastParseNode SQL

Add a missing closing parenthesis in CastParseNode.toSQL.
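For quick orientation, a hedged before/after of the generated SQL (the fixed output matches the new unit test below; the broken output is inferred from the removed line):

    // before: " CAST(TABLE1.V AS BIGINT"   (unbalanced parenthesis)
    // after:  " CAST(TABLE1.V AS BIGINT)"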


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b38a6243
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b38a6243
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b38a6243

Branch: refs/heads/master
Commit: b38a62431ee44df171c913097d18e2433c951466
Parents: b329e85
Author: Gabriel Reid gr...@apache.org
Authored: Sun Jul 19 17:46:48 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jul 20 15:25:01 2015 +0200

--
 .../org/apache/phoenix/parse/CastParseNode.java |  2 +-
 .../apache/phoenix/parse/CastParseNodeTest.java | 57 
 2 files changed, 58 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b38a6243/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
index 78be616..3e03613 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
@@ -133,7 +133,7 @@ public class CastParseNode extends UnaryParseNode {
         if (isArray) {
             buf.append(' ');
             buf.append(PDataType.ARRAY_TYPE_SUFFIX);
-            buf.append(' ');
         }
+        buf.append(")");
     }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b38a6243/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
--
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
new file mode 100644
index 000..b62d9a9
--- /dev/null
+++ b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.phoenix.parse;
+
+import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.schema.types.PDecimal;
+import org.apache.phoenix.schema.types.PDouble;
+import org.apache.phoenix.schema.types.PLong;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class CastParseNodeTest {
+
+    @Test
+    public void testToSQL() {
+        ColumnParseNode columnParseNode = new ColumnParseNode(TableName.create("SCHEMA1", "TABLE1"), "V");
+        CastParseNode castParseNode = new CastParseNode(columnParseNode, PLong.INSTANCE, null, null, false);
+        StringBuilder stringBuilder = new StringBuilder();
+        castParseNode.toSQL(null, stringBuilder);
+        assertEquals(" CAST(TABLE1.V AS BIGINT)", stringBuilder.toString());
+    }
+
+    @Test
+    public void testToSQL_WithLengthAndScale() {
+        ColumnParseNode columnParseNode = new ColumnParseNode(TableName.create("SCHEMA1", "TABLE1"), "V");
+        CastParseNode castParseNode = new CastParseNode(columnParseNode, PDecimal.INSTANCE, 5, 3, false);
+        StringBuilder stringBuilder = new StringBuilder();
+        castParseNode.toSQL(null, stringBuilder);
+        assertEquals(" CAST(TABLE1.V AS DECIMAL(5,3))", stringBuilder.toString());
+    }
+
+    @Test
+    public void testToSQL_ArrayType() {
+        ColumnParseNode columnParseNode = new ColumnParseNode(TableName.create("SCHEMA1", "TABLE1"), "V");
+        CastParseNode castParseNode = new CastParseNode(columnParseNode, PLong.INSTANCE, null, null, true);
+        StringBuilder stringBuilder = new StringBuilder();
+        castParseNode.toSQL(null, stringBuilder);
+        assertEquals(" CAST(TABLE1.V AS BIGINT ARRAY)", stringBuilder.toString());
+    }
+}
\ No newline at end of file



[09/34] phoenix git commit: Revert "PHOENIX-2008 Integration tests are failing with HBase-1.1.0 because HBASE-13756(Rajeshbabu)"

2015-07-20 Thread greid
Revert "PHOENIX-2008 Integration tests are failing with HBase-1.1.0 because HBASE-13756(Rajeshbabu)"

This reverts commit 56e1c0a1f348572fb73e9d0b8bbfb053df7f8710.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/1a2f2dc1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/1a2f2dc1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/1a2f2dc1

Branch: refs/heads/4.x-HBase-1.1
Commit: 1a2f2dc1ea6dd75c224b78a0dc2b312b0e6f5bce
Parents: 5546a42
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Wed May 27 14:52:25 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Wed May 27 14:52:25 2015 +0530

--
 phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java | 2 --
 1 file changed, 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/1a2f2dc1/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
--
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
index 4aa28c4..54ae670 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
@@ -620,8 +620,6 @@ public abstract class BaseTest {
 }
 //no point doing sanity checks when running tests.
 conf.setBoolean("hbase.table.sanity.checks", false);
-// Remove this configuration once hbase has HBASE-13756 fix.
-conf.set("hbase.regionserver.msginterval", "30");
 // set the server rpc controller and rpc scheduler factory, used to configure the cluster
 conf.set(RpcControllerFactory.CUSTOM_CONTROLLER_CONF_KEY, DEFAULT_SERVER_RPC_CONTROLLER_FACTORY);
 conf.set(RSRpcServices.REGION_SERVER_RPC_SCHEDULER_FACTORY_CLASS, DEFAULT_RPC_SCHEDULER_FACTORY);



[34/34] phoenix git commit: PHOENIX-2131 Closing paren in CastParseNode SQL

2015-07-20 Thread greid
PHOENIX-2131 Closing paren in CastParseNode SQL

Add a missing closing parenthesis in CastParseNode.toSQL.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/954a4a1b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/954a4a1b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/954a4a1b

Branch: refs/heads/4.x-HBase-1.1
Commit: 954a4a1b615e6c42dfaecbf2ff6b8174261ccea6
Parents: 89ab41c
Author: Gabriel Reid gr...@apache.org
Authored: Sun Jul 19 17:46:48 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jul 20 15:24:57 2015 +0200

--
 .../org/apache/phoenix/parse/CastParseNode.java |  2 +-
 .../apache/phoenix/parse/CastParseNodeTest.java | 57 
 2 files changed, 58 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/954a4a1b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
index 78be616..3e03613 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
@@ -133,7 +133,7 @@ public class CastParseNode extends UnaryParseNode {
         if (isArray) {
             buf.append(' ');
             buf.append(PDataType.ARRAY_TYPE_SUFFIX);
-            buf.append(' ');
         }
+        buf.append(")");
     }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/954a4a1b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
--
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
new file mode 100644
index 000..b62d9a9
--- /dev/null
+++ b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.phoenix.parse;
+
+import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.schema.types.PDecimal;
+import org.apache.phoenix.schema.types.PDouble;
+import org.apache.phoenix.schema.types.PLong;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class CastParseNodeTest {
+
+    @Test
+    public void testToSQL() {
+        ColumnParseNode columnParseNode = new ColumnParseNode(TableName.create("SCHEMA1", "TABLE1"), "V");
+        CastParseNode castParseNode = new CastParseNode(columnParseNode, PLong.INSTANCE, null, null, false);
+        StringBuilder stringBuilder = new StringBuilder();
+        castParseNode.toSQL(null, stringBuilder);
+        assertEquals(" CAST(TABLE1.V AS BIGINT)", stringBuilder.toString());
+    }
+
+    @Test
+    public void testToSQL_WithLengthAndScale() {
+        ColumnParseNode columnParseNode = new ColumnParseNode(TableName.create("SCHEMA1", "TABLE1"), "V");
+        CastParseNode castParseNode = new CastParseNode(columnParseNode, PDecimal.INSTANCE, 5, 3, false);
+        StringBuilder stringBuilder = new StringBuilder();
+        castParseNode.toSQL(null, stringBuilder);
+        assertEquals(" CAST(TABLE1.V AS DECIMAL(5,3))", stringBuilder.toString());
+    }
+
+    @Test
+    public void testToSQL_ArrayType() {
+        ColumnParseNode columnParseNode = new ColumnParseNode(TableName.create("SCHEMA1", "TABLE1"), "V");
+        CastParseNode castParseNode = new CastParseNode(columnParseNode, PLong.INSTANCE, null, null, true);
+        StringBuilder stringBuilder = new StringBuilder();
+        castParseNode.toSQL(null, stringBuilder);
+        assertEquals(" CAST(TABLE1.V AS BIGINT ARRAY)", stringBuilder.toString());
+    }
+}
\ No newline at end of file



[11/34] phoenix git commit: PHOENIX-1964 - porting from master

2015-07-20 Thread greid
PHOENIX-1964 - porting from master


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c95e28df
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c95e28df
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c95e28df

Branch: refs/heads/4.x-HBase-1.1
Commit: c95e28df94241f47d5cfe9a1515b21960c93adf2
Parents: 0e0b4dd
Author: cmarcel cmar...@salesforce.com
Authored: Wed May 27 13:58:45 2015 -0700
Committer: cmarcel cmar...@salesforce.com
Committed: Wed May 27 13:58:45 2015 -0700

--
 phoenix-pherf/config/pherf.properties   |  3 ++
 .../org/apache/phoenix/pherf/DataIngestIT.java  |  3 +-
 .../apache/phoenix/pherf/ResultBaseTestIT.java  | 45 ++
 .../java/org/apache/phoenix/pherf/Pherf.java|  7 +--
 .../apache/phoenix/pherf/PherfConstants.java| 50 +++-
 .../phoenix/pherf/loaddata/DataLoader.java  |  2 +-
 .../apache/phoenix/pherf/result/ResultUtil.java |  4 +-
 .../pherf/result/impl/CSVResultHandler.java |  5 +-
 .../pherf/result/impl/ImageResultHandler.java   |  5 +-
 .../pherf/result/impl/XMLResultHandler.java |  6 ++-
 .../apache/phoenix/pherf/util/ResourceList.java | 26 --
 .../pherf/workload/WorkloadExecutor.java|  2 +-
 .../phoenix/pherf/ConfigurationParserTest.java  |  2 +-
 .../org/apache/phoenix/pherf/ResourceTest.java  |  8 ++--
 .../apache/phoenix/pherf/ResultBaseTest.java| 44 +
 .../org/apache/phoenix/pherf/ResultTest.java|  5 +-
 16 files changed, 168 insertions(+), 49 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c95e28df/phoenix-pherf/config/pherf.properties
--
diff --git a/phoenix-pherf/config/pherf.properties b/phoenix-pherf/config/pherf.properties
index 354707a..1142f9b5 100644
--- a/phoenix-pherf/config/pherf.properties
+++ b/phoenix-pherf/config/pherf.properties
@@ -29,3 +29,6 @@ pherf.default.dataloader.threadpool=0
 # When upserting, this is the max # of rows that will be inserted in a single commit
 pherf.default.dataloader.batchsize=1000
 
+# Directory where results from a scenario run will be written
+pherf.default.results.dir=RESULTS
+

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c95e28df/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
--
diff --git a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
index b29656d..2b56f43 100644
--- a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
+++ b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/DataIngestIT.java
@@ -18,7 +18,6 @@
 
 package org.apache.phoenix.pherf;
 
-import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT;
 import org.apache.phoenix.pherf.configuration.Column;
 import org.apache.phoenix.pherf.configuration.DataTypeMapping;
 import org.apache.phoenix.pherf.configuration.Scenario;
@@ -39,7 +38,7 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-public class DataIngestIT extends BaseHBaseManagedTimeIT {
+public class DataIngestIT extends ResultBaseTestIT {
     protected static PhoenixUtil util = new PhoenixUtil(true);
     static final String matcherScenario = ".*scenario/.*test.*xml";
     static final String matcherSchema = ".*datamodel/.*test.*sql";

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c95e28df/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java
--
diff --git a/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java
new file mode 100644
index 000..6e103b8
--- /dev/null
+++ b/phoenix-pherf/src/it/java/org/apache/phoenix/pherf/ResultBaseTestIT.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *   or more contributor license agreements.  See the NOTICE file
+ *   distributed with this work for additional information
+ *   regarding copyright ownership.  The ASF licenses this file
+ *   to you under the Apache License, Version 2.0 (the
+ *   "License"); you may not use this file except in compliance
+ *   with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *   Unless required by applicable law or agreed to in writing, software
+ *   distributed under the License is distributed on an "AS IS" BASIS,
+ *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *   See the License for the specific language governing

[22/34] phoenix git commit: PHOENIX-1978 UDF ArgumentTypeMismatchException(Rajeshbabu)

2015-07-20 Thread greid
PHOENIX-1978 UDF ArgumentTypeMismatchException(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/18b9e727
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/18b9e727
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/18b9e727

Branch: refs/heads/4.x-HBase-1.1
Commit: 18b9e72756642e127b2e227ea46a4f70401e6187
Parents: 58ee706
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Fri Jun 5 09:04:17 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Fri Jun 5 09:04:17 2015 +0530

--
 .../phoenix/end2end/UserDefinedFunctionsIT.java | 58 ++--
 phoenix-core/src/main/antlr3/PhoenixSQL.g   | 17 +++---
 2 files changed, 61 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/18b9e727/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
index 868e19d..c6bd62f 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
@@ -58,6 +58,8 @@ import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.schema.FunctionAlreadyExistsException;
 import org.apache.phoenix.schema.FunctionNotFoundException;
 import org.apache.phoenix.schema.ValueRangeExcpetion;
+import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.schema.types.PArrayDataType;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.QueryUtil;
 import org.apache.phoenix.util.ReadOnlyProps;
@@ -121,11 +123,31 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{
             .append("        ptr.set(PInteger.INSTANCE.toBytes((Integer)sum));\n")
             .append("        return true;\n")
             .append("    }\n").toString();
-
+    private static String ARRAY_INDEX_EVALUATE_METHOD =
+            new StringBuffer()
+                    .append("    public boolean evaluate(Tuple tuple, ImmutableBytesWritable ptr) {\n")
+                    .append("        Expression indexExpr = children.get(1);\n")
+                    .append("        if (!indexExpr.evaluate(tuple, ptr)) {\n")
+                    .append("           return false;\n")
+                    .append("        } else if (ptr.getLength() == 0) {\n")
+                    .append("           return true;\n")
+                    .append("        }\n")
+                    .append("        // Use Codec to prevent Integer object allocation\n")
+                    .append("        int index = PInteger.INSTANCE.getCodec().decodeInt(ptr, indexExpr.getSortOrder());\n")
+                    .append("        if(index < 0) {\n")
+                    .append("           throw new ParseException(\"Index cannot be negative :\" + index);\n")
+                    .append("        }\n")
+                    .append("        Expression arrayExpr = children.get(0);\n")
+                    .append("        return PArrayDataType.positionAtArrayElement(tuple, ptr, index, arrayExpr, getDataType(),getMaxLength());\n")
+                    .append("    }\n").toString();
+
+
     private static String MY_REVERSE_CLASS_NAME = "MyReverse";
     private static String MY_SUM_CLASS_NAME = "MySum";
-    private static String MY_REVERSE_PROGRAM = getProgram(MY_REVERSE_CLASS_NAME, STRING_REVERSE_EVALUATE_METHOD, "PVarchar");
-    private static String MY_SUM_PROGRAM = getProgram(MY_SUM_CLASS_NAME, SUM_COLUMN_VALUES_EVALUATE_METHOD, "PInteger");
+    private static String MY_ARRAY_INDEX_CLASS_NAME = "MyArrayIndex";
+    private static String MY_REVERSE_PROGRAM = getProgram(MY_REVERSE_CLASS_NAME, STRING_REVERSE_EVALUATE_METHOD, "return PVarchar.INSTANCE;");
+    private static String MY_SUM_PROGRAM = getProgram(MY_SUM_CLASS_NAME, SUM_COLUMN_VALUES_EVALUATE_METHOD, "return PInteger.INSTANCE;");
+    private static String MY_ARRAY_INDEX_PROGRAM = getProgram(MY_ARRAY_INDEX_CLASS_NAME, ARRAY_INDEX_EVALUATE_METHOD, "return PDataType.fromTypeId(children.get(0).getDataType().getSqlType()-PDataType.ARRAY_TYPE_BASE);");
     private static Properties EMPTY_PROPS = new Properties();
 
 
@@ -144,6 +166,8 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{
             .append("import org.apache.phoenix.schema.types.PInteger;\n")
             .append("import org.apache.phoenix.schema.types.PVarchar;\n")
             .append("import org.apache.phoenix.util.StringUtil;\n")
+            .append("import
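For orientation, a rough sketch of how a class generated this way is registered and invoked through Phoenix's UDF support; the function name, signature, and jar path below are illustrative, not taken from this test:

    Connection conn = DriverManager.getConnection(url);
    Statement stmt = conn.createStatement();
    // Register the compiled class as a scalar function, then call it from SQL.
    stmt.execute("CREATE FUNCTION myreverse(VARCHAR) RETURNS VARCHAR " +
            "AS 'MyReverse' USING JAR '/tmp/myudfs.jar'");
    ResultSet rs = stmt.executeQuery("SELECT myreverse(varchar_col) FROM my_table");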

[17/34] phoenix git commit: PHOENIX-1976 Exit gracefully if addShutdownHook fails.

2015-07-20 Thread greid
PHOENIX-1976 Exit gracefully if addShutdownHook fails.

If the JVM is already in the process of shutting down,
we don't need to add the shutdown hook for the PhoenixDriver
instance. Additionally, we shouldn't advertise this instance
either since we're going down.
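The general pattern, as a minimal self-contained sketch (the resource and its initializer are hypothetical; the actual patch applies this to the driver instance):

    final AutoCloseable resource = openSomething(); // hypothetical resource
    try {
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                try { resource.close(); } catch (Exception e) { /* log and move on */ }
            }
        });
    } catch (IllegalStateException e) {
        // addShutdownHook throws IllegalStateException once shutdown has begun,
        // so release the resource inline instead of advertising it.
        try { resource.close(); } catch (Exception ignored) { }
        throw e;
    }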


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f2be9138
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f2be9138
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f2be9138

Branch: refs/heads/4.x-HBase-1.1
Commit: f2be9138359b078fd3e285f3fd441de711789962
Parents: dc46b14
Author: Josh Elser josh.el...@gmail.com
Authored: Thu May 14 17:40:46 2015 -0400
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Mon Jun 1 12:02:28 2015 -0700

--
 .../org/apache/phoenix/jdbc/PhoenixDriver.java  | 46 ++--
 1 file changed, 32 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f2be9138/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java
index 6360d06..cfabe82 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java
@@ -60,25 +60,43 @@ public final class PhoenixDriver extends PhoenixEmbeddedDriver {
     private static volatile String driverShutdownMsg;
     static {
         try {
-            DriverManager.registerDriver( INSTANCE = new PhoenixDriver() );
-            // Add shutdown hook to release any resources that were never closed
-            // In theory not necessary, but it won't hurt anything
-            Runtime.getRuntime().addShutdownHook(new Thread() {
-                @Override
-                public void run() {
-                    try {
-                        INSTANCE.close();
-                    } catch (SQLException e) {
-                        logger.warn("Unable to close PhoenixDriver on shutdown", e);
-                    } finally {
-                        driverShutdownMsg = "Phoenix driver closed because server is shutting down";
+            INSTANCE = new PhoenixDriver();
+            try {
+                // Add shutdown hook to release any resources that were never closed
+                // In theory not necessary, but it won't hurt anything
+                Runtime.getRuntime().addShutdownHook(new Thread() {
+                    @Override
+                    public void run() {
+                        closeInstance(INSTANCE);
                     }
-                }
-            });
+                });
+
+                // Only register the driver when we successfully register the shutdown hook
+                // Don't want to register it if we're already in the process of going down.
+                DriverManager.registerDriver( INSTANCE );
+            } catch (IllegalStateException e) {
+                logger.warn("Failed to register PhoenixDriver shutdown hook as the JVM is already shutting down");
+
+                // Close the instance now because we don't have the shutdown hook
+                closeInstance(INSTANCE);
+
+                throw e;
+            }
         } catch (SQLException e) {
             throw new IllegalStateException("Unable to register " + PhoenixDriver.class.getName() + ": " + e.getMessage());
         }
     }
+
+    private static void closeInstance(PhoenixDriver instance) {
+        try {
+            instance.close();
+        } catch (SQLException e) {
+            logger.warn("Unable to close PhoenixDriver on shutdown", e);
+        } finally {
+            driverShutdownMsg = "Phoenix driver closed because server is shutting down";
+        }
+    }
+
     // One entry per cluster here
     private final ConcurrentMap<ConnectionInfo,ConnectionQueryServices> connectionQueryServicesMap = new ConcurrentHashMap<ConnectionInfo,ConnectionQueryServices>(3);
 



[18/34] phoenix git commit: PHOENIX-1962 Apply check style to the build

2015-07-20 Thread greid
PHOENIX-1962 Apply check style to the build


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/29ea5035
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/29ea5035
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/29ea5035

Branch: refs/heads/4.x-HBase-1.1
Commit: 29ea503546265a619ce501c477a109b69f940a00
Parents: f2be913
Author: Nick Dimiduk ndimi...@apache.org
Authored: Sat May 9 11:10:54 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Mon Jun 1 12:21:48 2015 -0700

--
 phoenix-assembly/pom.xml|   4 +
 phoenix-core/pom.xml|   4 +
 phoenix-flume/pom.xml   |   4 +
 phoenix-pherf/pom.xml   |   1 +
 phoenix-pig/pom.xml |   4 +
 phoenix-server-client/pom.xml   |   4 +
 phoenix-server/pom.xml  |   4 +
 phoenix-spark/pom.xml   |   1 +
 pom.xml |  23 ++
 src/main/config/checkstyle/checker.xml  | 281 +++
 src/main/config/checkstyle/header.txt   |  16 ++
 src/main/config/checkstyle/suppressions.xml |  46 
 12 files changed, 392 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/29ea5035/phoenix-assembly/pom.xml
--
diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml
index 04d9335..d275d03 100644
--- a/phoenix-assembly/pom.xml
+++ b/phoenix-assembly/pom.xml
@@ -33,6 +33,10 @@
   <description>Assemble Phoenix artifacts</description>
   <packaging>pom</packaging>
 
+  <properties>
+    <top.dir>${project.basedir}/..</top.dir>
+  </properties>
+
   <build>
     <plugins>
       <plugin>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/29ea5035/phoenix-core/pom.xml
--
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
index 951e969..6302441 100644
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@ -24,6 +24,10 @@
     <url>http://www.apache.org</url>
   </organization>
 
+  <properties>
+    <top.dir>${project.basedir}/..</top.dir>
+  </properties>
+
   <build>
     <resources>
       <resource>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/29ea5035/phoenix-flume/pom.xml
--
diff --git a/phoenix-flume/pom.xml b/phoenix-flume/pom.xml
index ea87ab0..c7f0650 100644
--- a/phoenix-flume/pom.xml
+++ b/phoenix-flume/pom.xml
@@ -31,6 +31,10 @@
   <artifactId>phoenix-flume</artifactId>
   <name>Phoenix - Flume</name>
 
+  <properties>
+    <top.dir>${project.basedir}/..</top.dir>
+  </properties>
+
   <dependencies>
     <dependency>
       <groupId>org.apache.phoenix</groupId>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/29ea5035/phoenix-pherf/pom.xml
--
diff --git a/phoenix-pherf/pom.xml b/phoenix-pherf/pom.xml
index e751d73..dd45075 100644
--- a/phoenix-pherf/pom.xml
+++ b/phoenix-pherf/pom.xml
@@ -30,6 +30,7 @@
     <name>Phoenix - Pherf</name>
 
     <properties>
+      <top.dir>${project.basedir}/..</top.dir>
     </properties>
 
     <profiles>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/29ea5035/phoenix-pig/pom.xml
--
diff --git a/phoenix-pig/pom.xml b/phoenix-pig/pom.xml
index 957c06f..55b34d3 100644
--- a/phoenix-pig/pom.xml
+++ b/phoenix-pig/pom.xml
@@ -31,6 +31,10 @@
   <artifactId>phoenix-pig</artifactId>
   <name>Phoenix - Pig</name>
 
+  <properties>
+    <top.dir>${project.basedir}/..</top.dir>
+  </properties>
+
   <dependencies>
     <dependency>
       <groupId>org.apache.phoenix</groupId>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/29ea5035/phoenix-server-client/pom.xml
--
diff --git a/phoenix-server-client/pom.xml b/phoenix-server-client/pom.xml
index 748e57c..3e54a07 100644
--- a/phoenix-server-client/pom.xml
+++ b/phoenix-server-client/pom.xml
@@ -24,6 +24,10 @@
     <url>http://www.apache.org</url>
   </organization>
 
+  <properties>
+    <top.dir>${project.basedir}/..</top.dir>
+  </properties>
+
   <build>
     <plugins>
       <plugin>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/29ea5035/phoenix-server/pom.xml
--
diff --git a/phoenix-server/pom.xml b/phoenix-server/pom.xml
index ab9a472..86b2525 100644
--- a/phoenix-server/pom.xml
+++ b/phoenix-server/pom.xml
@@ -24,6 +24,10 @@
     <url>http://www.apache.org</url>
   </organization>
 
+  <properties>
+    <top.dir>${project.basedir}/..</top.dir>
+  </properties>
+
   <build>
     <plugins>
       <plugin>


[21/34] phoenix git commit: PHOENIX-777 - Support null value for fixed length ARRAY - Addendum (Ram)

2015-07-20 Thread greid
PHOENIX-777 - Support null value for fixed length ARRAY - Addendum (Ram)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/58ee7062
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/58ee7062
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/58ee7062

Branch: refs/heads/4.x-HBase-1.1
Commit: 58ee7062c624dd72a5cdaa41ec5b107a1e7b14c2
Parents: 6f890ad
Author: ramkrishna ramkrishna.s.vasude...@gmail.com
Authored: Tue Jun 2 14:32:02 2015 +0530
Committer: ramkrishna ramkrishna.s.vasude...@gmail.com
Committed: Tue Jun 2 14:36:05 2015 +0530

--
 .../main/java/org/apache/phoenix/schema/types/PTimestamp.java   | 5 +
 1 file changed, 5 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/58ee7062/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PTimestamp.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PTimestamp.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PTimestamp.java
index d396adc..16b110e 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PTimestamp.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/types/PTimestamp.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.schema.SortOrder;
+import org.apache.phoenix.util.ByteUtil;
 import org.apache.phoenix.util.DateUtil;
 
 public class PTimestamp extends PDataType<Timestamp> {
@@ -47,6 +48,10 @@ public class PTimestamp extends PDataType<Timestamp> {
   @Override
   public int toBytes(Object object, byte[] bytes, int offset) {
     if (object == null) {
+      // Create the byte[] of size MAX_TIMESTAMP_BYTES
+      if(bytes.length != getByteSize()) {
+          bytes = Bytes.padTail(bytes, (getByteSize() - bytes.length));
+      }
       PDate.INSTANCE.getCodec().encodeLong(0l, bytes, offset);
       Bytes.putInt(bytes, offset + Bytes.SIZEOF_LONG, 0);
       return getByteSize();
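A hedged illustration of the padding step above; the 12-byte width (an 8-byte millis long plus a 4-byte nanos int) and the undersized caller buffer are assumptions for the sketch:

    byte[] buf = new byte[8];                  // caller sized the buffer too small
    int tsWidth = 12;                          // assumed MAX_TIMESTAMP_BYTES
    if (buf.length != tsWidth) {
        buf = Bytes.padTail(buf, tsWidth - buf.length); // zero-extend to full width
    }
    Bytes.putLong(buf, 0, 0L);                 // a null timestamp encodes as zero millis
    Bytes.putInt(buf, Bytes.SIZEOF_LONG, 0);   // and zero nanos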



[13/34] phoenix git commit: PHOENIX-2010 Properly validate number of arguments passed to the functions in FunctionParseNode#validate(Rajeshbabu)

2015-07-20 Thread greid
PHOENIX-2010 Properly validate number of arguments passed to the functions in 
FunctionParseNode#validate(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b2c0cb90
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b2c0cb90
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b2c0cb90

Branch: refs/heads/4.x-HBase-1.1
Commit: b2c0cb9002ee881f21d968817c386a98d39074ca
Parents: a600cc4
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Sun May 31 07:40:39 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Sun May 31 07:40:39 2015 +0530

--
 .../phoenix/end2end/UserDefinedFunctionsIT.java   | 14 ++
 .../org/apache/phoenix/parse/FunctionParseNode.java   |  4 
 .../main/java/org/apache/phoenix/parse/PFunction.java |  4 +---
 3 files changed, 19 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b2c0cb90/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
index 7dbde3c..868e19d 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UserDefinedFunctionsIT.java
@@ -442,6 +442,20 @@ public class UserDefinedFunctionsIT extends BaseOwnClusterIT{
         rs = stmt.executeQuery("select k from t9 where mysum9(k)=11");
         assertTrue(rs.next());
         assertEquals(1, rs.getInt(1));
+        try {
+            rs = stmt.executeQuery("select k from t9 where mysum9(k,10,'x')=11");
+            fail("FunctionNotFoundException should be thrown");
+        } catch(FunctionNotFoundException e) {
+        } catch(Exception e) {
+            fail("FunctionNotFoundException should be thrown");
+        }
+        try {
+            rs = stmt.executeQuery("select mysum9() from t9");
+            fail("FunctionNotFoundException should be thrown");
+        } catch(FunctionNotFoundException e) {
+        } catch(Exception e) {
+            fail("FunctionNotFoundException should be thrown");
+        }
         stmt.execute("drop function mysum9");
         try {
             rs = stmt.executeQuery("select k from t9 where mysum9(k)=11");

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b2c0cb90/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java
index d1001ee..be52d89 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java
@@ -41,6 +41,7 @@ import org.apache.phoenix.expression.function.FunctionExpression;
 import org.apache.phoenix.expression.function.UDFExpression;
 import org.apache.phoenix.parse.PFunction.FunctionArgument;
 import org.apache.phoenix.schema.ArgumentTypeMismatchException;
+import org.apache.phoenix.schema.FunctionNotFoundException;
 import org.apache.phoenix.schema.ValueRangeExcpetion;
 import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.schema.types.PDataTypeFactory;
@@ -133,6 +134,9 @@ public class FunctionParseNode extends CompoundParseNode {
     public List<Expression> validate(List<Expression> children, StatementContext context) throws SQLException {
         BuiltInFunctionInfo info = this.getInfo();
         BuiltInFunctionArgInfo[] args = info.getArgs();
+        if (args.length < children.size() || info.getRequiredArgCount() > children.size()) {
+            throw new FunctionNotFoundException(this.name);
+        }
         if (args.length > children.size()) {
             List<Expression> moreChildren = new ArrayList<Expression>(children);
             for (int i = children.size(); i < info.getArgs().length; i++) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b2c0cb90/phoenix-core/src/main/java/org/apache/phoenix/parse/PFunction.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/PFunction.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/PFunction.java
index 351bec7..aeed3ac 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/PFunction.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/PFunction.java
@@ -96,9 +96,7 @@ public class PFunction implements PMetaDataEntity {
 }
 
 public 

[10/34] phoenix git commit: PHOENIX-2013 Apply PHOENIX-1995 to runnable uberjar as well

2015-07-20 Thread greid
PHOENIX-2013 Apply PHOENIX-1995 to runnable uberjar as well


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/0e0b4ddb
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/0e0b4ddb
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/0e0b4ddb

Branch: refs/heads/4.x-HBase-1.1
Commit: 0e0b4ddb4d130b38c7aa28d2e31b0a9552087256
Parents: 1a2f2dc
Author: Nick Dimiduk ndimi...@apache.org
Authored: Wed May 27 11:27:04 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Wed May 27 13:20:32 2015 -0700

--
 phoenix-server/src/build/query-server-runnable.xml | 9 +
 1 file changed, 9 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/0e0b4ddb/phoenix-server/src/build/query-server-runnable.xml
--
diff --git a/phoenix-server/src/build/query-server-runnable.xml b/phoenix-server/src/build/query-server-runnable.xml
index e2a3dc4..ef22b14 100644
--- a/phoenix-server/src/build/query-server-runnable.xml
+++ b/phoenix-server/src/build/query-server-runnable.xml
@@ -28,6 +28,15 @@
     <format>jar</format>
   </formats>
   <includeBaseDirectory>false</includeBaseDirectory>
+  <containerDescriptorHandlers>
+    <containerDescriptorHandler>
+      <!--
+      aggregate SPI's so that things like HDFS FileSystem works in uberjar
+      http://docs.oracle.com/javase/tutorial/sound/SPI-intro.html
+      -->
+      <handlerName>metaInf-services</handlerName>
+    </containerDescriptorHandler>
+  </containerDescriptorHandlers>
   <dependencySets>
     <dependencySet>
       <outputDirectory>/</outputDirectory>
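Why merging META-INF/services matters, sketched with Hadoop's own FileSystem discovery (a plain ServiceLoader scan; the snippet is illustrative, not part of this patch):

    import java.util.ServiceLoader;
    import org.apache.hadoop.fs.FileSystem;

    // Hadoop discovers FileSystem implementations through
    // META-INF/services/org.apache.hadoop.fs.FileSystem entries. If the uberjar
    // keeps only one jar's copy of that file, the other providers vanish.
    ServiceLoader<FileSystem> loader = ServiceLoader.load(FileSystem.class);
    for (FileSystem fs : loader) {
        System.out.println(fs.getScheme());
    }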



[27/34] phoenix git commit: PHOENIX-2040 Mark spark/scala dependencies as 'provided' (Josh Mahonin)

2015-07-20 Thread greid
PHOENIX-2040 Mark spark/scala dependencies as 'provided' (Josh Mahonin)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/43c722ca
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/43c722ca
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/43c722ca

Branch: refs/heads/4.x-HBase-1.1
Commit: 43c722ca6d2d55347d1f2caf7641ce03339e1e1e
Parents: d0bcb7b
Author: Nick Dimiduk ndimi...@apache.org
Authored: Mon Jun 15 16:16:03 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Mon Jun 15 16:16:30 2015 -0700

--
 phoenix-assembly/pom.xml |  4 
 phoenix-spark/pom.xml| 51 ---
 2 files changed, 32 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/43c722ca/phoenix-assembly/pom.xml
--
diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml
index ebc5d71..d275d03 100644
--- a/phoenix-assembly/pom.xml
+++ b/phoenix-assembly/pom.xml
@@ -152,6 +152,10 @@
 /dependency
 dependency
   groupIdorg.apache.phoenix/groupId
+  artifactIdphoenix-spark/artifactId
+/dependency
+dependency
+  groupIdorg.apache.phoenix/groupId
   artifactIdphoenix-server/artifactId
 /dependency
 dependency

http://git-wip-us.apache.org/repos/asf/phoenix/blob/43c722ca/phoenix-spark/pom.xml
--
diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml
index 1747573..aea5c7e 100644
--- a/phoenix-spark/pom.xml
+++ b/phoenix-spark/pom.xml
@@ -45,12 +45,7 @@
   groupIdorg.apache.phoenix/groupId
   artifactIdphoenix-core/artifactId
 /dependency
-dependency
-  groupIdorg.apache.phoenix/groupId
-  artifactIdphoenix-core/artifactId
-  classifiertests/classifier
-  scopetest/scope
-/dependency
+
 !-- Force import of Spark's servlet API for unit tests --
 dependency
   groupIdjavax.servlet/groupId
@@ -59,16 +54,38 @@
   scopetest/scope
 /dependency
 
+!-- Mark Spark / Scala as provided --
 dependency
-  groupIdjunit/groupId
-  artifactIdjunit/artifactId
+  groupIdorg.scala-lang/groupId
+  artifactIdscala-library/artifactId
+  version${scala.version}/version
+  scopeprovided/scope
+/dependency
+dependency
+  groupIdorg.apache.spark/groupId
+  artifactIdspark-core_${scala.binary.version}/artifactId
+  version${spark.version}/version
+  scopeprovided/scope
+/dependency
+dependency
+  groupIdorg.apache.spark/groupId
+  artifactIdspark-sql_${scala.binary.version}/artifactId
+  version${spark.version}/version
+  scopeprovided/scope
+/dependency
+
+!-- Test dependencies --
+dependency
+  groupIdorg.apache.phoenix/groupId
+  artifactIdphoenix-core/artifactId
+  classifiertests/classifier
   scopetest/scope
 /dependency
 
 dependency
-  groupIdorg.scala-lang/groupId
-  artifactIdscala-library/artifactId
-  version${scala.version}/version
+  groupIdjunit/groupId
+  artifactIdjunit/artifactId
+  scopetest/scope
 /dependency
 
 dependency
@@ -86,18 +103,6 @@
 /dependency
 
 dependency
-  groupIdorg.apache.spark/groupId
-  artifactIdspark-core_${scala.binary.version}/artifactId
-  version${spark.version}/version
-/dependency
-
-dependency
-  groupIdorg.apache.spark/groupId
-  artifactIdspark-sql_${scala.binary.version}/artifactId
-  version${spark.version}/version
-/dependency
-
-dependency
   groupIdorg.apache.hadoop/groupId
   artifactIdhadoop-client/artifactId
   version${hadoop-two.version}/version



[20/34] phoenix git commit: PHOENIX-777 - Support null value for fixed length ARRAY (Dumindu Buddhika)

2015-07-20 Thread greid
PHOENIX-777 - Support null value for fixed length ARRAY (Dumindu Buddhika)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6f890ade
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6f890ade
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6f890ade

Branch: refs/heads/4.x-HBase-1.1
Commit: 6f890ade0691d03469ff8fce81c2fa9edd6941af
Parents: 9c5f111
Author: ramkrishna ramkrishna.s.vasude...@gmail.com
Authored: Tue Jun 2 11:18:51 2015 +0530
Committer: ramkrishna ramkrishna.s.vasude...@gmail.com
Committed: Tue Jun 2 11:18:51 2015 +0530

--
 .../phoenix/end2end/ArraysWithNullsIT.java  | 300 +++
 .../phoenix/compile/ExpressionCompiler.java |   9 +-
 .../apache/phoenix/schema/types/PBinary.java|   2 +-
 .../org/apache/phoenix/schema/types/PChar.java  |   5 +-
 .../org/apache/phoenix/schema/types/PDate.java  |   6 +-
 .../apache/phoenix/schema/types/PDecimal.java   |   3 +
 .../apache/phoenix/schema/types/PTimestamp.java |  17 +-
 .../phoenix/schema/types/PhoenixArray.java  |  51 ++--
 8 files changed, 358 insertions(+), 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6f890ade/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java
new file mode 100644
index 000..b034193
--- /dev/null
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java
@@ -0,0 +1,300 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.phoenix.end2end;
+
+import static org.junit.Assert.assertEquals;
+
+import java.sql.*;
+
+import org.apache.phoenix.schema.types.PTimestamp;
+import org.apache.phoenix.schema.types.PhoenixArray;
+import org.junit.Test;
+
+public class ArraysWithNullsIT extends BaseClientManagedTimeIT {
+
+    @Test
+    public void testArrayUpsertIntWithNulls() throws Exception {
+        Connection conn = DriverManager.getConnection(getUrl());
+        conn.createStatement().execute("CREATE TABLE t1 ( k VARCHAR PRIMARY KEY, a INTEGER[])");
+
+        PreparedStatement stmt = conn.prepareStatement("UPSERT INTO t1 VALUES('a',ARRAY[null,3,null])");
+        stmt.execute();
+        conn.commit();
+
+        ResultSet rs = conn.createStatement().executeQuery("Select a from t1 where k = 'a'");
+        rs.next();
+        Array array = conn.createArrayOf("INTEGER", new Object[]{null,3,null});
+
+        assertEquals(rs.getArray(1), array);
+        conn.close();
+
+    }
+
+
+
+    @Test
+    public void testArrayUpsertVarcharWithNulls() throws Exception {
+        Connection conn = DriverManager.getConnection(getUrl());
+        conn.createStatement().execute("CREATE TABLE t2 ( k VARCHAR PRIMARY KEY, a VARCHAR[])");
+
+        PreparedStatement stmt = conn.prepareStatement("UPSERT INTO t2 VALUES('a',ARRAY['10',null])");
+        stmt.execute();
+        conn.commit();
+
+        ResultSet rs = conn.createStatement().executeQuery("Select a from t2 where k = 'a'");
+        rs.next();
+        Array array = conn.createArrayOf("VARCHAR", new Object[]{"10",null});
+
+        assertEquals(rs.getArray(1), array);
+        conn.close();
+
+    }
+
+    @Test
+    public void testArrayUpsertBigIntWithNulls() throws Exception {
+        Connection conn = DriverManager.getConnection(getUrl());
+        conn.createStatement().execute("CREATE TABLE t3 ( k VARCHAR PRIMARY KEY, a BIGINT[])");
+
+        PreparedStatement stmt = conn.prepareStatement("UPSERT INTO t3 VALUES('a',ARRAY[2,null,32335,4])");
+        stmt.execute();
+        conn.commit();
+
+        ResultSet rs = conn.createStatement().executeQuery("Select a from t3 where k = 'a'");
+        rs.next();
+        Array array = conn.createArrayOf("BIGINT", new Object[]{(long)2,null,(long)32335,(long)4});
+
+        assertEquals(rs.getArray(1), array);
+        conn.close();

[08/34] phoenix git commit: PHOENIX-2005 Connection utilities omit zk client port, parent znode (addendum)

2015-07-20 Thread greid
PHOENIX-2005 Connection utilities omit zk client port, parent znode (addendum)
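A quick sketch of the intended effect (cluster details illustrative): QueryUtil.getConnectionUrl, which appears in the hunk below, should yield a fully qualified JDBC URL including the zk client port and parent znode, and short forms such as plain "localhost" are normalized like their "jdbc:phoenix:..." equivalents:

    Properties props = new Properties();
    Configuration conf = HBaseConfiguration.create();
    String url = QueryUtil.getConnectionUrl(props, conf);
    // e.g. "jdbc:phoenix:zk1,zk2,zk3:2181:/hbase" (illustrative)
    Connection conn = DriverManager.getConnection(url, props);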


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/5546a422
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/5546a422
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/5546a422

Branch: refs/heads/4.x-HBase-1.1
Commit: 5546a42226e3f0fdf0cc89f1c175ff3da7a75d8c
Parents: c6b37b9
Author: Nick Dimiduk ndimi...@apache.org
Authored: Tue May 26 17:41:04 2015 -0700
Committer: Nick Dimiduk ndimi...@apache.org
Committed: Tue May 26 17:52:24 2015 -0700

--
 .../phoenix/jdbc/PhoenixEmbeddedDriver.java |  2 +-
 .../java/org/apache/phoenix/util/QueryUtil.java |  2 +-
 .../phoenix/jdbc/PhoenixEmbeddedDriverTest.java | 20 
 3 files changed, 22 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/5546a422/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
index 2451603..3cfaacc 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
@@ -209,7 +209,7 @@ public abstract class PhoenixEmbeddedDriver implements Driver, org.apache.phoeni
         url = url == null ? "" : url;
         url = url.startsWith(PhoenixRuntime.JDBC_PROTOCOL)
                 ? url.substring(PhoenixRuntime.JDBC_PROTOCOL.length())
-                : url;
+                : PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR + url;
         StringTokenizer tokenizer = new StringTokenizer(url, DELIMITERS, true);
         int nTokens = 0;
         String[] tokens = new String[5];

http://git-wip-us.apache.org/repos/asf/phoenix/blob/5546a422/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
index bd38983..a2d4a91 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java
@@ -290,7 +290,7 @@ public final class QueryUtil {
             throws ClassNotFoundException,
             SQLException {
         String url = getConnectionUrl(props, conf);
-        LOG.info("Creating connection with the jdbc url:" + url);
+        LOG.info("Creating connection with the jdbc url: " + url);
         PropertiesUtil.extractProperties(props, conf);
         return DriverManager.getConnection(url, props);
     }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/5546a422/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriverTest.java
--
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriverTest.java b/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriverTest.java
index 083b205..4eda825 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriverTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriverTest.java
@@ -34,23 +34,33 @@ public class PhoenixEmbeddedDriverTest {
     @Test
     public void testGetConnectionInfo() throws SQLException {
         String[] urls = new String[] {
+            null,
+            "",
             "jdbc:phoenix",
             "jdbc:phoenix;test=true",
             "jdbc:phoenix:localhost",
+            "localhost",
+            "localhost;",
             "jdbc:phoenix:localhost:123",
             "jdbc:phoenix:localhost:123;foo=bar",
+            "localhost:123",
             "jdbc:phoenix:localhost:123:/hbase",
             "jdbc:phoenix:localhost:123:/foo-bar",
             "jdbc:phoenix:localhost:123:/foo-bar;foo=bas",
+            "localhost:123:/foo-bar",
             "jdbc:phoenix:localhost:/hbase",
             "jdbc:phoenix:localhost:/foo-bar",
             "jdbc:phoenix:localhost:/foo-bar;test=true",
+            "localhost:/foo-bar",
             "jdbc:phoenix:v1,v2,v3",
             "jdbc:phoenix:v1,v2,v3;",
             "jdbc:phoenix:v1,v2,v3;test=true",
+            "v1,v2,v3",
             "jdbc:phoenix:v1,v2,v3:/hbase",
             "jdbc:phoenix:v1,v2,v3:/hbase;test=true",
+            "v1,v2,v3:/foo-bar",
             "jdbc:phoenix:v1,v2,v3:123:/hbase",
+            "v1,v2,v3:123:/hbase",
             "jdbc:phoenix:v1,v2,v3:123:/hbase;test=false",

phoenix git commit: PHOENIX-2131 Closing paren in CastParseNode SQL

2015-07-20 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 73da0fb0d -> 8eb9afeb6


PHOENIX-2131 Closing paren in CastParseNode SQL

Add a missing closing parenthesis in CastParseNode.toSQL.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8eb9afeb
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8eb9afeb
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8eb9afeb

Branch: refs/heads/4.x-HBase-0.98
Commit: 8eb9afeb6d0024265c8a8526218ac1c35076ec80
Parents: 73da0fb
Author: Gabriel Reid gr...@apache.org
Authored: Sun Jul 19 17:46:48 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jul 20 15:24:35 2015 +0200

--
 .../org/apache/phoenix/parse/CastParseNode.java |  2 +-
 .../apache/phoenix/parse/CastParseNodeTest.java | 57 
 2 files changed, 58 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/8eb9afeb/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
index 78be616..3e03613 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
@@ -133,7 +133,7 @@ public class CastParseNode extends UnaryParseNode {
         if (isArray) {
             buf.append(' ');
             buf.append(PDataType.ARRAY_TYPE_SUFFIX);
-            buf.append(' ');
         }
+        buf.append(")");
     }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8eb9afeb/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
--
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
new file mode 100644
index 000..b62d9a9
--- /dev/null
+++ b/phoenix-core/src/test/java/org/apache/phoenix/parse/CastParseNodeTest.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.phoenix.parse;
+
+import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.schema.types.PDecimal;
+import org.apache.phoenix.schema.types.PDouble;
+import org.apache.phoenix.schema.types.PLong;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class CastParseNodeTest {
+
+    @Test
+    public void testToSQL() {
+        ColumnParseNode columnParseNode = new ColumnParseNode(TableName.create("SCHEMA1", "TABLE1"), "V");
+        CastParseNode castParseNode = new CastParseNode(columnParseNode, PLong.INSTANCE, null, null, false);
+        StringBuilder stringBuilder = new StringBuilder();
+        castParseNode.toSQL(null, stringBuilder);
+        assertEquals(" CAST(TABLE1.V AS BIGINT)", stringBuilder.toString());
+    }
+
+    @Test
+    public void testToSQL_WithLengthAndScale() {
+        ColumnParseNode columnParseNode = new ColumnParseNode(TableName.create("SCHEMA1", "TABLE1"), "V");
+        CastParseNode castParseNode = new CastParseNode(columnParseNode, PDecimal.INSTANCE, 5, 3, false);
+        StringBuilder stringBuilder = new StringBuilder();
+        castParseNode.toSQL(null, stringBuilder);
+        assertEquals(" CAST(TABLE1.V AS DECIMAL(5,3))", stringBuilder.toString());
+    }
+
+    @Test
+    public void testToSQL_ArrayType() {
+        ColumnParseNode columnParseNode = new ColumnParseNode(TableName.create("SCHEMA1", "TABLE1"), "V");
+        CastParseNode castParseNode = new CastParseNode(columnParseNode, PLong.INSTANCE, null, null, true);
+        StringBuilder stringBuilder = new StringBuilder();
+        castParseNode.toSQL(null, stringBuilder);
+        assertEquals(" CAST(TABLE1.V AS BIGINT ARRAY)", stringBuilder.toString());
+    }
+}
\ No newline at end of file



[03/34] phoenix git commit: PHOENIX-1763 Support building with HBase-1.1.0

2015-07-20 Thread greid
http://git-wip-us.apache.org/repos/asf/phoenix/blob/98271b88/phoenix-pig/pom.xml
--
diff --git a/phoenix-pig/pom.xml b/phoenix-pig/pom.xml
index 2db1af6..015a660 100644
--- a/phoenix-pig/pom.xml
+++ b/phoenix-pig/pom.xml
@@ -54,7 +54,6 @@
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-testing-util</artifactId>
-      <version>${hbase.version}</version>
       <scope>test</scope>
       <optional>true</optional>
       <exclusions>
@@ -67,7 +66,6 @@
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-it</artifactId>
-      <version>${hbase.version}</version>
       <type>test-jar</type>
       <scope>test</scope>
       <exclusions>
@@ -80,41 +78,56 @@
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-common</artifactId>
-      <version>${hbase.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-protocol</artifactId>
-      <version>${hbase.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-client</artifactId>
-      <version>${hbase.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-client</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-hadoop-compat</artifactId>
-      <version>${hbase.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-hadoop-compat</artifactId>
-      <version>${hbase.version}</version>
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-hadoop2-compat</artifactId>
-      <version>${hbase.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-hadoop2-compat</artifactId>
-      <version>${hbase.version}</version>
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/98271b88/phoenix-spark/pom.xml
--
diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml
index adeed88..a232cf4 100644
--- a/phoenix-spark/pom.xml
+++ b/phoenix-spark/pom.xml
@@ -460,6 +460,13 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <version>${hbase.version}</version>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-it</artifactId>
       <version>${hbase.version}</version>
       <type>test-jar</type>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/98271b88/pom.xml
--
diff --git a/pom.xml b/pom.xml
index d310c37..4361e54 100644
--- a/pom.xml
+++ b/pom.xml
@@ -78,7 +78,7 @@
     <test.output.tofile>true</test.output.tofile>
 
     <!-- Hadoop Versions -->
-    <hbase.version>1.0.1</hbase.version>
+    <hbase.version>1.1.0</hbase.version>
     <hadoop-two.version>2.5.1</hadoop-two.version>
 
     <!-- Dependency versions -->
@@ -452,6 +452,11 @@
       <!-- HBase dependencies -->
       <dependency>
         <groupId>org.apache.hbase</groupId>
+        <artifactId>hbase-annotations</artifactId>
+        <version>${hbase.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hbase</groupId>
         <artifactId>hbase-testing-util</artifactId>
         <version>${hbase.version}</version>
         <scope>test</scope>
@@ -488,13 +493,34 @@
       </dependency>
       <dependency>
         <groupId>org.apache.hbase</groupId>
+        <artifactId>hbase-common</artifactId>
+        <version>${hbase.version}</version>
+        <type>test-jar</type>
+        <scope>test</scope>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hbase</groupId>
         <artifactId>hbase-client</artifactId>
         <version>${hbase.version}</version>
       </dependency>
       <dependency>
         <groupId>org.apache.hbase</groupId>
+        <artifactId>hbase-client</artifactId>
+        <version>${hbase.version}</version>
+        <type>test-jar</type>
+        <scope>test</scope>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hbase</groupId>
+        <artifactId>hbase-server</artifactId>
+        <version>${hbase.version}</version>
+      </dependency>
+      <dependency>
+

[24/34] phoenix git commit: PHOENIX-1968: Should support saving arrays

2015-07-20 Thread greid
PHOENIX-1968: Should support saving arrays


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f7d73496
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f7d73496
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f7d73496

Branch: refs/heads/4.x-HBase-1.1
Commit: f7d734966f7172c3bc4a6f0ba31594ba74ee91a1
Parents: bfd860f
Author: ravimagham ravimag...@apache.org
Authored: Thu Jun 11 12:59:48 2015 -0700
Committer: ravimagham ravimag...@apache.org
Committed: Thu Jun 11 12:59:48 2015 -0700

--
 .../apache/phoenix/spark/PhoenixSparkIT.scala   | 21 
 .../phoenix/spark/PhoenixRecordWritable.scala   | 25 
 2 files changed, 41 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f7d73496/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
--
diff --git 
a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala 
b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
index 42e8676..5f256e6 100644
--- a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
+++ b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
@@ -415,4 +415,25 @@ class PhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfterAll {
 
     results.toList shouldEqual checkResults
   }
+
+  test("Can save arrays back to phoenix") {
+    val dataSet = List((2L, Array("String1", "String2", "String3")))
+
+    sc
+      .parallelize(dataSet)
+      .saveToPhoenix(
+        "ARRAY_TEST_TABLE",
+        Seq("ID","VCARRAY"),
+        zkUrl = Some(quorumAddress)
+      )
+
+    // Load the results back
+    val stmt = conn.createStatement()
+    val rs = stmt.executeQuery("SELECT VCARRAY FROM ARRAY_TEST_TABLE WHERE ID = 2")
+    rs.next()
+    val sqlArray = rs.getArray(1).getArray().asInstanceOf[Array[String]]
+
+    // Verify the arrays are equal
+    sqlArray shouldEqual dataSet(0)._2
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f7d73496/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRecordWritable.scala
--
diff --git 
a/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRecordWritable.scala
 
b/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRecordWritable.scala
index 67e0bd2..3977657 100644
--- 
a/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRecordWritable.scala
+++ 
b/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRecordWritable.scala
@@ -16,11 +16,12 @@ package org.apache.phoenix.spark
 import java.sql.{PreparedStatement, ResultSet}
 import org.apache.hadoop.mapreduce.lib.db.DBWritable
 import org.apache.phoenix.mapreduce.util.ColumnInfoToStringEncoderDecoder
-import org.apache.phoenix.schema.types.{PDate, PhoenixArray}
+import org.apache.phoenix.schema.types.{PDataType, PDate, PhoenixArray}
 import org.joda.time.DateTime
 import scala.collection.{immutable, mutable}
 import scala.collection.JavaConversions._
 
+
 class PhoenixRecordWritable(var encodedColumns: String) extends DBWritable {
   val upsertValues = mutable.ArrayBuffer[Any]()
   val resultMap = mutable.Map[String, AnyRef]()
@@ -44,13 +45,27 @@ class PhoenixRecordWritable(var encodedColumns: String) extends DBWritable {
     upsertValues.zip(columns).zipWithIndex.foreach {
       case ((v, c), i) => {
         if (v != null) {
+
           // Both Java and Joda dates used to work in 4.2.3, but now they must be java.sql.Date
+          // Can override any other types here as needed
           val (finalObj, finalType) = v match {
-            case dt: DateTime => (new java.sql.Date(dt.getMillis), PDate.INSTANCE.getSqlType)
-            case d: java.util.Date => (new java.sql.Date(d.getTime), PDate.INSTANCE.getSqlType)
-            case _ => (v, c.getSqlType)
+            case dt: DateTime => (new java.sql.Date(dt.getMillis), PDate.INSTANCE)
+            case d: java.util.Date => (new java.sql.Date(d.getTime), PDate.INSTANCE)
+            case _ => (v, c.getPDataType)
+          }
+
+          // Save as array or object
+          finalObj match {
+            case obj: Array[AnyRef] => {
+              // Create a java.sql.Array, need to lookup the base sql type name
+              val sqlArray = statement.getConnection.createArrayOf(
+                PDataType.arrayBaseType(finalType).getSqlTypeName,
+                obj
+              )
+              statement.setArray(i + 1, sqlArray)
+            }
+            case _ => statement.setObject(i + 1, finalObj)
           }
-          statement.setObject(i + 1, finalObj, finalType)
         } else {
           
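
The key JDBC step in the diff above is createArrayOf; here is a minimal standalone sketch of the same technique. The UPSERT statement, table, and column names are illustrative (borrowed from the integration test), not prescribed by the commit.

    import java.sql.Array;
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;

    public class ArrayUpsertSketch {
        public static void main(String[] args) throws Exception {
            Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
            PreparedStatement stmt =
                    conn.prepareStatement("UPSERT INTO ARRAY_TEST_TABLE (ID, VCARRAY) VALUES (?, ?)");
            stmt.setLong(1, 2L);
            // JDBC wants the base SQL type name ("VARCHAR"), not "VARCHAR ARRAY",
            // which is why the writable looks it up via PDataType.arrayBaseType.
            Array sqlArray = conn.createArrayOf("VARCHAR",
                    new String[] {"String1", "String2", "String3"});
            stmt.setArray(2, sqlArray);
            stmt.execute();
            conn.commit();
        }
    }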

[06/34] phoenix git commit: Changing version to 4.5.0-HBase-1.1-SNAPSHOT

2015-07-20 Thread greid
Changing version to 4.5.0-HBase-1.1-SNAPSHOT


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/3cdc3230
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/3cdc3230
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/3cdc3230

Branch: refs/heads/4.x-HBase-1.1
Commit: 3cdc3230c570ee8c22bb6c1bab975699fd02e94c
Parents: 56e1c0a
Author: Rajeshbabu Chintaguntla rajeshb...@apache.org
Authored: Mon May 25 17:46:18 2015 +0530
Committer: Rajeshbabu Chintaguntla rajeshb...@apache.org
Committed: Mon May 25 17:46:18 2015 +0530

--
 phoenix-assembly/pom.xml  | 2 +-
 phoenix-core/pom.xml  | 2 +-
 phoenix-flume/pom.xml | 2 +-
 phoenix-pherf/pom.xml | 2 +-
 phoenix-pig/pom.xml   | 2 +-
 phoenix-server-client/pom.xml | 2 +-
 phoenix-server/pom.xml| 2 +-
 phoenix-spark/pom.xml | 2 +-
 pom.xml   | 2 +-
 9 files changed, 9 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cdc3230/phoenix-assembly/pom.xml
--
diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml
index 8d9a965..04d9335 100644
--- a/phoenix-assembly/pom.xml
+++ b/phoenix-assembly/pom.xml
@@ -26,7 +26,7 @@
   <parent>
     <groupId>org.apache.phoenix</groupId>
    <artifactId>phoenix</artifactId>
-    <version>4.4.0-SNAPSHOT</version>
+    <version>4.5.0-HBase-1.1-SNAPSHOT</version>
   </parent>
   <artifactId>phoenix-assembly</artifactId>
   <name>Phoenix Assembly</name>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cdc3230/phoenix-core/pom.xml
--
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
index 22e6b60..951e969 100644
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@ -4,7 +4,7 @@
   <parent>
     <groupId>org.apache.phoenix</groupId>
     <artifactId>phoenix</artifactId>
-    <version>4.4.0-SNAPSHOT</version>
+    <version>4.5.0-HBase-1.1-SNAPSHOT</version>
   </parent>
   <artifactId>phoenix-core</artifactId>
   <name>Phoenix Core</name>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cdc3230/phoenix-flume/pom.xml
--
diff --git a/phoenix-flume/pom.xml b/phoenix-flume/pom.xml
index b2b9a47..ea87ab0 100644
--- a/phoenix-flume/pom.xml
+++ b/phoenix-flume/pom.xml
@@ -26,7 +26,7 @@
   <parent>
     <groupId>org.apache.phoenix</groupId>
     <artifactId>phoenix</artifactId>
-    <version>4.4.0-SNAPSHOT</version>
+    <version>4.5.0-HBase-1.1-SNAPSHOT</version>
   </parent>
   <artifactId>phoenix-flume</artifactId>
   <name>Phoenix - Flume</name>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cdc3230/phoenix-pherf/pom.xml
--
diff --git a/phoenix-pherf/pom.xml b/phoenix-pherf/pom.xml
index 0901f71..e751d73 100644
--- a/phoenix-pherf/pom.xml
+++ b/phoenix-pherf/pom.xml
@@ -22,7 +22,7 @@
     <parent>
         <groupId>org.apache.phoenix</groupId>
         <artifactId>phoenix</artifactId>
-        <version>4.4.0-SNAPSHOT</version>
+        <version>4.5.0-HBase-1.1-SNAPSHOT</version>
     </parent>
 
     <artifactId>phoenix-pherf</artifactId>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cdc3230/phoenix-pig/pom.xml
--
diff --git a/phoenix-pig/pom.xml b/phoenix-pig/pom.xml
index 015a660..957c06f 100644
--- a/phoenix-pig/pom.xml
+++ b/phoenix-pig/pom.xml
@@ -26,7 +26,7 @@
   <parent>
     <groupId>org.apache.phoenix</groupId>
     <artifactId>phoenix</artifactId>
-    <version>4.4.0-SNAPSHOT</version>
+    <version>4.5.0-HBase-1.1-SNAPSHOT</version>
   </parent>
   <artifactId>phoenix-pig</artifactId>
   <name>Phoenix - Pig</name>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cdc3230/phoenix-server-client/pom.xml
--
diff --git a/phoenix-server-client/pom.xml b/phoenix-server-client/pom.xml
index 4d6fd45..748e57c 100644
--- a/phoenix-server-client/pom.xml
+++ b/phoenix-server-client/pom.xml
@@ -4,7 +4,7 @@
   <parent>
     <groupId>org.apache.phoenix</groupId>
     <artifactId>phoenix</artifactId>
-    <version>4.4.0-SNAPSHOT</version>
+    <version>4.5.0-HBase-1.1-SNAPSHOT</version>
   </parent>
   <artifactId>phoenix-server-client</artifactId>
   <name>Phoenix Query Server Client</name>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3cdc3230/phoenix-server/pom.xml
--
diff --git a/phoenix-server/pom.xml b/phoenix-server/pom.xml
index 9f6289f..ab9a472 100644
--- a/phoenix-server/pom.xml
+++ b/phoenix-server/pom.xml
@@ -4,7 +4,7 @@
   <parent>
     <groupId>org.apache.phoenix</groupId>
     <artifactId>phoenix</artifactId>
-    <version>4.4.0-SNAPSHOT</version>
+

phoenix git commit: LP-1277 Support nulls in CHAR fields in CSV loader

2015-06-29 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.1 c40d64b3b - 89ab41c1c


LP-1277 Support nulls in CHAR fields in CSV loader


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/89ab41c1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/89ab41c1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/89ab41c1

Branch: refs/heads/4.x-HBase-1.1
Commit: 89ab41c1c22a3b34c2571fd950c0936440b6f822
Parents: c40d64b
Author: Gabriel Reid gabri...@ngdata.com
Authored: Thu Jun 25 21:36:51 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jun 29 08:35:10 2015 +0200

--
 .../phoenix/end2end/CSVCommonsLoaderIT.java   | 18 +++---
 .../org/apache/phoenix/schema/types/PChar.java|  3 ---
 .../phoenix/util/csv/CsvUpsertExecutor.java   |  5 -
 3 files changed, 15 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/89ab41c1/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
index d07ed8d..c7287ea 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
@@ -46,9 +46,10 @@ import org.junit.Test;
 public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
 
     private static final String DATATYPE_TABLE = "DATATYPE";
-    private static final String DATATYPES_CSV_VALUES = "CKEY, CVARCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE\n"
-            + "KEY1,A,2147483647,1.1,0,TRUE,9223372036854775807,0,1990-12-31 10:59:59,1999-12-31 23:59:59\n"
-            + "KEY2,B,-2147483648,-1.1,2147483647,FALSE,-9223372036854775808,9223372036854775807,2000-01-01 00:00:01,2012-02-29 23:59:59\n";
+    private static final String DATATYPES_CSV_VALUES = "CKEY, CVARCHAR, CCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE\n"
+            + "KEY1,A,A,2147483647,1.1,0,TRUE,9223372036854775807,0,1990-12-31 10:59:59,1999-12-31 23:59:59\n"
+            + "KEY2,B,B,-2147483648,-1.1,2147483647,FALSE,-9223372036854775808,9223372036854775807,2000-01-01 00:00:01,2012-02-29 23:59:59\n"
+            + "KEY3,,\n";
     private static final String STOCK_TABLE = "STOCK_SYMBOL";
     private static final String STOCK_TABLE_MULTI = "STOCK_SYMBOL_MULTI";
     private static final String STOCK_CSV_VALUES = "AAPL,APPLE Inc.\n"
@@ -480,7 +481,7 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
         String statements = "CREATE TABLE IF NOT EXISTS "
                 + DATATYPE_TABLE
                 + " (CKEY VARCHAR NOT NULL PRIMARY KEY,"
-                + " CVARCHAR VARCHAR, CINTEGER INTEGER, CDECIMAL DECIMAL(31,10), CUNSIGNED_INT UNSIGNED_INT, CBOOLEAN BOOLEAN, CBIGINT BIGINT, CUNSIGNED_LONG UNSIGNED_LONG, CTIME TIME, CDATE DATE);";
+                + " CVARCHAR VARCHAR, CCHAR CHAR(10), CINTEGER INTEGER, CDECIMAL DECIMAL(31,10), CUNSIGNED_INT UNSIGNED_INT, CBOOLEAN BOOLEAN, CBIGINT BIGINT, CUNSIGNED_LONG UNSIGNED_LONG, CTIME TIME, CDATE DATE);";
         conn = DriverManager.getConnection(getUrl())
                 .unwrap(PhoenixConnection.class);
         PhoenixRuntime.executeStatements(conn,
@@ -493,7 +494,7 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
 
         // Compare Phoenix ResultSet with CSV file content
         PreparedStatement statement = conn
-                .prepareStatement("SELECT CKEY, CVARCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE FROM "
+                .prepareStatement("SELECT CKEY, CVARCHAR, CCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE FROM "
                         + DATATYPE_TABLE);
         ResultSet phoenixResultSet = statement.executeQuery();
         parser = new CSVParser(new StringReader(DATATYPES_CSV_VALUES),
@@ -511,9 +512,12 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
             i++;
         }
         // special case for matching date, time values
-        assertEquals(DateUtil.parseTime(record.get(8)),
+        String timeFieldValue = record.get(9);
+        assertEquals(timeFieldValue.isEmpty() ? null : DateUtil.parseTime(record.get(9)),
                 phoenixResultSet.getTime("CTIME"));
-        assertEquals(DateUtil.parseDate(record.get(9)),
+
+        String dateField 
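
The assertion change above encodes a simple convention; a standalone sketch of it follows (the helper name is hypothetical, and the DateUtil.parseTime signature is assumed from its use in the test).

    import java.sql.Time;
    import org.apache.phoenix.util.DateUtil;

    public class CsvNullSketch {
        // An empty CSV field (e.g. the trailing commas of "KEY3,,") now loads
        // as SQL NULL instead of being parsed or stored as an empty value.
        static Time parseTimeOrNull(String csvField) {
            return csvField.isEmpty() ? null : DateUtil.parseTime(csvField);
        }

        public static void main(String[] args) {
            System.out.println(parseTimeOrNull(""));                    // null
            System.out.println(parseTimeOrNull("1990-12-31 10:59:59")); // 10:59:59
        }
    }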

phoenix git commit: LP-1277 Support nulls in CHAR fields in CSV loader

2015-06-29 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/master 0f6595c0c - 38ae6b754


LP-1277 Support nulls in CHAR fields in CSV loader


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/38ae6b75
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/38ae6b75
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/38ae6b75

Branch: refs/heads/master
Commit: 38ae6b754a77fd967d601e89711349e8c2e22577
Parents: 0f6595c
Author: Gabriel Reid gabri...@ngdata.com
Authored: Thu Jun 25 21:36:51 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jun 29 08:38:52 2015 +0200

--
 .../phoenix/end2end/CSVCommonsLoaderIT.java   | 18 +++---
 .../org/apache/phoenix/schema/types/PChar.java|  3 ---
 .../phoenix/util/csv/CsvUpsertExecutor.java   |  5 -
 3 files changed, 15 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/38ae6b75/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
index d07ed8d..c7287ea 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
@@ -46,9 +46,10 @@ import org.junit.Test;
 public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
 
     private static final String DATATYPE_TABLE = "DATATYPE";
-    private static final String DATATYPES_CSV_VALUES = "CKEY, CVARCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE\n"
-            + "KEY1,A,2147483647,1.1,0,TRUE,9223372036854775807,0,1990-12-31 10:59:59,1999-12-31 23:59:59\n"
-            + "KEY2,B,-2147483648,-1.1,2147483647,FALSE,-9223372036854775808,9223372036854775807,2000-01-01 00:00:01,2012-02-29 23:59:59\n";
+    private static final String DATATYPES_CSV_VALUES = "CKEY, CVARCHAR, CCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE\n"
+            + "KEY1,A,A,2147483647,1.1,0,TRUE,9223372036854775807,0,1990-12-31 10:59:59,1999-12-31 23:59:59\n"
+            + "KEY2,B,B,-2147483648,-1.1,2147483647,FALSE,-9223372036854775808,9223372036854775807,2000-01-01 00:00:01,2012-02-29 23:59:59\n"
+            + "KEY3,,\n";
     private static final String STOCK_TABLE = "STOCK_SYMBOL";
     private static final String STOCK_TABLE_MULTI = "STOCK_SYMBOL_MULTI";
     private static final String STOCK_CSV_VALUES = "AAPL,APPLE Inc.\n"
@@ -480,7 +481,7 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
         String statements = "CREATE TABLE IF NOT EXISTS "
                 + DATATYPE_TABLE
                 + " (CKEY VARCHAR NOT NULL PRIMARY KEY,"
-                + " CVARCHAR VARCHAR, CINTEGER INTEGER, CDECIMAL DECIMAL(31,10), CUNSIGNED_INT UNSIGNED_INT, CBOOLEAN BOOLEAN, CBIGINT BIGINT, CUNSIGNED_LONG UNSIGNED_LONG, CTIME TIME, CDATE DATE);";
+                + " CVARCHAR VARCHAR, CCHAR CHAR(10), CINTEGER INTEGER, CDECIMAL DECIMAL(31,10), CUNSIGNED_INT UNSIGNED_INT, CBOOLEAN BOOLEAN, CBIGINT BIGINT, CUNSIGNED_LONG UNSIGNED_LONG, CTIME TIME, CDATE DATE);";
         conn = DriverManager.getConnection(getUrl())
                 .unwrap(PhoenixConnection.class);
         PhoenixRuntime.executeStatements(conn,
@@ -493,7 +494,7 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
 
         // Compare Phoenix ResultSet with CSV file content
         PreparedStatement statement = conn
-                .prepareStatement("SELECT CKEY, CVARCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE FROM "
+                .prepareStatement("SELECT CKEY, CVARCHAR, CCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE FROM "
                         + DATATYPE_TABLE);
         ResultSet phoenixResultSet = statement.executeQuery();
         parser = new CSVParser(new StringReader(DATATYPES_CSV_VALUES),
@@ -511,9 +512,12 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
             i++;
         }
         // special case for matching date, time values
-        assertEquals(DateUtil.parseTime(record.get(8)),
+        String timeFieldValue = record.get(9);
+        assertEquals(timeFieldValue.isEmpty() ? null : DateUtil.parseTime(record.get(9)),
                 phoenixResultSet.getTime("CTIME"));
-        assertEquals(DateUtil.parseDate(record.get(9)),
+
+        String dateField = 

phoenix git commit: LP-1277 Support nulls in CHAR fields in CSV loader

2015-06-29 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.0 047b8ca60 - 987f7afbe


LP-1277 Support nulls in CHAR fields in CSV loader


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/987f7afb
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/987f7afb
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/987f7afb

Branch: refs/heads/4.x-HBase-1.0
Commit: 987f7afbea023fbddca12614f500d77ab1f69b31
Parents: 047b8ca
Author: Gabriel Reid gabri...@ngdata.com
Authored: Thu Jun 25 21:36:51 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Sun Jun 28 20:44:53 2015 +0200

--
 .../phoenix/end2end/CSVCommonsLoaderIT.java   | 18 +++---
 .../org/apache/phoenix/schema/types/PChar.java|  3 ---
 .../phoenix/util/csv/CsvUpsertExecutor.java   |  5 -
 3 files changed, 15 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/987f7afb/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
index d07ed8d..c7287ea 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
@@ -46,9 +46,10 @@ import org.junit.Test;
 public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
 
     private static final String DATATYPE_TABLE = "DATATYPE";
-    private static final String DATATYPES_CSV_VALUES = "CKEY, CVARCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE\n"
-            + "KEY1,A,2147483647,1.1,0,TRUE,9223372036854775807,0,1990-12-31 10:59:59,1999-12-31 23:59:59\n"
-            + "KEY2,B,-2147483648,-1.1,2147483647,FALSE,-9223372036854775808,9223372036854775807,2000-01-01 00:00:01,2012-02-29 23:59:59\n";
+    private static final String DATATYPES_CSV_VALUES = "CKEY, CVARCHAR, CCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE\n"
+            + "KEY1,A,A,2147483647,1.1,0,TRUE,9223372036854775807,0,1990-12-31 10:59:59,1999-12-31 23:59:59\n"
+            + "KEY2,B,B,-2147483648,-1.1,2147483647,FALSE,-9223372036854775808,9223372036854775807,2000-01-01 00:00:01,2012-02-29 23:59:59\n"
+            + "KEY3,,\n";
     private static final String STOCK_TABLE = "STOCK_SYMBOL";
     private static final String STOCK_TABLE_MULTI = "STOCK_SYMBOL_MULTI";
     private static final String STOCK_CSV_VALUES = "AAPL,APPLE Inc.\n"
@@ -480,7 +481,7 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
         String statements = "CREATE TABLE IF NOT EXISTS "
                 + DATATYPE_TABLE
                 + " (CKEY VARCHAR NOT NULL PRIMARY KEY,"
-                + " CVARCHAR VARCHAR, CINTEGER INTEGER, CDECIMAL DECIMAL(31,10), CUNSIGNED_INT UNSIGNED_INT, CBOOLEAN BOOLEAN, CBIGINT BIGINT, CUNSIGNED_LONG UNSIGNED_LONG, CTIME TIME, CDATE DATE);";
+                + " CVARCHAR VARCHAR, CCHAR CHAR(10), CINTEGER INTEGER, CDECIMAL DECIMAL(31,10), CUNSIGNED_INT UNSIGNED_INT, CBOOLEAN BOOLEAN, CBIGINT BIGINT, CUNSIGNED_LONG UNSIGNED_LONG, CTIME TIME, CDATE DATE);";
         conn = DriverManager.getConnection(getUrl())
                 .unwrap(PhoenixConnection.class);
         PhoenixRuntime.executeStatements(conn,
@@ -493,7 +494,7 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
 
         // Compare Phoenix ResultSet with CSV file content
         PreparedStatement statement = conn
-                .prepareStatement("SELECT CKEY, CVARCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE FROM "
+                .prepareStatement("SELECT CKEY, CVARCHAR, CCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE FROM "
                         + DATATYPE_TABLE);
         ResultSet phoenixResultSet = statement.executeQuery();
         parser = new CSVParser(new StringReader(DATATYPES_CSV_VALUES),
@@ -511,9 +512,12 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
             i++;
         }
         // special case for matching date, time values
-        assertEquals(DateUtil.parseTime(record.get(8)),
+        String timeFieldValue = record.get(9);
+        assertEquals(timeFieldValue.isEmpty() ? null : DateUtil.parseTime(record.get(9)),
                 phoenixResultSet.getTime("CTIME"));
-        assertEquals(DateUtil.parseDate(record.get(9)),
+
+        String dateField 

phoenix git commit: LP-1277 Support nulls in CHAR fields in CSV loader

2015-06-29 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.4-HBase-1.1 798155f07 - 4f7487110


LP-1277 Support nulls in CHAR fields in CSV loader


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/4f748711
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/4f748711
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/4f748711

Branch: refs/heads/4.4-HBase-1.1
Commit: 4f748711093daf3c09f0e5af86203cbb6ef2dea3
Parents: 798155f
Author: Gabriel Reid gabri...@ngdata.com
Authored: Thu Jun 25 21:36:51 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jun 29 08:38:34 2015 +0200

--
 .../phoenix/end2end/CSVCommonsLoaderIT.java   | 18 +++---
 .../org/apache/phoenix/schema/types/PChar.java|  3 ---
 .../phoenix/util/csv/CsvUpsertExecutor.java   |  5 -
 3 files changed, 15 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/4f748711/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
index d07ed8d..c7287ea 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java
@@ -46,9 +46,10 @@ import org.junit.Test;
 public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
 
     private static final String DATATYPE_TABLE = "DATATYPE";
-    private static final String DATATYPES_CSV_VALUES = "CKEY, CVARCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE\n"
-            + "KEY1,A,2147483647,1.1,0,TRUE,9223372036854775807,0,1990-12-31 10:59:59,1999-12-31 23:59:59\n"
-            + "KEY2,B,-2147483648,-1.1,2147483647,FALSE,-9223372036854775808,9223372036854775807,2000-01-01 00:00:01,2012-02-29 23:59:59\n";
+    private static final String DATATYPES_CSV_VALUES = "CKEY, CVARCHAR, CCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE\n"
+            + "KEY1,A,A,2147483647,1.1,0,TRUE,9223372036854775807,0,1990-12-31 10:59:59,1999-12-31 23:59:59\n"
+            + "KEY2,B,B,-2147483648,-1.1,2147483647,FALSE,-9223372036854775808,9223372036854775807,2000-01-01 00:00:01,2012-02-29 23:59:59\n"
+            + "KEY3,,\n";
     private static final String STOCK_TABLE = "STOCK_SYMBOL";
     private static final String STOCK_TABLE_MULTI = "STOCK_SYMBOL_MULTI";
     private static final String STOCK_CSV_VALUES = "AAPL,APPLE Inc.\n"
@@ -480,7 +481,7 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
         String statements = "CREATE TABLE IF NOT EXISTS "
                 + DATATYPE_TABLE
                 + " (CKEY VARCHAR NOT NULL PRIMARY KEY,"
-                + " CVARCHAR VARCHAR, CINTEGER INTEGER, CDECIMAL DECIMAL(31,10), CUNSIGNED_INT UNSIGNED_INT, CBOOLEAN BOOLEAN, CBIGINT BIGINT, CUNSIGNED_LONG UNSIGNED_LONG, CTIME TIME, CDATE DATE);";
+                + " CVARCHAR VARCHAR, CCHAR CHAR(10), CINTEGER INTEGER, CDECIMAL DECIMAL(31,10), CUNSIGNED_INT UNSIGNED_INT, CBOOLEAN BOOLEAN, CBIGINT BIGINT, CUNSIGNED_LONG UNSIGNED_LONG, CTIME TIME, CDATE DATE);";
         conn = DriverManager.getConnection(getUrl())
                 .unwrap(PhoenixConnection.class);
         PhoenixRuntime.executeStatements(conn,
@@ -493,7 +494,7 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
 
         // Compare Phoenix ResultSet with CSV file content
         PreparedStatement statement = conn
-                .prepareStatement("SELECT CKEY, CVARCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE FROM "
+                .prepareStatement("SELECT CKEY, CVARCHAR, CCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE FROM "
                         + DATATYPE_TABLE);
         ResultSet phoenixResultSet = statement.executeQuery();
         parser = new CSVParser(new StringReader(DATATYPES_CSV_VALUES),
@@ -511,9 +512,12 @@ public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {
             i++;
         }
         // special case for matching date, time values
-        assertEquals(DateUtil.parseTime(record.get(8)),
+        String timeFieldValue = record.get(9);
+        assertEquals(timeFieldValue.isEmpty() ? null : DateUtil.parseTime(record.get(9)),
                 phoenixResultSet.getTime("CTIME"));
-        assertEquals(DateUtil.parseDate(record.get(9)),
+
+        String dateField 

phoenix git commit: PHOENIX-2022 Make BaseRegionScanner.next abstract

2015-06-01 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/master b7f138246 - 583b5b1e1


PHOENIX-2022 Make BaseRegionScanner.next abstract

Avoid infinite recursion by removing a recursive call within
BaseRegionScanner.next, which was already being used as an
abstract method.
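
A minimal standalone sketch (not the Phoenix class itself, and using List<String> rather than HBase's List<Cell> for self-containment) of the bug pattern this removes:

    import java.io.IOException;
    import java.util.List;

    abstract class ScannerSketch {
        // Before the fix the base class shipped this overload:
        //
        //     public boolean next(List<String> results) throws IOException {
        //         return next(results); // resolves to itself, not a subclass
        //     }
        //
        // so any call recursed until StackOverflowError. Declaring the method
        // abstract instead forces subclasses to supply the implementation.
        public abstract boolean next(List<String> results) throws IOException;
    }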


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/583b5b1e
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/583b5b1e
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/583b5b1e

Branch: refs/heads/master
Commit: 583b5b1e115a81799cc3e6d0a20a0fe665f666e3
Parents: b7f1382
Author: Gabriel Reid gabri...@ngdata.com
Authored: Mon Jun 1 08:57:22 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jun 1 08:57:22 2015 +0200

--
 .../java/org/apache/phoenix/coprocessor/BaseRegionScanner.java   | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/583b5b1e/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
index 828f776..3f73048 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
@@ -33,9 +33,7 @@ public abstract class BaseRegionScanner implements RegionScanner {
     }
 
     @Override
-    public boolean next(List<Cell> results) throws IOException {
-        return next(results);
-    }
+    public abstract boolean next(List<Cell> results) throws IOException;
 
     @Override
     public boolean next(List<Cell> result, ScannerContext scannerContext) throws IOException {



phoenix git commit: PHOENIX-2022 Make BaseRegionScanner.next abstract

2015-06-01 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.1 b2c0cb900 - c1882ee27


PHOENIX-2022 Make BaseRegionScanner.next abstract

Avoid infinite recursion by removing a recursive call within
BaseRegionScanner.next, which was already being used as an
abstract method.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c1882ee2
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c1882ee2
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c1882ee2

Branch: refs/heads/4.x-HBase-1.1
Commit: c1882ee279293b560fda9beb10ac50b8d3ead589
Parents: b2c0cb9
Author: Gabriel Reid gabri...@ngdata.com
Authored: Mon Jun 1 08:57:22 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jun 1 17:22:49 2015 +0200

--
 .../java/org/apache/phoenix/coprocessor/BaseRegionScanner.java   | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c1882ee2/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
index 828f776..3f73048 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
@@ -33,9 +33,7 @@ public abstract class BaseRegionScanner implements RegionScanner {
     }
 
     @Override
-    public boolean next(List<Cell> results) throws IOException {
-        return next(results);
-    }
+    public abstract boolean next(List<Cell> results) throws IOException;
 
     @Override
     public boolean next(List<Cell> result, ScannerContext scannerContext) throws IOException {



phoenix git commit: PHOENIX-2022 Make BaseRegionScanner.next abstract

2015-06-01 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 1badce965 - fcd41402d


PHOENIX-2022 Make BaseRegionScanner.next abstract

Avoid infinite recursion by removing a recursive call within
BaseRegionScanner.next, which was already being used as an
abstract method.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/fcd41402
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/fcd41402
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/fcd41402

Branch: refs/heads/4.x-HBase-0.98
Commit: fcd41402d309e33d1d4b3b04485f3869243596b8
Parents: 1badce9
Author: Gabriel Reid gabri...@ngdata.com
Authored: Mon Jun 1 08:57:22 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jun 1 17:22:00 2015 +0200

--
 .../java/org/apache/phoenix/coprocessor/BaseRegionScanner.java   | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/fcd41402/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
index 981e8cb..646e7e8 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
@@ -32,9 +32,7 @@ public abstract class BaseRegionScanner implements RegionScanner {
     }
 
     @Override
-    public boolean next(List<Cell> results) throws IOException {
-        return next(results);
-    }
+    public abstract boolean next(List<Cell> results) throws IOException;
 
     @Override
     public boolean next(List<Cell> result, int limit) throws IOException {



phoenix git commit: PHOENIX-2022 Make BaseRegionScanner.next abstract

2015-06-01 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.0 0c7b21028 - 928d4d715


PHOENIX-2022 Make BaseRegionScanner.next abstract

Avoid infinite recursion by removing a recursive call within
BaseRegionScanner.next, which was already being used as an
abstract method.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/928d4d71
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/928d4d71
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/928d4d71

Branch: refs/heads/4.x-HBase-1.0
Commit: 928d4d71519ce31785144fea898eabf3c232f507
Parents: 0c7b210
Author: Gabriel Reid gabri...@ngdata.com
Authored: Mon Jun 1 08:57:22 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jun 1 17:22:22 2015 +0200

--
 .../java/org/apache/phoenix/coprocessor/BaseRegionScanner.java   | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/928d4d71/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
index ff9ac76..2bda23d 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
@@ -33,9 +33,7 @@ public abstract class BaseRegionScanner implements RegionScanner {
     }
 
     @Override
-    public boolean next(List<Cell> results) throws IOException {
-        return next(results);
-    }
+    public abstract boolean next(List<Cell> results) throws IOException;
 
     @Override
     public boolean next(List<Cell> result, int limit) throws IOException {



phoenix git commit: PHOENIX-2022 Make BaseRegionScanner.next abstract

2015-06-01 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.4-HBase-1.0 1041efde0 - c30938336


PHOENIX-2022 Make BaseRegionScanner.next abstract

Avoid infinite recursion by removing a recursive call within
BaseRegionScanner.next, which was already being used as an
abstract method.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c3093833
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c3093833
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c3093833

Branch: refs/heads/4.4-HBase-1.0
Commit: c309383360d7f7fc194a5b1c86bc52757e3026ee
Parents: 1041efd
Author: Gabriel Reid gabri...@ngdata.com
Authored: Mon Jun 1 08:57:22 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jun 1 17:18:25 2015 +0200

--
 .../java/org/apache/phoenix/coprocessor/BaseRegionScanner.java   | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c3093833/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
index ff9ac76..2bda23d 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
@@ -33,9 +33,7 @@ public abstract class BaseRegionScanner implements RegionScanner {
     }
 
     @Override
-    public boolean next(List<Cell> results) throws IOException {
-        return next(results);
-    }
+    public abstract boolean next(List<Cell> results) throws IOException;
 
     @Override
    public boolean next(List<Cell> result, int limit) throws IOException {



phoenix git commit: PHOENIX-2022 Make BaseRegionScanner.next abstract

2015-06-01 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.4-HBase-1.1 f5cdf4391 - b4e1c5dab


PHOENIX-2022 Make BaseRegionScanner.next abstract

Avoid infinite recursion by removing a recursive call within
BaseRegionScanner.next, which was already being used as an
abstract method.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b4e1c5da
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b4e1c5da
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b4e1c5da

Branch: refs/heads/4.4-HBase-1.1
Commit: b4e1c5dabcede13d7523dffa1e82aa295aa0ee01
Parents: f5cdf43
Author: Gabriel Reid gabri...@ngdata.com
Authored: Mon Jun 1 08:57:22 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jun 1 17:19:59 2015 +0200

--
 .../java/org/apache/phoenix/coprocessor/BaseRegionScanner.java   | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b4e1c5da/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
index 828f776..3f73048 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
@@ -33,9 +33,7 @@ public abstract class BaseRegionScanner implements RegionScanner {
     }
 
     @Override
-    public boolean next(List<Cell> results) throws IOException {
-        return next(results);
-    }
+    public abstract boolean next(List<Cell> results) throws IOException;
 
     @Override
     public boolean next(List<Cell> result, ScannerContext scannerContext) throws IOException {



phoenix git commit: PHOENIX-2022 Make BaseRegionScanner.next abstract

2015-06-01 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.4-HBase-0.98 9498eb55d - bc2856b75


PHOENIX-2022 Make BaseRegionScanner.next abstract

Avoid infinite recursion by removing a recursive call within
BaseRegionScanner.next, which was already being used as an
abstract method.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/bc2856b7
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/bc2856b7
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/bc2856b7

Branch: refs/heads/4.4-HBase-0.98
Commit: bc2856b75d4b040a06ed4d2b12fb2fbede623d97
Parents: 9498eb5
Author: Gabriel Reid gabri...@ngdata.com
Authored: Mon Jun 1 08:57:22 2015 +0200
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Mon Jun 1 17:17:19 2015 +0200

--
 .../java/org/apache/phoenix/coprocessor/BaseRegionScanner.java   | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/bc2856b7/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
index 981e8cb..646e7e8 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/BaseRegionScanner.java
@@ -32,9 +32,7 @@ public abstract class BaseRegionScanner implements RegionScanner {
     }
 
     @Override
-    public boolean next(List<Cell> results) throws IOException {
-        return next(results);
-    }
+    public abstract boolean next(List<Cell> results) throws IOException;
 
     @Override
     public boolean next(List<Cell> result, int limit) throws IOException {



phoenix git commit: PHOENIX-1984 Make INSTR 1-based instead of 0-based

2015-05-21 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 cef3610e5 - 6f5dad93d


PHOENIX-1984 Make INSTR 1-based instead of 0-based

Bring functionality of INSTR built-in function in-line with other
SQL string functions, with indexing of strings starting at 1.

Signed-off-by: Gabriel Reid gabri...@ngdata.com
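
The new contract, sketched against plain Java string indexing; the numbers match the updated test expectations below, and the +1 mirrors the one-line change to InstrFunction.

    public class InstrSketch {
        public static void main(String[] args) {
            // Java's indexOf is 0-based and returns -1 on a miss.
            int hit  = "abcdefghijkl".indexOf("fgh");  // 5
            int miss = "abcde fghijkl".indexOf("lmn"); // -1
            // INSTR now adds 1: a 1-based position, and 0 when absent.
            System.out.println(hit + 1);  // 6
            System.out.println(miss + 1); // 0
        }
    }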


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6f5dad93
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6f5dad93
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6f5dad93

Branch: refs/heads/4.x-HBase-0.98
Commit: 6f5dad93df5ef6e3e2b446a4630bd0919c7575e6
Parents: cef3610
Author: NAVEEN MADHIRE vmadh...@indiana.edu
Authored: Mon May 18 22:14:57 2015 -0500
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Thu May 21 17:34:10 2015 +0200

--
 .../apache/phoenix/end2end/InstrFunctionIT.java | 12 ++---
 .../expression/function/InstrFunction.java  |  2 +-
 .../expression/function/InstrFunctionTest.java  | 48 ++--
 3 files changed, 31 insertions(+), 31 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6f5dad93/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java
index 57c0661..b869ff4 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java
@@ -63,7 +63,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
         Connection conn = DriverManager.getConnection(getUrl());
         initTable(conn, "ASC", "abcdefghijkl", "fgh");
         String queryToExecute = "SELECT INSTR(name, 'fgh') FROM SAMPLE";
-        testInstr(conn, queryToExecute, 5);
+        testInstr(conn, queryToExecute, 6);
     }
 
     @Test
@@ -71,7 +71,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
         Connection conn = DriverManager.getConnection(getUrl());
         initTable(conn, "DESC", "abcdefghijkl", "fgh");
         String queryToExecute = "SELECT INSTR(name, 'fgh') FROM SAMPLE";
-        testInstr(conn, queryToExecute, 5);
+        testInstr(conn, queryToExecute, 6);
     }
 
     @Test
@@ -79,7 +79,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
         Connection conn = DriverManager.getConnection(getUrl());
         initTable(conn, "ASC", "abcde fghijkl", "lmn");
         String queryToExecute = "SELECT INSTR(name, 'lmn') FROM SAMPLE";
-        testInstr(conn, queryToExecute, -1);
+        testInstr(conn, queryToExecute, 0);
     }
 
     @Test
@@ -87,7 +87,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
         Connection conn = DriverManager.getConnection(getUrl());
         initTable(conn, "DESC", "abcde fghijkl", "lmn");
         String queryToExecute = "SELECT INSTR(name, 'lmn') FROM SAMPLE";
-        testInstr(conn, queryToExecute, -1);
+        testInstr(conn, queryToExecute, 0);
     }
 
     @Test
@@ -95,7 +95,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
         Connection conn = DriverManager.getConnection(getUrl());
         initTable(conn, "ASC", "AɚɦFGH", "ɚɦ");
         String queryToExecute = "SELECT INSTR(name, 'ɚɦ') FROM SAMPLE";
-        testInstr(conn, queryToExecute, 1);
+        testInstr(conn, queryToExecute, 2);
     }
 
     @Test
@@ -103,7 +103,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
         Connection conn = DriverManager.getConnection(getUrl());
         initTable(conn, "DESC", "AɚɦFGH", "ɚɦ");
         String queryToExecute = "SELECT INSTR(name, 'ɚɦ') FROM SAMPLE";
-        testInstr(conn, queryToExecute, 1);
+        testInstr(conn, queryToExecute, 2);
    } 
 
     @Test

http://git-wip-us.apache.org/repos/asf/phoenix/blob/6f5dad93/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InstrFunction.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InstrFunction.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InstrFunction.java
index 317d4b3..7a002f8 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InstrFunction.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InstrFunction.java
@@ -82,7 +82,7 @@ public class InstrFunction extends ScalarFunction{
 
         String sourceStr = (String) PVarchar.INSTANCE.toObject(ptr, getChildren().get(0).getSortOrder());
 
-        position = sourceStr.indexOf(strToSearch);
+        position = 

phoenix git commit: PHOENIX-1984 Make INSTR 1-based instead of 0-based

2015-05-21 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.0 b9712982f - a4a79e469


PHOENIX-1984 Make INSTR 1-based instead of 0-based

Bring functionality of INSTR built-in function in-line with other
SQL string functions, with indexing of strings starting at 1.

Signed-off-by: Gabriel Reid gabri...@ngdata.com


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a4a79e46
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a4a79e46
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a4a79e46

Branch: refs/heads/4.x-HBase-1.0
Commit: a4a79e469929d5f4d787afcefd9f5f7867902856
Parents: b971298
Author: NAVEEN MADHIRE vmadh...@indiana.edu
Authored: Mon May 18 22:14:57 2015 -0500
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Thu May 21 17:33:43 2015 +0200

--
 .../apache/phoenix/end2end/InstrFunctionIT.java | 12 ++---
 .../expression/function/InstrFunction.java  |  2 +-
 .../expression/function/InstrFunctionTest.java  | 48 ++--
 3 files changed, 31 insertions(+), 31 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a4a79e46/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java
index 57c0661..b869ff4 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java
@@ -63,7 +63,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
         Connection conn = DriverManager.getConnection(getUrl());
         initTable(conn, "ASC", "abcdefghijkl", "fgh");
         String queryToExecute = "SELECT INSTR(name, 'fgh') FROM SAMPLE";
-        testInstr(conn, queryToExecute, 5);
+        testInstr(conn, queryToExecute, 6);
     }
 
     @Test
@@ -71,7 +71,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
         Connection conn = DriverManager.getConnection(getUrl());
         initTable(conn, "DESC", "abcdefghijkl", "fgh");
         String queryToExecute = "SELECT INSTR(name, 'fgh') FROM SAMPLE";
-        testInstr(conn, queryToExecute, 5);
+        testInstr(conn, queryToExecute, 6);
     }
 
     @Test
@@ -79,7 +79,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
         Connection conn = DriverManager.getConnection(getUrl());
         initTable(conn, "ASC", "abcde fghijkl", "lmn");
         String queryToExecute = "SELECT INSTR(name, 'lmn') FROM SAMPLE";
-        testInstr(conn, queryToExecute, -1);
+        testInstr(conn, queryToExecute, 0);
     }
 
     @Test
@@ -87,7 +87,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
         Connection conn = DriverManager.getConnection(getUrl());
         initTable(conn, "DESC", "abcde fghijkl", "lmn");
         String queryToExecute = "SELECT INSTR(name, 'lmn') FROM SAMPLE";
-        testInstr(conn, queryToExecute, -1);
+        testInstr(conn, queryToExecute, 0);
     }
 
     @Test
@@ -95,7 +95,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
         Connection conn = DriverManager.getConnection(getUrl());
         initTable(conn, "ASC", "AɚɦFGH", "ɚɦ");
         String queryToExecute = "SELECT INSTR(name, 'ɚɦ') FROM SAMPLE";
-        testInstr(conn, queryToExecute, 1);
+        testInstr(conn, queryToExecute, 2);
     }
 
     @Test
@@ -103,7 +103,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
         Connection conn = DriverManager.getConnection(getUrl());
         initTable(conn, "DESC", "AɚɦFGH", "ɚɦ");
         String queryToExecute = "SELECT INSTR(name, 'ɚɦ') FROM SAMPLE";
-        testInstr(conn, queryToExecute, 1);
+        testInstr(conn, queryToExecute, 2);
    } 
 
     @Test

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a4a79e46/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InstrFunction.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InstrFunction.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InstrFunction.java
index 317d4b3..7a002f8 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InstrFunction.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InstrFunction.java
@@ -82,7 +82,7 @@ public class InstrFunction extends ScalarFunction{
 
         String sourceStr = (String) PVarchar.INSTANCE.toObject(ptr, getChildren().get(0).getSortOrder());
 
-        position = sourceStr.indexOf(strToSearch);
+        position = sourceStr.indexOf(strToSearch) 

phoenix git commit: PHOENIX-1984 Make INSTR 1-based instead of 0-based

2015-05-21 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/master d3ff0798f - c2fed1dac


PHOENIX-1984 Make INSTR 1-based instead of 0-based

Bring functionality of INSTR built-in function in-line with other
SQL string functions, with indexing of strings starting at 1.

Signed-off-by: Gabriel Reid gabri...@ngdata.com


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c2fed1da
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c2fed1da
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c2fed1da

Branch: refs/heads/master
Commit: c2fed1dac8305f489939fc18e47cd2c2a6c596d8
Parents: d3ff079
Author: NAVEEN MADHIRE vmadh...@indiana.edu
Authored: Mon May 18 22:14:57 2015 -0500
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Thu May 21 17:24:47 2015 +0200

--
 .../apache/phoenix/end2end/InstrFunctionIT.java | 12 ++---
 .../expression/function/InstrFunction.java  |  2 +-
 .../expression/function/InstrFunctionTest.java  | 48 ++--
 3 files changed, 31 insertions(+), 31 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c2fed1da/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java
index 57c0661..b869ff4 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java
@@ -63,7 +63,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
         Connection conn = DriverManager.getConnection(getUrl());
         initTable(conn, "ASC", "abcdefghijkl", "fgh");
         String queryToExecute = "SELECT INSTR(name, 'fgh') FROM SAMPLE";
-        testInstr(conn, queryToExecute, 5);
+        testInstr(conn, queryToExecute, 6);
     }
 
     @Test
@@ -71,7 +71,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
         Connection conn = DriverManager.getConnection(getUrl());
         initTable(conn, "DESC", "abcdefghijkl", "fgh");
         String queryToExecute = "SELECT INSTR(name, 'fgh') FROM SAMPLE";
-        testInstr(conn, queryToExecute, 5);
+        testInstr(conn, queryToExecute, 6);
     }
 
     @Test
@@ -79,7 +79,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
         Connection conn = DriverManager.getConnection(getUrl());
         initTable(conn, "ASC", "abcde fghijkl", "lmn");
         String queryToExecute = "SELECT INSTR(name, 'lmn') FROM SAMPLE";
-        testInstr(conn, queryToExecute, -1);
+        testInstr(conn, queryToExecute, 0);
     }
 
     @Test
@@ -87,7 +87,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
         Connection conn = DriverManager.getConnection(getUrl());
         initTable(conn, "DESC", "abcde fghijkl", "lmn");
         String queryToExecute = "SELECT INSTR(name, 'lmn') FROM SAMPLE";
-        testInstr(conn, queryToExecute, -1);
+        testInstr(conn, queryToExecute, 0);
     }
 
     @Test
@@ -95,7 +95,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
         Connection conn = DriverManager.getConnection(getUrl());
         initTable(conn, "ASC", "AɚɦFGH", "ɚɦ");
         String queryToExecute = "SELECT INSTR(name, 'ɚɦ') FROM SAMPLE";
-        testInstr(conn, queryToExecute, 1);
+        testInstr(conn, queryToExecute, 2);
     }
 
     @Test
@@ -103,7 +103,7 @@ public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
         Connection conn = DriverManager.getConnection(getUrl());
         initTable(conn, "DESC", "AɚɦFGH", "ɚɦ");
         String queryToExecute = "SELECT INSTR(name, 'ɚɦ') FROM SAMPLE";
-        testInstr(conn, queryToExecute, 1);
+        testInstr(conn, queryToExecute, 2);
    } 
 
     @Test

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c2fed1da/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InstrFunction.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InstrFunction.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InstrFunction.java
index 317d4b3..7a002f8 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InstrFunction.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InstrFunction.java
@@ -82,7 +82,7 @@ public class InstrFunction extends ScalarFunction{
 
         String sourceStr = (String) PVarchar.INSTANCE.toObject(ptr, getChildren().get(0).getSortOrder());
 
-        position = sourceStr.indexOf(strToSearch);
+        position = sourceStr.indexOf(strToSearch) + 1;
 

svn commit: r1677531 [2/2] - in /phoenix/site: publish/ publish/language/ source/src/site/ source/src/site/markdown/

2015-05-04 Thread greid
Added: phoenix/site/source/src/site/markdown/phoenix_spark.md
URL: 
http://svn.apache.org/viewvc/phoenix/site/source/src/site/markdown/phoenix_spark.md?rev=1677531&view=auto
==
--- phoenix/site/source/src/site/markdown/phoenix_spark.md (added)
+++ phoenix/site/source/src/site/markdown/phoenix_spark.md Mon May  4 07:06:28 
2015
@@ -0,0 +1,155 @@
+
+# Apache Spark Integration
+
+phoenix-spark extends Phoenix's MapReduce support to allow Spark to load Phoenix tables as RDDs or
+DataFrames and enables persisting RDDs of Tuples back to Phoenix.
+
+## Prerequisites
+* Phoenix v4.4.0 and above
+* Spark 1.3.0
+
+
+## Reading Phoenix Tables
+
+Given a Phoenix table with the following DDL
+
+```sql
+CREATE TABLE TABLE1 (ID BIGINT NOT NULL PRIMARY KEY, COL1 VARCHAR);
+UPSERT INTO TABLE1 (ID, COL1) VALUES (1, 'test_row_1');
+UPSERT INTO TABLE1 (ID, COL1) VALUES (2, 'test_row_2');
+```
+
+### Load as a DataFrame using the Data Source API
+```scala
+import org.apache.spark.SparkContext
+import org.apache.spark.sql.SQLContext
+import org.apache.phoenix.spark._
+
+val sc = new SparkContext("local", "phoenix-test")
+val sqlContext = new SQLContext(sc)
+
+val df = sqlContext.load(
+  "org.apache.phoenix.spark",
+  Map("table" -> "TABLE1", "zkUrl" -> "phoenix-server:2181")
+)
+
+df
+  .filter(df("COL1") === "test_row_1" && df("ID") === 1L)
+  .select(df("ID"))
+  .show
+```
+
+### Load as a DataFrame directly using a Configuration object
+```scala
+import org.apache.hadoop.conf.Configuration
+import org.apache.spark.SparkContext
+import org.apache.spark.sql.SQLContext
+import org.apache.phoenix.spark._
+
+val configuration = new Configuration()
+// Can set Phoenix-specific settings, requires 'hbase.zookeeper.quorum'
+
+val sc = new SparkContext("local", "phoenix-test")
+val sqlContext = new SQLContext(sc)
+
+// Load the columns 'ID' and 'COL1' from TABLE1 as a DataFrame
+val df = sqlContext.phoenixTableAsDataFrame(
+  "TABLE1", Array("ID", "COL1"), conf = configuration
+)
+
+df.show
+```
+
+### Load as an RDD, using a Zookeeper URL
+```scala
+import org.apache.spark.SparkContext
+import org.apache.spark.rdd.RDD
+import org.apache.phoenix.spark._
+
+val sc = new SparkContext("local", "phoenix-test")
+
+// Load the columns 'ID' and 'COL1' from TABLE1 as an RDD
+val rdd: RDD[Map[String, AnyRef]] = sc.phoenixTableAsRDD(
+  "TABLE1", Seq("ID", "COL1"), zkUrl = Some("phoenix-server:2181")
+)
+
+rdd.count()
+
+val firstId = rdd.first()("ID").asInstanceOf[Long]
+val firstCol = rdd.first()("COL1").asInstanceOf[String]
+```
+
+## Saving RDDs to Phoenix 
+
+`saveToPhoenix` is an implicit method on RDD[Product], or an RDD of Tuples. The data types must
+correspond to the Java types Phoenix supports (http://phoenix.apache.org/language/datatypes.html)
+
+Given a Phoenix table with the following DDL
+
+```sql
+CREATE TABLE OUTPUT_TEST_TABLE (id BIGINT NOT NULL PRIMARY KEY, col1 VARCHAR, col2 INTEGER);
+```
+
+```scala
+import org.apache.spark.SparkContext
+import org.apache.phoenix.spark._
+
+val sc = new SparkContext("local", "phoenix-test")
+val dataSet = List((1L, "1", 1), (2L, "2", 2), (3L, "3", 3))
+
+sc
+  .parallelize(dataSet)
+  .saveToPhoenix(
+    "OUTPUT_TEST_TABLE",
+    Seq("ID", "COL1", "COL2"),
+    zkUrl = Some("phoenix-server:2181")
+  )
+```
+
+## Saving DataFrames to Phoenix
+
+The `save` method on DataFrame allows passing in a data source type. You can use
+`org.apache.phoenix.spark`, and must also pass in a `table` and `zkUrl` parameter to
+specify which table and server to persist the DataFrame to. The column names are derived from
+the DataFrame's schema field names, and must match the Phoenix column names.
+
+The `save` method also takes a `SaveMode` option, for which only `SaveMode.Overwrite` is supported.
+
+Given two Phoenix tables with the following DDL:
+
+```sql
+CREATE TABLE INPUT_TABLE (id BIGINT NOT NULL PRIMARY KEY, col1 VARCHAR, col2 INTEGER);
+CREATE TABLE OUTPUT_TABLE (id BIGINT NOT NULL PRIMARY KEY, col1 VARCHAR, col2 INTEGER);
+```
+
+```scala
+import org.apache.spark.SparkContext
+import org.apache.spark.sql.{SQLContext, SaveMode}
+import org.apache.phoenix.spark._
+
+// Load INPUT_TABLE
+val sc = new SparkContext("local", "phoenix-test")
+val sqlContext = new SQLContext(sc)
+val df = sqlContext.load("org.apache.phoenix.spark", Map("table" -> "INPUT_TABLE",
+  "zkUrl" -> hbaseConnectionString))
+
+// Save to OUTPUT_TABLE
+df.save("org.apache.phoenix.spark", SaveMode.Overwrite, Map("table" -> "OUTPUT_TABLE",
+  "zkUrl" -> hbaseConnectionString))
+```
+
+## Notes
+
+The functions `phoenixTableAsDataFrame`, `phoenixTableAsRDD` and `saveToPhoenix` all support
+optionally specifying a `conf` Hadoop configuration parameter with custom Phoenix client settings,
+as well as an optional `zkUrl` parameter for the Phoenix connection URL.
+
+If `zkUrl` isn't specified, it's assumed that the `hbase.zookeeper.quorum` property has been set
+in the `conf` parameter. Similarly, if no configuration is passed in, `zkUrl` must be specified.

svn commit: r1677531 [1/2] - in /phoenix/site: publish/ publish/language/ source/src/site/ source/src/site/markdown/

2015-05-04 Thread greid
Author: greid
Date: Mon May  4 07:06:28 2015
New Revision: 1677531

URL: http://svn.apache.org/r1677531
Log:
Add page on spark integration

Added:
phoenix/site/publish/phoenix_spark.html
phoenix/site/source/src/site/markdown/phoenix_spark.md
Modified:
phoenix/site/publish/Phoenix-in-15-minutes-or-less.html
phoenix/site/publish/array_type.html
phoenix/site/publish/building.html
phoenix/site/publish/building_website.html
phoenix/site/publish/bulk_dataload.html
phoenix/site/publish/contributing.html
phoenix/site/publish/develop.html
phoenix/site/publish/download.html
phoenix/site/publish/dynamic_columns.html
phoenix/site/publish/faq.html
phoenix/site/publish/flume.html
phoenix/site/publish/index.html
phoenix/site/publish/installation.html
phoenix/site/publish/issues.html
phoenix/site/publish/joins.html
phoenix/site/publish/language/datatypes.html
phoenix/site/publish/language/functions.html
phoenix/site/publish/language/index.html
phoenix/site/publish/mailing_list.html
phoenix/site/publish/multi-tenancy.html
phoenix/site/publish/news.html
phoenix/site/publish/paged.html
phoenix/site/publish/performance.html
phoenix/site/publish/pherf.html
phoenix/site/publish/phoenix_mr.html
phoenix/site/publish/phoenix_on_emr.html
phoenix/site/publish/pig_integration.html
phoenix/site/publish/recent.html
phoenix/site/publish/release.html
phoenix/site/publish/resources.html
phoenix/site/publish/roadmap.html
phoenix/site/publish/salted.html
phoenix/site/publish/secondary_indexing.html
phoenix/site/publish/sequences.html
phoenix/site/publish/server.html
phoenix/site/publish/skip_scan.html
phoenix/site/publish/source.html
phoenix/site/publish/subqueries.html
phoenix/site/publish/team.html
phoenix/site/publish/tracing.html
phoenix/site/publish/tuning.html
phoenix/site/publish/update_statistics.html
phoenix/site/publish/upgrading.html
phoenix/site/publish/views.html
phoenix/site/publish/who_is_using.html
phoenix/site/source/src/site/site.xml

Modified: phoenix/site/publish/Phoenix-in-15-minutes-or-less.html
URL: 
http://svn.apache.org/viewvc/phoenix/site/publish/Phoenix-in-15-minutes-or-less.html?rev=1677531&r1=1677530&r2=1677531&view=diff
==
--- phoenix/site/publish/Phoenix-in-15-minutes-or-less.html (original)
+++ phoenix/site/publish/Phoenix-in-15-minutes-or-less.html Mon May  4 07:06:28 
2015
@@ -1,7 +1,7 @@
 
 <!DOCTYPE html>
 <!--
- Generated by Apache Maven Doxia at 2015-05-01
+ Generated by Apache Maven Doxia at 2015-05-04
  Rendered using Reflow Maven Skin 1.1.0 (http://andriusvelykis.github.io/reflow-maven-skin)
 -->
 <html  xml:lang="en" lang="en">
@@ -88,6 +88,7 @@
 					<li><a href="flume.html" title="Apache Flume Plugin">Apache Flume Plugin</a></li>
 					<li><a href="pig_integration.html" title="Apache Pig Integration">Apache Pig Integration</a></li>
 					<li><a href="phoenix_mr.html" title="Map Reduce Integration">Map Reduce Integration</a></li>
+					<li><a href="phoenix_spark.html" title="Phoenix Spark Integration">Phoenix Spark Integration</a></li>
 				</ul>
 			</li>
 			<li class="dropdown">
@@ -313,6 +314,9 @@
 						<li> 
 							<a href="phoenix_mr.html" title="Map Reduce Integration">Map Reduce Integration</a>
 						</li> 
+						<li> 
+							<a href="phoenix_spark.html" title="Phoenix Spark Integration">Phoenix Spark Integration</a>
+						</li> 
 					</ul> 
 				</div> 
 				<div class="span2 bottom-nav">

Modified: phoenix/site/publish/array_type.html
URL: 
http://svn.apache.org/viewvc/phoenix/site/publish/array_type.html?rev=1677531&r1=1677530&r2=1677531&view=diff
==
--- phoenix/site/publish/array_type.html (original)
+++ phoenix/site/publish/array_type.html Mon May  4 07:06:28 2015
@@ -1,7 +1,7 @@
 
 <!DOCTYPE html>
 <!--
- Generated by Apache Maven Doxia at 2015-05-01
+ Generated by Apache Maven Doxia at 2015-05-04
  Rendered using Reflow Maven Skin 1.1.0 (http://andriusvelykis.github.io/reflow-maven-skin)
 -->
 <html  xml:lang="en" lang="en">
@@ -88,6 +88,7

svn commit: r1677108 - in /phoenix/site: publish/ publish/language/ source/src/site/markdown/

2015-05-01 Thread greid
Author: greid
Date: Fri May  1 11:03:00 2015
New Revision: 1677108

URL: http://svn.apache.org/r1677108
Log:
Add info on providing tab character as delim for bulk load
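As a sketch of the documented tip: from Java a tab can be passed as a literal "\t", whereas a shell needs quoting such as -d $'\t'. The table name and input path below are hypothetical, and while the flags follow CsvBulkLoadTool's usual options, treat the exact invocation as illustrative:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ToolRunner;
import org.apache.phoenix.mapreduce.CsvBulkLoadTool;

public class TabDelimitedBulkLoad {
    public static void main(String[] args) throws Exception {
        // "EXAMPLE" and the input path are hypothetical; "\t" is a literal tab.
        int exitCode = ToolRunner.run(new Configuration(), new CsvBulkLoadTool(),
                new String[] { "--table", "EXAMPLE", "--input", "/data/example.tsv", "-d", "\t" });
        System.exit(exitCode);
    }
}
```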

Modified:
phoenix/site/publish/Phoenix-in-15-minutes-or-less.html
phoenix/site/publish/array_type.html
phoenix/site/publish/building.html
phoenix/site/publish/building_website.html
phoenix/site/publish/bulk_dataload.html
phoenix/site/publish/contributing.html
phoenix/site/publish/develop.html
phoenix/site/publish/download.html
phoenix/site/publish/dynamic_columns.html
phoenix/site/publish/faq.html
phoenix/site/publish/flume.html
phoenix/site/publish/index.html
phoenix/site/publish/installation.html
phoenix/site/publish/issues.html
phoenix/site/publish/joins.html
phoenix/site/publish/language/datatypes.html
phoenix/site/publish/language/functions.html
phoenix/site/publish/language/index.html
phoenix/site/publish/mailing_list.html
phoenix/site/publish/multi-tenancy.html
phoenix/site/publish/news.html
phoenix/site/publish/paged.html
phoenix/site/publish/performance.html
phoenix/site/publish/pherf.html
phoenix/site/publish/phoenix_mr.html
phoenix/site/publish/phoenix_on_emr.html
phoenix/site/publish/pig_integration.html
phoenix/site/publish/recent.html
phoenix/site/publish/release.html
phoenix/site/publish/resources.html
phoenix/site/publish/roadmap.html
phoenix/site/publish/salted.html
phoenix/site/publish/secondary_indexing.html
phoenix/site/publish/sequences.html
phoenix/site/publish/server.html
phoenix/site/publish/skip_scan.html
phoenix/site/publish/source.html
phoenix/site/publish/subqueries.html
phoenix/site/publish/team.html
phoenix/site/publish/tracing.html
phoenix/site/publish/tuning.html
phoenix/site/publish/update_statistics.html
phoenix/site/publish/upgrading.html
phoenix/site/publish/views.html
phoenix/site/publish/who_is_using.html
phoenix/site/source/src/site/markdown/bulk_dataload.md

Modified: phoenix/site/publish/Phoenix-in-15-minutes-or-less.html
URL: 
http://svn.apache.org/viewvc/phoenix/site/publish/Phoenix-in-15-minutes-or-less.html?rev=1677108&r1=1677107&r2=1677108&view=diff
==
--- phoenix/site/publish/Phoenix-in-15-minutes-or-less.html (original)
+++ phoenix/site/publish/Phoenix-in-15-minutes-or-less.html Fri May  1 11:03:00 
2015
@@ -1,7 +1,7 @@
 
 <!DOCTYPE html>
 <!--
- Generated by Apache Maven Doxia at 2015-04-29
+ Generated by Apache Maven Doxia at 2015-05-01
  Rendered using Reflow Maven Skin 1.1.0 (http://andriusvelykis.github.io/reflow-maven-skin)
 -->
 <html  xml:lang="en" lang="en">

Modified: phoenix/site/publish/array_type.html
URL: 
http://svn.apache.org/viewvc/phoenix/site/publish/array_type.html?rev=1677108&r1=1677107&r2=1677108&view=diff
==
--- phoenix/site/publish/array_type.html (original)
+++ phoenix/site/publish/array_type.html Fri May  1 11:03:00 2015
@@ -1,7 +1,7 @@
 
 <!DOCTYPE html>
 <!--
- Generated by Apache Maven Doxia at 2015-04-29
+ Generated by Apache Maven Doxia at 2015-05-01
  Rendered using Reflow Maven Skin 1.1.0 (http://andriusvelykis.github.io/reflow-maven-skin)
 -->
 <html  xml:lang="en" lang="en">

Modified: phoenix/site/publish/building.html
URL: 
http://svn.apache.org/viewvc/phoenix/site/publish/building.html?rev=1677108&r1=1677107&r2=1677108&view=diff
==
--- phoenix/site/publish/building.html (original)
+++ phoenix/site/publish/building.html Fri May  1 11:03:00 2015
@@ -1,7 +1,7 @@
 
 <!DOCTYPE html>
 <!--
- Generated by Apache Maven Doxia at 2015-04-29
+ Generated by Apache Maven Doxia at 2015-05-01
  Rendered using Reflow Maven Skin 1.1.0 (http://andriusvelykis.github.io/reflow-maven-skin)
 -->
 <html  xml:lang="en" lang="en">

Modified: phoenix/site/publish/building_website.html
URL: 
http://svn.apache.org/viewvc/phoenix/site/publish/building_website.html?rev=1677108&r1=1677107&r2=1677108&view=diff
==
--- phoenix/site/publish/building_website.html (original)
+++ phoenix/site/publish/building_website.html Fri May  1 11:03:00 2015
@@ -1,7 +1,7 @@
 
 <!DOCTYPE html>
 <!--
- Generated by Apache Maven Doxia at 2015-04-29
+ Generated by Apache Maven Doxia at 2015-05-01
  Rendered using Reflow Maven Skin 1.1.0 (http://andriusvelykis.github.io/reflow-maven-skin)
 -->
 <html  xml:lang="en" lang="en">

Modified: phoenix/site/publish/bulk_dataload.html
URL: 
http://svn.apache.org/viewvc/phoenix/site/publish/bulk_dataload.html?rev=1677108&r1=1677107&r2=1677108&view=diff
==
--- phoenix/site/publish/bulk_dataload.html (original)
+++ phoenix/site/publish/bulk_dataload.html Fri May  1 11

phoenix git commit: PHOENIX-1733: sqlline.py doesn't allow connecting with a tenant-specific connection

2015-04-19 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/master 5df1b728e -> fefc319c9


PHOENIX-1733: sqlline.py doesn't allow connecting with a tenant-specific connection
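For context, a tenant-specific connection is requested through the TenantId connection property; a minimal JDBC sketch follows (the URL and tenant id are illustrative). The quoting fix below is what lets the same kind of tenant-qualified URL be passed to sqlline.py without the shell mangling it:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Properties;

public class TenantConnectionSketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.setProperty("TenantId", "tenant1"); // hypothetical tenant id
        // Statements on this connection see only tenant1's view of multi-tenant tables.
        Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost", props);
        conn.close();
    }
}
```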

Signed-off-by: Gabriel Reid gabri...@ngdata.com


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/fefc319c
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/fefc319c
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/fefc319c

Branch: refs/heads/master
Commit: fefc319c98339d7da0b4105ee6f396d33128c45c
Parents: 5df1b72
Author: Abhishek Sreenivasa asreeniv...@salesforce.com
Authored: Thu Apr 16 12:57:26 2015 -0700
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Sun Apr 19 21:26:29 2015 +0200

--
 bin/phoenix_utils.py | 16 +++-
 bin/psql.py  |  6 +-
 bin/sqlline.py   |  4 ++--
 3 files changed, 18 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/fefc319c/bin/phoenix_utils.py
--
diff --git a/bin/phoenix_utils.py b/bin/phoenix_utils.py
index 182cdf3..7ef8d96 100755
--- a/bin/phoenix_utils.py
+++ b/bin/phoenix_utils.py
@@ -20,7 +20,6 @@
 
 
 import os
-import sys
 import fnmatch
 
 def find(pattern, classPaths):
@@ -95,3 +94,18 @@ def setPath():
  testjar = find(PHOENIX_TESTS_JAR_PATTERN, phoenix_class_path)
 
  return 
+
+def shell_quote(args):
+    """
+    Return the platform specific shell quoted string. Handles Windows and *nix platforms.
+
+    :param args: array of shell arguments
+    :return: shell quoted string
+    """
+    if os.name == 'nt':
+        import subprocess
+        return subprocess.list2cmdline(args)
+    else:
+        # pipes module isn't available on Windows
+        import pipes
+        return " ".join([pipes.quote(v) for v in args])
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/phoenix/blob/fefc319c/bin/psql.py
--
diff --git a/bin/psql.py b/bin/psql.py
index 247001a..5b26dcd 100755
--- a/bin/psql.py
+++ b/bin/psql.py
@@ -26,11 +26,7 @@ import phoenix_utils
 
 phoenix_utils.setPath()
 
-if os.name == 'nt':
-    args = subprocess.list2cmdline(sys.argv[1:])
-else:
-    import pipes # pipes module isn't available on Windows
-    args = " ".join([pipes.quote(v) for v in sys.argv[1:]])
+args = phoenix_utils.shell_quote(sys.argv[1:])
 
 # HBase configuration folder path (where hbase-site.xml reside) for
 # HBase/Phoenix client side property override

http://git-wip-us.apache.org/repos/asf/phoenix/blob/fefc319c/bin/sqlline.py
--
diff --git a/bin/sqlline.py b/bin/sqlline.py
index f48e527..2b2750a 100755
--- a/bin/sqlline.py
+++ b/bin/sqlline.py
@@ -46,7 +46,7 @@ localhost:2181:/hbase ../examples/stock_symbol.sql
 sqlfile = ""
 
 if len(sys.argv) > 2:
-    sqlfile = "--run=" + sys.argv[2]
+    sqlfile = "--run=" + phoenix_utils.shell_quote([sys.argv[2]])
 
 colorSetting = "true"
 # disable color setting for windows OS
@@ -57,7 +57,7 @@ java_cmd = 'java -cp ' + phoenix_utils.hbase_conf_path + os.pathsep + phoenix_u
 ' -Dlog4j.configuration=file:' + \
 os.path.join(phoenix_utils.current_dir, "log4j.properties") + \
 " sqlline.SqlLine -d org.apache.phoenix.jdbc.PhoenixDriver" \
-"-u jdbc:phoenix:" + sys.argv[1] + \
+"-u jdbc:phoenix:" + phoenix_utils.shell_quote([sys.argv[1]]) + \
 " -n none -p none --color=" + colorSetting + " --fastConnect=false --verbose=true" \
 "--isolation=TRANSACTION_READ_COMMITTED " + sqlfile
 



phoenix git commit: PHOENIX-1755 Improve error logging if csv line has insufficient fields

2015-04-07 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 15f769fa4 -> 5aea0b82d


PHOENIX-1755 Improve error logging if csv line has insufficient fields

Signed-off-by: Gabriel Reid gabri...@ngdata.com


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/5aea0b82
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/5aea0b82
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/5aea0b82

Branch: refs/heads/4.x-HBase-0.98
Commit: 5aea0b82d8caada3d7478666b880ef7c59ed89c9
Parents: 15f769f
Author: Karel Vervaeke ka...@ngdata.com
Authored: Thu Mar 19 16:10:07 2015 +0100
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Tue Apr 7 21:15:19 2015 +0200

--
 .../java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java | 5 +
 1 file changed, 5 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/5aea0b82/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java
index b5f6f9f..0e3294b 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/util/csv/CsvUpsertExecutor.java
@@ -144,6 +144,11 @@ public class CsvUpsertExecutor implements Closeable {
      */
     void execute(CSVRecord csvRecord) {
         try {
+            if (csvRecord.size() < conversionFunctions.size()) {
+                String message = String.format("CSV record does not have enough values (has %d, but needs %d)",
+                        csvRecord.size(), conversionFunctions.size());
+                throw new IllegalArgumentException(message);
+            }
             for (int fieldIndex = 0; fieldIndex < conversionFunctions.size(); fieldIndex++) {
                 Object sqlValue = conversionFunctions.get(fieldIndex).apply(csvRecord.get(fieldIndex));
                 if (sqlValue != null) {



phoenix git commit: PHOENIX-1712 Add INSTR function

2015-04-02 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/master f766a780c -> 1f942b1f0


PHOENIX-1712 Add INSTR function

Add method for detecting a substring within another string.
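A small sketch of INSTR composing with a WHERE clause, in the spirit of the testInstrFilter cases below; the URL, table, and data are hypothetical. Note the function as added here returned 0-based positions; it was later made 1-based (PHOENIX-1984 above):

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;

public class InstrFilterSketch {
    public static void main(String[] args) throws Exception {
        Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
        // Under the eventual 1-based semantics, any match yields a position > 0.
        ResultSet rs = conn.createStatement().executeQuery(
                "SELECT name FROM SAMPLE WHERE INSTR(name, 'fgh') > 0");
        while (rs.next()) {
            System.out.println(rs.getString(1));
        }
        conn.close();
    }
}
```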

Signed-off-by: Gabriel Reid gabri...@ngdata.com


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/1f942b1f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/1f942b1f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/1f942b1f

Branch: refs/heads/master
Commit: 1f942b1f0e815674f1917c18167d848769435148
Parents: f766a78
Author: NAVEEN MADHIRE vmadh...@indiana.edu
Authored: Mon Mar 16 23:11:45 2015 -0400
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Thu Apr 2 21:07:55 2015 +0200

--
 .../apache/phoenix/end2end/InstrFunctionIT.java | 126 +++
 .../phoenix/expression/ExpressionType.java  |   4 +-
 .../expression/function/InstrFunction.java  | 105 
 .../expression/function/InstrFunctionTest.java  | 108 
 4 files changed, 342 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/1f942b1f/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java
new file mode 100644
index 000..57c0661
--- /dev/null
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/InstrFunctionIT.java
@@ -0,0 +1,126 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.end2end;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+
+import org.junit.Test;
+
+public class InstrFunctionIT extends BaseHBaseManagedTimeIT {
+    private void initTable(Connection conn, String sortOrder, String s, String subStr) throws Exception {
+        String ddl = "CREATE TABLE SAMPLE (name VARCHAR NOT NULL PRIMARY KEY " + sortOrder + ", substr VARCHAR)";
+        conn.createStatement().execute(ddl);
+        String dml = "UPSERT INTO SAMPLE VALUES(?,?)";
+        PreparedStatement stmt = conn.prepareStatement(dml);
+        stmt.setString(1, s);
+        stmt.setString(2, subStr);
+        stmt.execute();
+        conn.commit();
+    }
+
+    private void testInstr(Connection conn, String queryToExecute, Integer expValue) throws Exception {
+        ResultSet rs;
+        rs = conn.createStatement().executeQuery(queryToExecute);
+        assertTrue(rs.next());
+        assertEquals(expValue.intValue(), rs.getInt(1));
+        assertFalse(rs.next());
+    }
+
+    private void testInstrFilter(Connection conn, String queryToExecute, String expected) throws Exception {
+        ResultSet rs;
+        PreparedStatement stmt = conn.prepareStatement(queryToExecute);
+        rs = stmt.executeQuery();
+        assertTrue(rs.next());
+        assertEquals(expected, rs.getString(1));
+    }
+
+    @Test
+    public void testSingleByteInstrAscending() throws Exception {
+        Connection conn = DriverManager.getConnection(getUrl());
+        initTable(conn, "ASC", "abcdefghijkl", "fgh");
+        String queryToExecute = "SELECT INSTR(name, 'fgh') FROM SAMPLE";
+        testInstr(conn, queryToExecute, 5);
+    }
+
+    @Test
+    public void testSingleByteInstrDescending() throws Exception {
+        Connection conn = DriverManager.getConnection(getUrl());
+        initTable(conn, "DESC", "abcdefghijkl", "fgh");
+        String queryToExecute = "SELECT INSTR(name, 'fgh') FROM SAMPLE";
+        testInstr(conn, queryToExecute, 5);
+    }
+
+    @Test
+    public void testSingleByteInstrAscendingNoString() throws Exception {
+        Connection conn = DriverManager.getConnection(getUrl());
+

phoenix git commit: PHOENIX-1770 Correct exit code from bin scripts

2015-03-26 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.3 1011488dc -> eeff9bee3


PHOENIX-1770 Correct exit code from bin scripts

Make the python scripts under bin/ exit with the exit code that
was returned from the underlying java command.

Contributed by Mark Tse.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/eeff9bee
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/eeff9bee
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/eeff9bee

Branch: refs/heads/4.3
Commit: eeff9bee36c362746ee89c675de3be13d54a1c9e
Parents: 1011488
Author: Gabriel Reid gabri...@ngdata.com
Authored: Thu Mar 26 08:43:48 2015 +0100
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Thu Mar 26 08:43:48 2015 +0100

--
 bin/end2endTest.py |  3 ++-
 bin/performance.py | 13 ++---
 bin/psql.py|  3 ++-
 3 files changed, 14 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/eeff9bee/bin/end2endTest.py
--
diff --git a/bin/end2endTest.py b/bin/end2endTest.py
index 96886c7..a5993dc 100755
--- a/bin/end2endTest.py
+++ b/bin/end2endTest.py
@@ -44,4 +44,5 @@ java_cmd = "java -cp " + hbase_config_path + os.pathsep + phoenix_jar_path + os.
     hbase_library_path + " org.apache.phoenix.end2end.End2EndTestDriver " + \
     ' '.join(sys.argv[1:])
 
-subprocess.call(java_cmd, shell=True)
+exitcode = subprocess.call(java_cmd, shell=True)
+sys.exit(exitcode)

http://git-wip-us.apache.org/repos/asf/phoenix/blob/eeff9bee/bin/performance.py
--
diff --git a/bin/performance.py b/bin/performance.py
index c69edfd..b9df433 100755
--- a/bin/performance.py
+++ b/bin/performance.py
@@ -85,7 +85,9 @@ print -
 print "\nCreating performance table..."
 createFileWithContent(ddl, createtable)
 
-subprocess.call(execute + ddl, shell=True)
+exitcode = subprocess.call(execute + ddl, shell=True)
+if exitcode != 0:
+    sys.exit(exitcode)
 
 # Write real,user,sys time on console for the following queries
 queryex("1 - Count", "SELECT COUNT(1) FROM %s;" % (table))
@@ -95,11 +97,16 @@ queryex("4 - Truncate + Group By", "SELECT TRUNC(DATE,'DAY') DAY FROM %s GROUP B
 queryex("5 - Filter + Count", "SELECT COUNT(1) FROM %s WHERE CORE<10;" % (table))
 
 print "\nGenerating and upserting data..."
-subprocess.call('java -jar %s %s' % (phoenix_utils.testjar, rowcount), shell=True)
+exitcode = subprocess.call('java -jar %s %s' % (phoenix_utils.testjar, rowcount), shell=True)
+if exitcode != 0:
+    sys.exit(exitcode)
+
 print "\n"
 createFileWithContent(qry, statements)
 
-subprocess.call(execute + data + ' ' + qry, shell=True)
+exitcode = subprocess.call(execute + data + ' ' + qry, shell=True)
+if exitcode != 0:
+    sys.exit(exitcode)
 
 # clear temporary files
 delfile(ddl)

http://git-wip-us.apache.org/repos/asf/phoenix/blob/eeff9bee/bin/psql.py
--
diff --git a/bin/psql.py b/bin/psql.py
index 34a95df..247001a 100755
--- a/bin/psql.py
+++ b/bin/psql.py
@@ -39,4 +39,5 @@ java_cmd = 'java -cp ' + phoenix_utils.hbase_conf_path + os.pathsep + phoenix_u
     os.path.join(phoenix_utils.current_dir, "log4j.properties") + \
     " org.apache.phoenix.util.PhoenixRuntime " + args 
 
-subprocess.call(java_cmd, shell=True)
+exitcode = subprocess.call(java_cmd, shell=True)
+sys.exit(exitcode)



phoenix git commit: PHOENIX-1770 Correct exit code from bin scripts

2015-03-26 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 c633483c6 -> 3a57a6ea9


PHOENIX-1770 Correct exit code from bin scripts

Make the python scripts under bin/ exit with the exit code that
was returned from the underlying java command.

Contributed by Mark Tse.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/3a57a6ea
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/3a57a6ea
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/3a57a6ea

Branch: refs/heads/4.x-HBase-0.98
Commit: 3a57a6ea97bbb52facf6a20bda37d9b612e34a17
Parents: c633483
Author: Gabriel Reid gabri...@ngdata.com
Authored: Thu Mar 26 08:43:48 2015 +0100
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Thu Mar 26 08:46:51 2015 +0100

--
 bin/end2endTest.py |  3 ++-
 bin/performance.py | 13 ++---
 bin/psql.py|  3 ++-
 3 files changed, 14 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/3a57a6ea/bin/end2endTest.py
--
diff --git a/bin/end2endTest.py b/bin/end2endTest.py
index 96886c7..a5993dc 100755
--- a/bin/end2endTest.py
+++ b/bin/end2endTest.py
@@ -44,4 +44,5 @@ java_cmd = "java -cp " + hbase_config_path + os.pathsep + phoenix_jar_path + os.
     hbase_library_path + " org.apache.phoenix.end2end.End2EndTestDriver " + \
     ' '.join(sys.argv[1:])
 
-subprocess.call(java_cmd, shell=True)
+exitcode = subprocess.call(java_cmd, shell=True)
+sys.exit(exitcode)

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3a57a6ea/bin/performance.py
--
diff --git a/bin/performance.py b/bin/performance.py
index c69edfd..b9df433 100755
--- a/bin/performance.py
+++ b/bin/performance.py
@@ -85,7 +85,9 @@ print -
 print "\nCreating performance table..."
 createFileWithContent(ddl, createtable)
 
-subprocess.call(execute + ddl, shell=True)
+exitcode = subprocess.call(execute + ddl, shell=True)
+if exitcode != 0:
+    sys.exit(exitcode)
 
 # Write real,user,sys time on console for the following queries
 queryex("1 - Count", "SELECT COUNT(1) FROM %s;" % (table))
@@ -95,11 +97,16 @@ queryex("4 - Truncate + Group By", "SELECT TRUNC(DATE,'DAY') DAY FROM %s GROUP B
 queryex("5 - Filter + Count", "SELECT COUNT(1) FROM %s WHERE CORE<10;" % (table))
 
 print "\nGenerating and upserting data..."
-subprocess.call('java -jar %s %s' % (phoenix_utils.testjar, rowcount), shell=True)
+exitcode = subprocess.call('java -jar %s %s' % (phoenix_utils.testjar, rowcount), shell=True)
+if exitcode != 0:
+    sys.exit(exitcode)
+
 print "\n"
 createFileWithContent(qry, statements)
 
-subprocess.call(execute + data + ' ' + qry, shell=True)
+exitcode = subprocess.call(execute + data + ' ' + qry, shell=True)
+if exitcode != 0:
+    sys.exit(exitcode)
 
 # clear temporary files
 delfile(ddl)

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3a57a6ea/bin/psql.py
--
diff --git a/bin/psql.py b/bin/psql.py
index 34a95df..247001a 100755
--- a/bin/psql.py
+++ b/bin/psql.py
@@ -39,4 +39,5 @@ java_cmd = 'java -cp ' + phoenix_utils.hbase_conf_path + os.pathsep + phoenix_u
     os.path.join(phoenix_utils.current_dir, "log4j.properties") + \
     " org.apache.phoenix.util.PhoenixRuntime " + args 
 
-subprocess.call(java_cmd, shell=True)
+exitcode = subprocess.call(java_cmd, shell=True)
+sys.exit(exitcode)



phoenix git commit: PHOENIX-1770 Correct exit code from bin scripts

2015-03-26 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/master f941e89f4 -> 1a1378422


PHOENIX-1770 Correct exit code from bin scripts

Make the python scripts under bin/ exit with the exit code that
was returned from the underlying java command.

Contributed by Mark Tse.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/1a137842
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/1a137842
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/1a137842

Branch: refs/heads/master
Commit: 1a13784228f05ebe51e52ad80ae464d8531c2fe0
Parents: f941e89
Author: Gabriel Reid gabri...@ngdata.com
Authored: Thu Mar 26 08:43:48 2015 +0100
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Thu Mar 26 08:45:49 2015 +0100

--
 bin/end2endTest.py |  3 ++-
 bin/performance.py | 13 ++---
 bin/psql.py|  3 ++-
 3 files changed, 14 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/1a137842/bin/end2endTest.py
--
diff --git a/bin/end2endTest.py b/bin/end2endTest.py
index 96886c7..a5993dc 100755
--- a/bin/end2endTest.py
+++ b/bin/end2endTest.py
@@ -44,4 +44,5 @@ java_cmd = "java -cp " + hbase_config_path + os.pathsep + phoenix_jar_path + os.
     hbase_library_path + " org.apache.phoenix.end2end.End2EndTestDriver " + \
     ' '.join(sys.argv[1:])
 
-subprocess.call(java_cmd, shell=True)
+exitcode = subprocess.call(java_cmd, shell=True)
+sys.exit(exitcode)

http://git-wip-us.apache.org/repos/asf/phoenix/blob/1a137842/bin/performance.py
--
diff --git a/bin/performance.py b/bin/performance.py
index c69edfd..b9df433 100755
--- a/bin/performance.py
+++ b/bin/performance.py
@@ -85,7 +85,9 @@ print -
 print "\nCreating performance table..."
 createFileWithContent(ddl, createtable)
 
-subprocess.call(execute + ddl, shell=True)
+exitcode = subprocess.call(execute + ddl, shell=True)
+if exitcode != 0:
+    sys.exit(exitcode)
 
 # Write real,user,sys time on console for the following queries
 queryex("1 - Count", "SELECT COUNT(1) FROM %s;" % (table))
@@ -95,11 +97,16 @@ queryex("4 - Truncate + Group By", "SELECT TRUNC(DATE,'DAY') DAY FROM %s GROUP B
 queryex("5 - Filter + Count", "SELECT COUNT(1) FROM %s WHERE CORE<10;" % (table))
 
 print "\nGenerating and upserting data..."
-subprocess.call('java -jar %s %s' % (phoenix_utils.testjar, rowcount), shell=True)
+exitcode = subprocess.call('java -jar %s %s' % (phoenix_utils.testjar, rowcount), shell=True)
+if exitcode != 0:
+    sys.exit(exitcode)
+
 print "\n"
 createFileWithContent(qry, statements)
 
-subprocess.call(execute + data + ' ' + qry, shell=True)
+exitcode = subprocess.call(execute + data + ' ' + qry, shell=True)
+if exitcode != 0:
+    sys.exit(exitcode)
 
 # clear temporary files
 delfile(ddl)

http://git-wip-us.apache.org/repos/asf/phoenix/blob/1a137842/bin/psql.py
--
diff --git a/bin/psql.py b/bin/psql.py
index 34a95df..247001a 100755
--- a/bin/psql.py
+++ b/bin/psql.py
@@ -39,4 +39,5 @@ java_cmd = 'java -cp ' + phoenix_utils.hbase_conf_path + os.pathsep + phoenix_u
     os.path.join(phoenix_utils.current_dir, "log4j.properties") + \
     " org.apache.phoenix.util.PhoenixRuntime " + args 
 
-subprocess.call(java_cmd, shell=True)
+exitcode = subprocess.call(java_cmd, shell=True)
+sys.exit(exitcode)



phoenix git commit: PHOENIX-1770 Correct exit code from bin scripts

2015-03-26 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.x 6cb6a3766 -> 4d7161008


PHOENIX-1770 Correct exit code from bin scripts

Make the python scripts under bin/ exit with the exit code that
was returned from the underlying java command.

Contributed by Mark Tse.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/4d716100
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/4d716100
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/4d716100

Branch: refs/heads/4.x-HBase-1.x
Commit: 4d71610081903a535767f1a462ba47e1ffec5191
Parents: 6cb6a37
Author: Gabriel Reid gabri...@ngdata.com
Authored: Thu Mar 26 08:43:48 2015 +0100
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Thu Mar 26 08:47:15 2015 +0100

--
 bin/end2endTest.py |  3 ++-
 bin/performance.py | 13 ++---
 bin/psql.py|  3 ++-
 3 files changed, 14 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/4d716100/bin/end2endTest.py
--
diff --git a/bin/end2endTest.py b/bin/end2endTest.py
index 96886c7..a5993dc 100755
--- a/bin/end2endTest.py
+++ b/bin/end2endTest.py
@@ -44,4 +44,5 @@ java_cmd = "java -cp " + hbase_config_path + os.pathsep + phoenix_jar_path + os.
     hbase_library_path + " org.apache.phoenix.end2end.End2EndTestDriver " + \
     ' '.join(sys.argv[1:])
 
-subprocess.call(java_cmd, shell=True)
+exitcode = subprocess.call(java_cmd, shell=True)
+sys.exit(exitcode)

http://git-wip-us.apache.org/repos/asf/phoenix/blob/4d716100/bin/performance.py
--
diff --git a/bin/performance.py b/bin/performance.py
index c69edfd..b9df433 100755
--- a/bin/performance.py
+++ b/bin/performance.py
@@ -85,7 +85,9 @@ print -
 print "\nCreating performance table..."
 createFileWithContent(ddl, createtable)
 
-subprocess.call(execute + ddl, shell=True)
+exitcode = subprocess.call(execute + ddl, shell=True)
+if exitcode != 0:
+    sys.exit(exitcode)
 
 # Write real,user,sys time on console for the following queries
 queryex("1 - Count", "SELECT COUNT(1) FROM %s;" % (table))
@@ -95,11 +97,16 @@ queryex("4 - Truncate + Group By", "SELECT TRUNC(DATE,'DAY') DAY FROM %s GROUP B
 queryex("5 - Filter + Count", "SELECT COUNT(1) FROM %s WHERE CORE<10;" % (table))
 
 print "\nGenerating and upserting data..."
-subprocess.call('java -jar %s %s' % (phoenix_utils.testjar, rowcount), shell=True)
+exitcode = subprocess.call('java -jar %s %s' % (phoenix_utils.testjar, rowcount), shell=True)
+if exitcode != 0:
+    sys.exit(exitcode)
+
 print "\n"
 createFileWithContent(qry, statements)
 
-subprocess.call(execute + data + ' ' + qry, shell=True)
+exitcode = subprocess.call(execute + data + ' ' + qry, shell=True)
+if exitcode != 0:
+    sys.exit(exitcode)
 
 # clear temporary files
 delfile(ddl)

http://git-wip-us.apache.org/repos/asf/phoenix/blob/4d716100/bin/psql.py
--
diff --git a/bin/psql.py b/bin/psql.py
index 34a95df..247001a 100755
--- a/bin/psql.py
+++ b/bin/psql.py
@@ -39,4 +39,5 @@ java_cmd = 'java -cp ' + phoenix_utils.hbase_conf_path + os.pathsep + phoenix_u
     os.path.join(phoenix_utils.current_dir, "log4j.properties") + \
     " org.apache.phoenix.util.PhoenixRuntime " + args 
 
-subprocess.call(java_cmd, shell=True)
+exitcode = subprocess.call(java_cmd, shell=True)
+sys.exit(exitcode)



phoenix git commit: PHOENIX-1653 Support separate clusters for MR jobs

2015-03-24 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/master 673847109 -> ab9c9283e


PHOENIX-1653 Support separate clusters for MR jobs

Add support for the input and output formats of a Phoenix MapReduce job to
point to separate clusters using override configuration settings. Defaults to
existing behavior (HConstants.ZOOKEEPER_QUORUM)
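A rough sketch of the idea; only ConnectionUtil.getInputConnection is taken from the diff below, and the quorum property keys shown are illustrative assumptions rather than the exact names introduced by this patch:

```java
import java.sql.Connection;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.phoenix.mapreduce.util.ConnectionUtil;

public class SeparateClustersSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Hypothetical override keys: read from one cluster, write to another.
        conf.set("phoenix.mapreduce.input.cluster.quorum", "source-zk:2181");
        conf.set("phoenix.mapreduce.output.cluster.quorum", "target-zk:2181");
        // The input side resolves its own cluster, falling back to the shared quorum when unset.
        Connection inputConn = ConnectionUtil.getInputConnection(conf, new Properties());
        inputConn.close();
    }
}
```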


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ab9c9283
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ab9c9283
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ab9c9283

Branch: refs/heads/master
Commit: ab9c9283eca692b6f3ef03a598f4c012e63ef83a
Parents: 6738471
Author: gjacoby gjac...@salesforce.com
Authored: Fri Feb 27 16:49:14 2015 -0800
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Tue Mar 24 20:08:08 2015 +0100

--
 .../phoenix/mapreduce/PhoenixInputFormat.java   | 15 ++--
 .../phoenix/mapreduce/PhoenixRecordWriter.java  |  2 +-
 .../phoenix/mapreduce/index/IndexTool.java  |  2 +-
 .../index/PhoenixIndexImportMapper.java |  2 +-
 .../phoenix/mapreduce/util/ConnectionUtil.java  | 88 ++--
 .../util/PhoenixConfigurationUtil.java  | 72 ++--
 .../mapreduce/util/PhoenixMapReduceUtil.java| 22 -
 .../util/PhoenixConfigurationUtilTest.java  | 60 -
 .../pig/util/QuerySchemaParserFunction.java |  2 +-
 .../pig/util/SqlQueryToColumnInfoFunction.java  |  2 +-
 10 files changed, 219 insertions(+), 48 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ab9c9283/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
index a83b9ae..31759b4 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
@@ -98,15 +98,16 @@ public class PhoenixInputFormat<T extends DBWritable> extends InputFormat<NullWr
      * @throws IOException
      * @throws SQLException
      */
-    private QueryPlan getQueryPlan(final JobContext context,final Configuration configuration) throws IOException {
+    private QueryPlan getQueryPlan(final JobContext context, final Configuration configuration)
+            throws IOException {
         Preconditions.checkNotNull(context);
-        try{
+        try {
             final String currentScnValue = configuration.get(PhoenixConfigurationUtil.CURRENT_SCN_VALUE);
             final Properties overridingProps = new Properties();
             if(currentScnValue != null) {
                 overridingProps.put(PhoenixRuntime.CURRENT_SCN_ATTRIB, currentScnValue);
             }
-            final Connection connection = ConnectionUtil.getConnection(configuration,overridingProps);
+            final Connection connection = ConnectionUtil.getInputConnection(configuration, overridingProps);
             final String selectStatement = PhoenixConfigurationUtil.getSelectStatement(configuration);
             Preconditions.checkNotNull(selectStatement);
             final Statement statement = connection.createStatement();
@@ -116,9 +117,11 @@ public class PhoenixInputFormat<T extends DBWritable> extends InputFormat<NullWr
             // Initialize the query plan so it sets up the parallel scans
             queryPlan.iterator();
             return queryPlan;
-        } catch(Exception exception) {
-            LOG.error(String.format("Failed to get the query plan with error [%s]",exception.getMessage()));
+        } catch (Exception exception) {
+            LOG.error(String.format("Failed to get the query plan with error [%s]",
+                    exception.getMessage()));
             throw new RuntimeException(exception);
         }
-   }
+    }
+
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ab9c9283/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
index 4d26bf4..5843076 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
@@ -46,7 +46,7 @@ public class PhoenixRecordWriter<T extends DBWritable>  extends RecordWriter<Nul
     private long numRecords = 0;
 
     public PhoenixRecordWriter(final Configuration configuration) throws 

phoenix git commit: PHOENIX-1653 Support separate clusters for MR jobs

2015-03-24 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.x f4180fa40 -> 7de8ee1e9


PHOENIX-1653 Support separate clusters for MR jobs

Add support for the input and output formats of a Phoenix MapReduce job to
point to separate clusters using override configuration settings. Defaults to
existing behavior (HConstants.ZOOKEEPER_QUORUM)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/7de8ee1e
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/7de8ee1e
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/7de8ee1e

Branch: refs/heads/4.x-HBase-1.x
Commit: 7de8ee1e914f5e0008ca9d983869757e4ca92b78
Parents: f4180fa
Author: gjacoby gjac...@salesforce.com
Authored: Fri Feb 27 16:49:14 2015 -0800
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Tue Mar 24 20:07:52 2015 +0100

--
 .../phoenix/mapreduce/PhoenixInputFormat.java   | 15 ++--
 .../phoenix/mapreduce/PhoenixRecordWriter.java  |  2 +-
 .../phoenix/mapreduce/index/IndexTool.java  |  2 +-
 .../index/PhoenixIndexImportMapper.java |  2 +-
 .../phoenix/mapreduce/util/ConnectionUtil.java  | 88 ++--
 .../util/PhoenixConfigurationUtil.java  | 72 ++--
 .../mapreduce/util/PhoenixMapReduceUtil.java| 22 -
 .../util/PhoenixConfigurationUtilTest.java  | 60 -
 .../pig/util/QuerySchemaParserFunction.java |  2 +-
 .../pig/util/SqlQueryToColumnInfoFunction.java  |  2 +-
 10 files changed, 219 insertions(+), 48 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/7de8ee1e/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
index a83b9ae..31759b4 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
@@ -98,15 +98,16 @@ public class PhoenixInputFormat<T extends DBWritable> extends InputFormat<NullWr
      * @throws IOException
      * @throws SQLException
      */
-    private QueryPlan getQueryPlan(final JobContext context,final Configuration configuration) throws IOException {
+    private QueryPlan getQueryPlan(final JobContext context, final Configuration configuration)
+            throws IOException {
         Preconditions.checkNotNull(context);
-        try{
+        try {
             final String currentScnValue = configuration.get(PhoenixConfigurationUtil.CURRENT_SCN_VALUE);
             final Properties overridingProps = new Properties();
             if(currentScnValue != null) {
                 overridingProps.put(PhoenixRuntime.CURRENT_SCN_ATTRIB, currentScnValue);
             }
-            final Connection connection = ConnectionUtil.getConnection(configuration,overridingProps);
+            final Connection connection = ConnectionUtil.getInputConnection(configuration, overridingProps);
             final String selectStatement = PhoenixConfigurationUtil.getSelectStatement(configuration);
             Preconditions.checkNotNull(selectStatement);
             final Statement statement = connection.createStatement();
@@ -116,9 +117,11 @@ public class PhoenixInputFormat<T extends DBWritable> extends InputFormat<NullWr
             // Initialize the query plan so it sets up the parallel scans
             queryPlan.iterator();
             return queryPlan;
-        } catch(Exception exception) {
-            LOG.error(String.format("Failed to get the query plan with error [%s]",exception.getMessage()));
+        } catch (Exception exception) {
+            LOG.error(String.format("Failed to get the query plan with error [%s]",
+                    exception.getMessage()));
             throw new RuntimeException(exception);
         }
-   }
+    }
+
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/7de8ee1e/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
index 4d26bf4..5843076 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
@@ -46,7 +46,7 @@ public class PhoenixRecordWriter<T extends DBWritable>  extends RecordWriter<Nul
     private long numRecords = 0;
 
     public PhoenixRecordWriter(final Configuration configuration) 

phoenix git commit: PHOENIX-1653 Support separate clusters for MR jobs

2015-03-24 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 c92de2749 -> af0d65a0a


PHOENIX-1653 Support separate clusters for MR jobs

Add support for the input and output formats of a Phoenix MapReduce job to
point to separate clusters using override configuration settings. Defaults to
existing behavior (HConstants.ZOOKEEPER_QUORUM)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/af0d65a0
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/af0d65a0
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/af0d65a0

Branch: refs/heads/4.x-HBase-0.98
Commit: af0d65a0abdbb8dc73d766a15e3ffa0e6d854d13
Parents: c92de27
Author: gjacoby gjac...@salesforce.com
Authored: Fri Feb 27 16:49:14 2015 -0800
Committer: Gabriel Reid gabri...@ngdata.com
Committed: Tue Mar 24 19:40:21 2015 +0100

--
 .../phoenix/mapreduce/PhoenixInputFormat.java   | 15 ++--
 .../phoenix/mapreduce/PhoenixRecordWriter.java  |  2 +-
 .../phoenix/mapreduce/index/IndexTool.java  |  2 +-
 .../index/PhoenixIndexImportMapper.java |  2 +-
 .../phoenix/mapreduce/util/ConnectionUtil.java  | 88 ++--
 .../util/PhoenixConfigurationUtil.java  | 72 ++--
 .../mapreduce/util/PhoenixMapReduceUtil.java| 22 -
 .../util/PhoenixConfigurationUtilTest.java  | 60 -
 .../pig/util/QuerySchemaParserFunction.java |  2 +-
 .../pig/util/SqlQueryToColumnInfoFunction.java  |  2 +-
 10 files changed, 219 insertions(+), 48 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/af0d65a0/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
index a83b9ae..31759b4 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixInputFormat.java
@@ -98,15 +98,16 @@ public class PhoenixInputFormat<T extends DBWritable> extends InputFormat<NullWr
      * @throws IOException
      * @throws SQLException
      */
-    private QueryPlan getQueryPlan(final JobContext context,final Configuration configuration) throws IOException {
+    private QueryPlan getQueryPlan(final JobContext context, final Configuration configuration)
+            throws IOException {
         Preconditions.checkNotNull(context);
-        try{
+        try {
             final String currentScnValue = configuration.get(PhoenixConfigurationUtil.CURRENT_SCN_VALUE);
             final Properties overridingProps = new Properties();
             if(currentScnValue != null) {
                 overridingProps.put(PhoenixRuntime.CURRENT_SCN_ATTRIB, currentScnValue);
             }
-            final Connection connection = ConnectionUtil.getConnection(configuration,overridingProps);
+            final Connection connection = ConnectionUtil.getInputConnection(configuration, overridingProps);
             final String selectStatement = PhoenixConfigurationUtil.getSelectStatement(configuration);
             Preconditions.checkNotNull(selectStatement);
             final Statement statement = connection.createStatement();
@@ -116,9 +117,11 @@ public class PhoenixInputFormat<T extends DBWritable> extends InputFormat<NullWr
             // Initialize the query plan so it sets up the parallel scans
             queryPlan.iterator();
             return queryPlan;
-        } catch(Exception exception) {
-            LOG.error(String.format("Failed to get the query plan with error [%s]",exception.getMessage()));
+        } catch (Exception exception) {
+            LOG.error(String.format("Failed to get the query plan with error [%s]",
+                    exception.getMessage()));
             throw new RuntimeException(exception);
         }
-   }
+    }
+
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/af0d65a0/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
index 4d26bf4..5843076 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/PhoenixRecordWriter.java
@@ -46,7 +46,7 @@ public class PhoenixRecordWriter<T extends DBWritable>  extends RecordWriter<Nul
     private long numRecords = 0;
 
     public PhoenixRecordWriter(final Configuration configuration) 

svn commit: r1661421 - in /phoenix/site: publish/tuning.html source/src/site/markdown/tuning.md

2015-02-21 Thread greid
Author: greid
Date: Sat Feb 21 18:05:54 2015
New Revision: 1661421

URL: http://svn.apache.org/r1661421
Log:
PHOENIX-1651 Add docs for phoenix.query.dateFormatTimeZone config property
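For orientation, the property documented here can be supplied as a client-side connection property; a minimal sketch in which the URL, table, and column are hypothetical:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.util.Properties;

public class DateFormatTimeZoneSketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // Interpret date/time literals and TO_DATE input in this zone (default is GMT).
        props.setProperty("phoenix.query.dateFormatTimeZone", "America/Los_Angeles");
        Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost", props);
        // EVENTS.ts_text is a hypothetical VARCHAR column holding 'yyyy-MM-dd HH:mm:ss' strings.
        ResultSet rs = conn.createStatement().executeQuery(
                "SELECT TO_DATE(ts_text) FROM EVENTS");
        while (rs.next()) {
            System.out.println(rs.getDate(1));
        }
        conn.close();
    }
}
```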

Modified:
phoenix/site/publish/tuning.html
phoenix/site/source/src/site/markdown/tuning.md

Modified: phoenix/site/publish/tuning.html
URL: 
http://svn.apache.org/viewvc/phoenix/site/publish/tuning.html?rev=1661421&r1=1661420&r2=1661421&view=diff
==
--- phoenix/site/publish/tuning.html (original)
+++ phoenix/site/publish/tuning.html Sat Feb 21 18:05:54 2015
@@ -222,151 +222,156 @@
 							<td>yyyy-MM-dd HH:mm:ss</td> 
 						</tr> 
 						<tr class="b"> 
+							<td><small>phoenix.query.dateFormatTimeZone</small></td> 
+							<td style="text-align: left;">A timezone id that specifies the default time zone in which date, time, and timestamp literals should be interpreted when interpreting string literals or using the <tt>TO_DATE</tt> function. A time zone id can be a timezone abbreviation such as “PST”, or a full name such as “America/Los_Angeles”, or a custom offset such as “GMT-9:00”. The time zone id “LOCAL” can also be used to interpret all date, time, and timestamp literals as being in the current timezone of the client.</td> 
+							<td>GMT</td> 
+						</tr> 
+						<tr class="a"> 
tdsmallphoenix.query.numberFormat/small/td 
td style=text-align: left;Default pattern to use for conversion of a 
decimal number to/from a string, whether through the 
ttTO_CHAR(lt;decimal-numbergt;)/tt or 
ttTO_NUMBER(lt;decimal-stringgt;)/tt functions, or through 
ttresultSet.getString(lt;decimal-columngt;)/tt. Default is #,##0.###/td 
td#,##0.###/td 
   /tr 
-  tr class=a 
+  tr class=b 
tdsmallphoenix.mutate.maxSize/small/td 
td style=text-align: left;The maximum number of rows that may be 
batched on the client before a commit or rollback must be called./td 
td50/td 
   /tr 
-  tr class=b 
+  tr class=a 
tdsmallphoenix.mutate.batchSize/small/td 
td style=text-align: left;The number of rows that are batched together 
and automatically committed during the execution of an ttUPSERT SELECT/tt 
or ttDELETE/tt statement. This property may be overridden at connection 
time by specifying the ttUpsertBatchSize/tt property value. Note that the 
connection property value does not affect the batch size used by the 
coprocessor when these statements are executed completely on the server 
side./td 
td1000/td 
   /tr 
-  tr class=a 
+  tr class=b 
tdsmallphoenix.query.maxServerCacheBytes/small/td 
td style=text-align: left;Maximum size (in bytes) of a single sub-query 
result (usually the filtered result of a table) before compression and 
conversion to a hash map. Attempting to hash an intermediate sub-query result 
of a size bigger than this setting will result in a 
MaxServerCacheSizeExceededException. Default 100MB./td 
td104857600/td 
   /tr 
-  tr class=b 
+  tr class=a 
tdsmallphoenix.coprocessor.maxServerCacheTimeToLiveMs/small/td 
td style=text-align: left;Maximum living time (in milliseconds) of 
server caches. A cache entry expires after this amount of time has passed since 
last access. Consider adjusting this parameter when a server-side 
IOException(“Could not find hash cache for joinId”) happens. Getting 
warnings like “Earlier hash cache(s) might have expired on servers” might 
also be a sign that this number should be increased./td 
td3/td 
   /tr 
-  tr class=a 
+  tr class=b 
tdsmallphoenix.query.useIndexes/small/td 
td style=text-align: left;Determines whether or not indexes are 
considered by the optimizer to satisfy a query. Default is true /td 
tdtrue/td 
   /tr 
-  tr class=b 
+  tr class=a 
tdsmallphoenix.index.mutableBatchSizeThreshold/small/td 
td style=text-align: left;Number of mutations in a batch beyond which 
index metadata will be sent as a separate RPC to each region server as opposed 
to included inline with each mutation. Defaults to 5. /td 
td5/td 
   /tr 
-  tr class=a 
+  tr class=b 
tdsmallphoenix.schema.dropMetaData/small/td 
td style=text-align: left;Determines whether or not an HBase table is 
dropped when the Phoenix table is dropped. Default is true /td 
tdtrue/td 
   /tr 
-  tr class=b 
+  tr class=a 
tdsmallphoenix.groupby.spillable/small/td 
td style=text-align: left;Determines whether or not a GROUP BY over a 
large number of distinct values is allowed to spill to disk on the region 
server. If false, an InsufficientMemoryException will be thrown instead. 
Default is true /td 
tdtrue/td 
   /tr 
-  tr class=a 
+  tr class=b 
tdsmallphoenix.groupby.spillFiles/small/td 
td style=text-align: left;Number of memory mapped spill files to be 
used when spilling GROUP BY distinct values to disk. Default is 2 /td 
td2/td 
   /tr 
-  tr class=b 
+  tr class=a 
tdsmallphoenix.groupby.maxCacheSize/small/td 
td style=text-align: left;Size
