[2/2] phoenix git commit: PHOENIX-2890 Extend IndexTool to allow incremental index rebuilds

2016-12-25 Thread ankit
PHOENIX-2890 Extend IndexTool to allow incremental index rebuilds


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/dbd8459a
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/dbd8459a
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/dbd8459a

Branch: refs/heads/4.x-HBase-0.98
Commit: dbd8459af649bdd61927bc391c391ee41740fdf1
Parents: b477e54
Author: Ankit Singhal 
Authored: Mon Dec 26 13:28:32 2016 +0530
Committer: Ankit Singhal 
Committed: Mon Dec 26 13:28:32 2016 +0530

--
 .../phoenix/end2end/AutomaticRebuildIT.java | 219 +
 .../end2end/IndexToolForPartialBuildIT.java | 298 
 ...olForPartialBuildWithNamespaceEnabledIT.java |  70 +++
 .../phoenix/end2end/index/IndexMetadataIT.java  |  58 +++
 .../end2end/index/MutableIndexFailureIT.java|  10 +-
 phoenix-core/src/main/antlr3/PhoenixSQL.g   |   4 +-
 .../coprocessor/MetaDataEndpointImpl.java   |   9 +-
 .../coprocessor/MetaDataRegionObserver.java | 201 +---
 .../phoenix/exception/SQLExceptionCode.java |   3 +-
 .../index/PhoenixIndexFailurePolicy.java|  52 +--
 .../phoenix/jdbc/PhoenixDatabaseMetaData.java   |   2 +
 .../apache/phoenix/jdbc/PhoenixStatement.java   |   8 +-
 .../phoenix/mapreduce/index/IndexTool.java  | 455 +--
 .../phoenix/mapreduce/index/IndexToolUtil.java  |   6 +-
 .../index/PhoenixIndexImportDirectMapper.java   |   2 +-
 .../index/PhoenixIndexPartialBuildMapper.java   | 182 
 .../util/PhoenixConfigurationUtil.java  |  31 ++
 .../phoenix/parse/AlterIndexStatement.java  |   8 +-
 .../apache/phoenix/parse/ParseNodeFactory.java  |   6 +-
 .../org/apache/phoenix/query/QueryServices.java |   4 +
 .../apache/phoenix/schema/MetaDataClient.java   |  47 +-
 .../java/org/apache/phoenix/util/IndexUtil.java |  62 ++-
 22 files changed, 1457 insertions(+), 280 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/dbd8459a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AutomaticRebuildIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AutomaticRebuildIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AutomaticRebuildIT.java
new file mode 100644
index 000..cbb7745
--- /dev/null
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AutomaticRebuildIT.java
@@ -0,0 +1,219 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.end2end;
+
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
+import org.apache.hadoop.hbase.DoNotRetryIOException;
+import org.apache.hadoop.hbase.HBaseIOException;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.coprocessor.ObserverContext;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver;
+import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
+import org.apache.phoenix.query.QueryConstants;
+import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.query.QueryServicesOptions;
+import org.apache.phoenix.schema.PIndexState;
+import org.apache.phoenix.schema.PTableType;
+import org.apache.phoenix.util.PropertiesUtil;
+import 

[1/2] phoenix git commit: PHOENIX-2890 Extend IndexTool to allow incremental index rebuilds

2016-12-25 Thread ankit
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 b477e5410 -> dbd8459af


http://git-wip-us.apache.org/repos/asf/phoenix/blob/dbd8459a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexTool.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexTool.java 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexTool.java
index 8488123..3349cf3 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexTool.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexTool.java
@@ -17,10 +17,17 @@
  */
 package org.apache.phoenix.mapreduce.index;
 
+import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.ASYNC_REBUILD_TIMESTAMP;
+import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.INDEX_DISABLE_TIMESTAMP;
+import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_NAME;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_SCHEM;
+
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.commons.cli.CommandLine;
@@ -36,11 +43,15 @@ import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat;
 import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
+import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -48,9 +59,13 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.phoenix.compile.PostIndexDDLCompiler;
+import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
+import org.apache.phoenix.hbase.index.util.IndexManagementUtil;
+import org.apache.phoenix.index.IndexMaintainer;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.mapreduce.CsvBulkImportUtil;
 import org.apache.phoenix.mapreduce.util.ColumnInfoToStringEncoderDecoder;
@@ -62,9 +77,9 @@ import org.apache.phoenix.schema.PIndexState;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTable.IndexType;
 import org.apache.phoenix.schema.TableRef;
+import org.apache.phoenix.util.ByteUtil;
 import org.apache.phoenix.util.ColumnInfo;
 import org.apache.phoenix.util.IndexUtil;
-import org.apache.phoenix.util.MetaDataUtil;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.QueryUtil;
 import org.apache.phoenix.util.SchemaUtil;
@@ -72,6 +87,8 @@ import org.apache.phoenix.util.TransactionUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.collect.Lists;
+
 /**
  * An MR job to populate the index table from the data table.
  *
@@ -85,7 +102,11 @@ public class IndexTool extends Configured implements Tool {
 private static final Option DATA_TABLE_OPTION = new Option("dt", 
"data-table", true,
 "Data table name (mandatory)");
 private static final Option INDEX_TABLE_OPTION = new Option("it", 
"index-table", true,
-"Index table name(mandatory)");
+"Index table name(not required in case of partial rebuilding)");
+
+private static final Option PARTIAL_REBUILD_OPTION = new Option("pr", 
"partial-rebuild", false,
+"To build indexes for a data table from least disabledTimeStamp");
+
 private static final Option DIRECT_API_OPTION = new Option("direct", 
"direct", false,
 "If specified, we avoid the bulk load (optional)");
 private static final Option RUN_FOREGROUND_OPTION =
@@ -105,6 +126,7 @@ public class IndexTool extends Configured implements Tool {
 options.addOption(SCHEMA_NAME_OPTION);
 options.addOption(DATA_TABLE_OPTION);
 options.addOption(INDEX_TABLE_OPTION);
+options.addOption(PARTIAL_REBUILD_OPTION);
 options.addOption(DIRECT_API_OPTION);
 options.addOption(RUN_FOREGROUND_OPTION);
 options.addOption(OUTPUT_PATH_OPTION);
@@ -139,18 +161,18 @@ 

phoenix git commit: PHOENIX-2890 Extend IndexTool to allow incremental index rebuilds(addendum)

2016-12-25 Thread ankit
Repository: phoenix
Updated Branches:
  refs/heads/master 83827cd8c -> e906841fb


PHOENIX-2890 Extend IndexTool to allow incremental index rebuilds(addendum)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/e906841f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/e906841f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/e906841f

Branch: refs/heads/master
Commit: e906841fb2c96615f0d50b06c48bdeda78379c94
Parents: 83827cd
Author: Ankit Singhal 
Authored: Mon Dec 26 12:28:14 2016 +0530
Committer: Ankit Singhal 
Committed: Mon Dec 26 12:28:14 2016 +0530

--
 .../end2end/IndexToolForPartialBuildWithNamespaceEnabledIT.java  | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/e906841f/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolForPartialBuildWithNamespaceEnabledIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolForPartialBuildWithNamespaceEnabledIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolForPartialBuildWithNamespaceEnabledIT.java
index 5e16b05..4b2371c 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolForPartialBuildWithNamespaceEnabledIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolForPartialBuildWithNamespaceEnabledIT.java
@@ -36,10 +36,10 @@ import com.google.common.collect.Maps;
  * Tests for the {@link IndexToolForPartialBuildWithNamespaceEnabled}
  */
 @RunWith(Parameterized.class)
-public class IndexToolForPartialBuildWithNamespaceEnabled extends 
IndexToolForPartialBuildIT {
+public class IndexToolForPartialBuildWithNamespaceEnabledIT extends 
IndexToolForPartialBuildIT {
 
 
-public IndexToolForPartialBuildWithNamespaceEnabled(boolean localIndex, 
boolean isNamespaceEnabled) {
+public IndexToolForPartialBuildWithNamespaceEnabledIT(boolean localIndex, 
boolean isNamespaceEnabled) {
 super(localIndex);
 this.isNamespaceEnabled=isNamespaceEnabled;
 }



[1/2] phoenix git commit: PHOENIX-2890 Extend IndexTool to allow incremental index rebuilds

2016-12-25 Thread ankit
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.1 c1b8d79c2 -> 068c1cd96


http://git-wip-us.apache.org/repos/asf/phoenix/blob/068c1cd9/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexTool.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexTool.java 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexTool.java
index 82b353c..e594e0d 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexTool.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexTool.java
@@ -17,10 +17,17 @@
  */
 package org.apache.phoenix.mapreduce.index;
 
+import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.ASYNC_REBUILD_TIMESTAMP;
+import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.INDEX_DISABLE_TIMESTAMP;
+import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_NAME;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_SCHEM;
+
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.commons.cli.CommandLine;
@@ -36,11 +43,15 @@ import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat;
 import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
+import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -48,9 +59,13 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.phoenix.compile.PostIndexDDLCompiler;
+import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
+import org.apache.phoenix.hbase.index.util.IndexManagementUtil;
+import org.apache.phoenix.index.IndexMaintainer;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.mapreduce.CsvBulkImportUtil;
 import org.apache.phoenix.mapreduce.util.ColumnInfoToStringEncoderDecoder;
@@ -62,9 +77,9 @@ import org.apache.phoenix.schema.PIndexState;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTable.IndexType;
 import org.apache.phoenix.schema.TableRef;
+import org.apache.phoenix.util.ByteUtil;
 import org.apache.phoenix.util.ColumnInfo;
 import org.apache.phoenix.util.IndexUtil;
-import org.apache.phoenix.util.MetaDataUtil;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.QueryUtil;
 import org.apache.phoenix.util.SchemaUtil;
@@ -72,6 +87,8 @@ import org.apache.phoenix.util.TransactionUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.collect.Lists;
+
 /**
  * An MR job to populate the index table from the data table.
  *
@@ -85,7 +102,11 @@ public class IndexTool extends Configured implements Tool {
 private static final Option DATA_TABLE_OPTION = new Option("dt", 
"data-table", true,
 "Data table name (mandatory)");
 private static final Option INDEX_TABLE_OPTION = new Option("it", 
"index-table", true,
-"Index table name(mandatory)");
+"Index table name(not required in case of partial rebuilding)");
+
+private static final Option PARTIAL_REBUILD_OPTION = new Option("pr", 
"partial-rebuild", false,
+"To build indexes for a data table from least disabledTimeStamp");
+
 private static final Option DIRECT_API_OPTION = new Option("direct", 
"direct", false,
 "If specified, we avoid the bulk load (optional)");
 private static final Option RUN_FOREGROUND_OPTION =
@@ -105,6 +126,7 @@ public class IndexTool extends Configured implements Tool {
 options.addOption(SCHEMA_NAME_OPTION);
 options.addOption(DATA_TABLE_OPTION);
 options.addOption(INDEX_TABLE_OPTION);
+options.addOption(PARTIAL_REBUILD_OPTION);
 options.addOption(DIRECT_API_OPTION);
 options.addOption(RUN_FOREGROUND_OPTION);
 options.addOption(OUTPUT_PATH_OPTION);
@@ -139,18 +161,18 @@ public 

[2/2] phoenix git commit: PHOENIX-2890 Extend IndexTool to allow incremental index rebuilds

2016-12-25 Thread ankit
PHOENIX-2890 Extend IndexTool to allow incremental index rebuilds


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/068c1cd9
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/068c1cd9
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/068c1cd9

Branch: refs/heads/4.x-HBase-1.1
Commit: 068c1cd96db6b46e80721cb66be9263e0565f5d1
Parents: c1b8d79
Author: Ankit Singhal 
Authored: Mon Dec 26 12:24:19 2016 +0530
Committer: Ankit Singhal 
Committed: Mon Dec 26 12:24:19 2016 +0530

--
 .../phoenix/end2end/AutomaticRebuildIT.java | 219 +
 .../end2end/IndexToolForPartialBuildIT.java | 298 
 ...olForPartialBuildWithNamespaceEnabledIT.java |  70 +++
 .../phoenix/end2end/index/IndexMetadataIT.java  |  58 +++
 .../end2end/index/MutableIndexFailureIT.java|   8 +-
 phoenix-core/src/main/antlr3/PhoenixSQL.g   |   4 +-
 .../coprocessor/MetaDataEndpointImpl.java   |   9 +-
 .../coprocessor/MetaDataRegionObserver.java | 291 +++-
 .../phoenix/exception/SQLExceptionCode.java |   3 +-
 .../index/PhoenixIndexFailurePolicy.java|  52 +--
 .../phoenix/jdbc/PhoenixDatabaseMetaData.java   |   2 +
 .../apache/phoenix/jdbc/PhoenixStatement.java   |   8 +-
 .../phoenix/mapreduce/index/IndexTool.java  | 455 +--
 .../phoenix/mapreduce/index/IndexToolUtil.java  |   6 +-
 .../index/PhoenixIndexImportDirectMapper.java   |   2 +-
 .../index/PhoenixIndexPartialBuildMapper.java   | 182 
 .../util/PhoenixConfigurationUtil.java  |  31 ++
 .../phoenix/parse/AlterIndexStatement.java  |   8 +-
 .../apache/phoenix/parse/ParseNodeFactory.java  |   6 +-
 .../org/apache/phoenix/query/QueryServices.java |   4 +
 .../apache/phoenix/schema/MetaDataClient.java   |  47 +-
 .../java/org/apache/phoenix/util/IndexUtil.java |  61 ++-
 22 files changed, 1503 insertions(+), 321 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/068c1cd9/phoenix-core/src/it/java/org/apache/phoenix/end2end/AutomaticRebuildIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AutomaticRebuildIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AutomaticRebuildIT.java
new file mode 100644
index 000..cbb7745
--- /dev/null
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AutomaticRebuildIT.java
@@ -0,0 +1,219 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.end2end;
+
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
+import org.apache.hadoop.hbase.DoNotRetryIOException;
+import org.apache.hadoop.hbase.HBaseIOException;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.coprocessor.ObserverContext;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver;
+import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
+import org.apache.phoenix.query.QueryConstants;
+import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.query.QueryServicesOptions;
+import org.apache.phoenix.schema.PIndexState;
+import org.apache.phoenix.schema.PTableType;
+import org.apache.phoenix.util.PropertiesUtil;
+import 

Build failed in Jenkins: Phoenix | Master #1525

2016-12-25 Thread Apache Jenkins Server
See 

Changes:

[ankitsinghal59] PHOENIX-2890 Extend IndexTool to allow incremental index 
rebuilds

--
Started by an SCM change
[EnvInject] - Loading node environment variables.
Building remotely on H10 (ubuntu) in workspace 

 > git rev-parse --is-inside-work-tree # timeout=10
Fetching changes from the remote Git repository
 > git config remote.origin.url 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git # timeout=10
Fetching upstream changes from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
 > git --version # timeout=10
 > git -c core.askpass=true fetch --tags --progress 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git rev-parse origin/master^{commit} # timeout=10
Checking out Revision 83827cd8c2876c6b6dccf3a5678889b40a76261b (origin/master)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 83827cd8c2876c6b6dccf3a5678889b40a76261b
 > git rev-list 70dc3836ac4d472bb55e79a723a1d2a1d30d75de # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
MAVEN_OPTS=-Xmx3G

[EnvInject] - Variables injected successfully.
[Phoenix-master] $ /bin/bash -xe /tmp/hudson4947388969268014812.sh
+ ls /home/jenkins/.m2/repository/org/apache/htrace
htrace
htrace-core
htrace-core4
+ ls /home/jenkins/.m2/repository/org/apache/hbase
hbase
hbase-annotations
hbase-archetype-builder
hbase-archetypes
hbase-assembly
hbase-checkstyle
hbase-client
hbase-client-project
hbase-client-project-archetype
hbase-common
hbase-endpoint
hbase-examples
hbase-external-blockcache
hbase-hadoop1-compat
hbase-hadoop2-compat
hbase-hadoop-compat
hbase-it
hbase-prefix-tree
hbase-procedure
hbase-protocol
hbase-protocol-shaded
hbase-resource-bundle
hbase-rest
hbase-rsgroup
hbase-server
hbase-shaded
hbase-shaded-client
hbase-shaded-client-project
hbase-shaded-client-project-archetype
hbase-shaded-server
hbase-shell
hbase-spark
hbase-testing-util
hbase-thrift
[Phoenix-master] $ /home/jenkins/tools/maven/latest3/bin/mvn -U clean install 
-Dcheckstyle.skip=true
[INFO] Scanning for projects...
[INFO] 
[INFO] Reactor Build Order:
[INFO] 
[INFO] Apache Phoenix
[INFO] Phoenix Core
[INFO] Phoenix - Flume
[INFO] Phoenix - Pig
[INFO] Phoenix Query Server Client
[INFO] Phoenix Query Server
[INFO] Phoenix - Pherf
[INFO] Phoenix - Spark
[INFO] Phoenix - Hive
[INFO] Phoenix Client
[INFO] Phoenix Server
[INFO] Phoenix Assembly
[INFO] Phoenix - Tracing Web Application
[INFO] 
[INFO] 
[INFO] Building Apache Phoenix 4.10.0-HBase-1.2-SNAPSHOT
[INFO] 
[INFO] 
[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ phoenix ---
[INFO] Deleting 
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.13:check (validate) @ phoenix ---
[INFO] 
[INFO] --- maven-remote-resources-plugin:1.5:process (default) @ phoenix ---
[INFO] 
[INFO] --- maven-source-plugin:2.2.1:jar-no-fork (attach-sources) @ phoenix ---
[INFO] 
[INFO] --- maven-jar-plugin:2.4:test-jar (default) @ phoenix ---
[WARNING] JAR will be empty - no content was marked for inclusion!
[INFO] Building jar: 

[INFO] 
[INFO] --- maven-site-plugin:3.2:attach-descriptor (attach-descriptor) @ 
phoenix ---
[INFO] 
[INFO] --- maven-install-plugin:2.5.2:install (default-install) @ phoenix ---
[INFO] Installing  to 
/home/jenkins/.m2/repository/org/apache/phoenix/phoenix/4.10.0-HBase-1.2-SNAPSHOT/phoenix-4.10.0-HBase-1.2-SNAPSHOT.pom
[INFO] Installing 

 to 
/home/jenkins/.m2/repository/org/apache/phoenix/phoenix/4.10.0-HBase-1.2-SNAPSHOT/phoenix-4.10.0-HBase-1.2-SNAPSHOT-tests.jar
[INFO] 
[INFO] 
[INFO] Building Phoenix Core 4.10.0-HBase-1.2-SNAPSHOT
[INFO] 
[INFO] 
[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ phoenix-core ---
[INFO] Deleting 

[INFO] 
[INFO] --- maven-checkstyle-plugin:2.13:check (validate) @ phoenix-core ---
[INFO] 
[INFO] --- 

[1/2] phoenix git commit: PHOENIX-2890 Extend IndexTool to allow incremental index rebuilds

2016-12-25 Thread ankit
Repository: phoenix
Updated Branches:
  refs/heads/master 70dc3836a -> 83827cd8c


http://git-wip-us.apache.org/repos/asf/phoenix/blob/83827cd8/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexTool.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexTool.java 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexTool.java
index 82b353c..e594e0d 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexTool.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexTool.java
@@ -17,10 +17,17 @@
  */
 package org.apache.phoenix.mapreduce.index;
 
+import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.ASYNC_REBUILD_TIMESTAMP;
+import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.INDEX_DISABLE_TIMESTAMP;
+import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_NAME;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_SCHEM;
+
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.commons.cli.CommandLine;
@@ -36,11 +43,15 @@ import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat;
 import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
+import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -48,9 +59,13 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.phoenix.compile.PostIndexDDLCompiler;
+import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
+import org.apache.phoenix.hbase.index.util.IndexManagementUtil;
+import org.apache.phoenix.index.IndexMaintainer;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.mapreduce.CsvBulkImportUtil;
 import org.apache.phoenix.mapreduce.util.ColumnInfoToStringEncoderDecoder;
@@ -62,9 +77,9 @@ import org.apache.phoenix.schema.PIndexState;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTable.IndexType;
 import org.apache.phoenix.schema.TableRef;
+import org.apache.phoenix.util.ByteUtil;
 import org.apache.phoenix.util.ColumnInfo;
 import org.apache.phoenix.util.IndexUtil;
-import org.apache.phoenix.util.MetaDataUtil;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.QueryUtil;
 import org.apache.phoenix.util.SchemaUtil;
@@ -72,6 +87,8 @@ import org.apache.phoenix.util.TransactionUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.collect.Lists;
+
 /**
  * An MR job to populate the index table from the data table.
  *
@@ -85,7 +102,11 @@ public class IndexTool extends Configured implements Tool {
 private static final Option DATA_TABLE_OPTION = new Option("dt", 
"data-table", true,
 "Data table name (mandatory)");
 private static final Option INDEX_TABLE_OPTION = new Option("it", 
"index-table", true,
-"Index table name(mandatory)");
+"Index table name(not required in case of partial rebuilding)");
+
+private static final Option PARTIAL_REBUILD_OPTION = new Option("pr", 
"partial-rebuild", false,
+"To build indexes for a data table from least disabledTimeStamp");
+
 private static final Option DIRECT_API_OPTION = new Option("direct", 
"direct", false,
 "If specified, we avoid the bulk load (optional)");
 private static final Option RUN_FOREGROUND_OPTION =
@@ -105,6 +126,7 @@ public class IndexTool extends Configured implements Tool {
 options.addOption(SCHEMA_NAME_OPTION);
 options.addOption(DATA_TABLE_OPTION);
 options.addOption(INDEX_TABLE_OPTION);
+options.addOption(PARTIAL_REBUILD_OPTION);
 options.addOption(DIRECT_API_OPTION);
 options.addOption(RUN_FOREGROUND_OPTION);
 options.addOption(OUTPUT_PATH_OPTION);
@@ -139,18 +161,18 @@ public class 

[2/2] phoenix git commit: PHOENIX-2890 Extend IndexTool to allow incremental index rebuilds

2016-12-25 Thread ankit
PHOENIX-2890 Extend IndexTool to allow incremental index rebuilds


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/83827cd8
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/83827cd8
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/83827cd8

Branch: refs/heads/master
Commit: 83827cd8c2876c6b6dccf3a5678889b40a76261b
Parents: 70dc383
Author: Ankit Singhal 
Authored: Mon Dec 26 11:59:19 2016 +0530
Committer: Ankit Singhal 
Committed: Mon Dec 26 11:59:19 2016 +0530

--
 .../phoenix/end2end/AutomaticRebuildIT.java | 219 +
 .../end2end/IndexToolForPartialBuildIT.java | 298 
 ...olForPartialBuildWithNamespaceEnabledIT.java |  70 +++
 .../phoenix/end2end/index/IndexMetadataIT.java  |  58 +++
 .../end2end/index/MutableIndexFailureIT.java|  10 +-
 phoenix-core/src/main/antlr3/PhoenixSQL.g   |   4 +-
 .../coprocessor/MetaDataEndpointImpl.java   |   9 +-
 .../coprocessor/MetaDataRegionObserver.java | 291 +++-
 .../phoenix/exception/SQLExceptionCode.java |   3 +-
 .../index/PhoenixIndexFailurePolicy.java|  52 +--
 .../phoenix/jdbc/PhoenixDatabaseMetaData.java   |   2 +
 .../apache/phoenix/jdbc/PhoenixStatement.java   |   8 +-
 .../phoenix/mapreduce/index/IndexTool.java  | 455 +--
 .../phoenix/mapreduce/index/IndexToolUtil.java  |   6 +-
 .../index/PhoenixIndexImportDirectMapper.java   |   2 +-
 .../index/PhoenixIndexPartialBuildMapper.java   | 182 
 .../util/PhoenixConfigurationUtil.java  |  31 ++
 .../phoenix/parse/AlterIndexStatement.java  |   8 +-
 .../apache/phoenix/parse/ParseNodeFactory.java  |   6 +-
 .../org/apache/phoenix/query/QueryServices.java |   4 +
 .../apache/phoenix/schema/MetaDataClient.java   |  47 +-
 .../java/org/apache/phoenix/util/IndexUtil.java |  61 ++-
 22 files changed, 1504 insertions(+), 322 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/83827cd8/phoenix-core/src/it/java/org/apache/phoenix/end2end/AutomaticRebuildIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AutomaticRebuildIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AutomaticRebuildIT.java
new file mode 100644
index 000..cbb7745
--- /dev/null
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AutomaticRebuildIT.java
@@ -0,0 +1,219 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.end2end;
+
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
+import org.apache.hadoop.hbase.DoNotRetryIOException;
+import org.apache.hadoop.hbase.HBaseIOException;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.coprocessor.ObserverContext;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver;
+import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
+import org.apache.phoenix.query.QueryConstants;
+import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.query.QueryServicesOptions;
+import org.apache.phoenix.schema.PIndexState;
+import org.apache.phoenix.schema.PTableType;
+import org.apache.phoenix.util.PropertiesUtil;
+import 

Apache-Phoenix | Phoenix-4.8-HBase-1.2 | Build Successful

2016-12-25 Thread Apache Jenkins Server
Master branch build status Successful
Source repository https://git-wip-us.apache.org/repos/asf?p=phoenix.git;a=shortlog;h=refs/heads/4.8-HBase-1.2

Last Successful Compiled Artifacts https://builds.apache.org/job/Phoenix-4.8-HBase-1.2/lastSuccessfulBuild/artifact/

Last Complete Test Report https://builds.apache.org/job/Phoenix-4.8-HBase-1.2/lastCompletedBuild/testReport/

Changes
[elserj] PHOENIX-3245 Support DATE and TIMESTAMP in CONVERT_TZ()



Build times for last couple of runsLatest build time is the right most | Legend blue: normal, red: test failure, gray: timeout


Build failed in Jenkins: Phoenix-4.8-HBase-1.1 #59

2016-12-25 Thread Apache Jenkins Server
See 

Changes:

[elserj] PHOENIX-3245 Support DATE and TIMESTAMP in CONVERT_TZ()

--
[...truncated 707 lines...]
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 95.231 sec - in 
org.apache.phoenix.trace.PhoenixTracingEndToEndIT
Running org.apache.phoenix.tx.TxCheckpointIT
Tests run: 38, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 686.967 sec - 
in org.apache.phoenix.end2end.index.LocalIndexIT
Tests run: 20, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 196.932 sec - 
in org.apache.phoenix.tx.TxCheckpointIT
Tests run: 21, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: 303.597 sec - 
in org.apache.phoenix.tx.TransactionIT
Tests run: 40, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 947.698 sec - 
in org.apache.phoenix.end2end.index.MutableIndexIT
Tests run: 136, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1,865.851 sec 
- in org.apache.phoenix.end2end.index.IndexIT

Results :

Tests run: 1239, Failures: 0, Errors: 0, Skipped: 5

[INFO] 
[INFO] --- maven-failsafe-plugin:2.19.1:integration-test 
(HBaseManagedTimeTableReuseTest) @ phoenix-core ---

---
 T E S T S
---
Running org.apache.phoenix.end2end.AbsFunctionEnd2EndIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.245 sec - in 
org.apache.phoenix.end2end.AbsFunctionEnd2EndIT
Running org.apache.phoenix.end2end.ArithmeticQueryIT
Running org.apache.phoenix.end2end.ArraysWithNullsIT
Running org.apache.phoenix.end2end.ArrayToStringFunctionIT
Running org.apache.phoenix.end2end.AlterSessionIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.268 sec - in 
org.apache.phoenix.end2end.AlterSessionIT
Running org.apache.phoenix.end2end.AutoCommitIT
Running org.apache.phoenix.end2end.ArrayFillFunctionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.98 sec - in 
org.apache.phoenix.end2end.AutoCommitIT
Running org.apache.phoenix.end2end.CbrtFunctionEnd2EndIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.14 sec - in 
org.apache.phoenix.end2end.CbrtFunctionEnd2EndIT
Running org.apache.phoenix.end2end.ConvertTimezoneFunctionIT
Tests run: 26, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 30.72 sec - in 
org.apache.phoenix.end2end.ArrayFillFunctionIT
Running org.apache.phoenix.end2end.DecodeFunctionIT
Tests run: 36, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 37.591 sec - 
in org.apache.phoenix.end2end.ArrayToStringFunctionIT
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 34.452 sec - in 
org.apache.phoenix.end2end.ConvertTimezoneFunctionIT
Running org.apache.phoenix.end2end.DynamicFamilyIT
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 23.156 sec - in 
org.apache.phoenix.end2end.DecodeFunctionIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.205 sec - in 
org.apache.phoenix.end2end.DynamicFamilyIT
Running org.apache.phoenix.end2end.FirstValueFunctionIT
Running org.apache.phoenix.end2end.DynamicUpsertIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.265 sec - in 
org.apache.phoenix.end2end.DynamicUpsertIT
Tests run: 16, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 68.522 sec - 
in org.apache.phoenix.end2end.ArraysWithNullsIT
Running org.apache.phoenix.end2end.GetSetByteBitFunctionEnd2EndIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.772 sec - in 
org.apache.phoenix.end2end.GetSetByteBitFunctionEnd2EndIT
Running org.apache.phoenix.end2end.MD5FunctionIT
Running org.apache.phoenix.end2end.LikeExpressionIT
Running org.apache.phoenix.end2end.DistinctPrefixFilterIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.645 sec - in 
org.apache.phoenix.end2end.MD5FunctionIT
Running org.apache.phoenix.end2end.MinMaxAggregateFunctionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.165 sec - in 
org.apache.phoenix.end2end.MinMaxAggregateFunctionIT
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 15.047 sec - in 
org.apache.phoenix.end2end.LikeExpressionIT
Running org.apache.phoenix.end2end.NthValueFunctionIT
Running org.apache.phoenix.end2end.OctetLengthFunctionEnd2EndIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 27.496 sec - in 
org.apache.phoenix.end2end.FirstValueFunctionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.518 sec - in 
org.apache.phoenix.end2end.OctetLengthFunctionEnd2EndIT
Running org.apache.phoenix.end2end.PowerFunctionEnd2EndIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.015 sec - in 
org.apache.phoenix.end2end.PowerFunctionEnd2EndIT
Tests run: 26, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 100.869 sec - 
in org.apache.phoenix.end2end.ArithmeticQueryIT
Running org.apache.phoenix.end2end.PrimitiveTypeIT
Tests run: 9, Failures: 

Build failed in Jenkins: Phoenix | Master #1524

2016-12-25 Thread Apache Jenkins Server
See 

Changes:

[elserj] PHOENIX-3245 Support DATE and TIMESTAMP in CONVERT_TZ()

--
[...truncated 842 lines...]
Tests run: 15, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 183.317 sec - 
in org.apache.phoenix.end2end.CreateTableIT
Running org.apache.phoenix.end2end.SequenceBulkAllocationIT
Tests run: 105, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 115.532 sec - 
in org.apache.phoenix.end2end.GroupByIT
Running org.apache.phoenix.end2end.ReadIsolationLevelIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.021 sec - in 
org.apache.phoenix.end2end.ReadIsolationLevelIT
Running org.apache.phoenix.end2end.ToNumberFunctionIT
Tests run: 56, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 58.1 sec - in 
org.apache.phoenix.end2end.SequenceBulkAllocationIT
Tests run: 245, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 243.724 sec - 
in org.apache.phoenix.end2end.ClientTimeArithmeticQueryIT
Running org.apache.phoenix.end2end.TopNIT
Tests run: 18, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.319 sec - 
in org.apache.phoenix.end2end.ToNumberFunctionIT
Running org.apache.phoenix.end2end.TruncateFunctionIT
Running org.apache.phoenix.end2end.SequenceIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.939 sec - in 
org.apache.phoenix.end2end.TopNIT
Running org.apache.phoenix.end2end.UpsertValuesIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.656 sec - in 
org.apache.phoenix.end2end.TruncateFunctionIT
Running org.apache.phoenix.end2end.VariableLengthPKIT
Tests run: 119, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 106.771 sec - 
in org.apache.phoenix.end2end.ScanQueryIT
Running org.apache.phoenix.end2end.UpsertSelectIT
Tests run: 126, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 151.745 sec - 
in org.apache.phoenix.end2end.QueryIT
Tests run: 19, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 224.985 sec - 
in org.apache.phoenix.end2end.QueryDatabaseMetaDataIT
Running org.apache.phoenix.end2end.salted.SaltedTableIT
Tests run: 54, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 92.912 sec - 
in org.apache.phoenix.end2end.SequenceIT
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.969 sec - in 
org.apache.phoenix.end2end.salted.SaltedTableIT
Tests run: 50, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 92.814 sec - 
in org.apache.phoenix.end2end.VariableLengthPKIT
Running org.apache.phoenix.rpc.UpdateCacheWithScnIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.452 sec - in 
org.apache.phoenix.rpc.UpdateCacheWithScnIT
Tests run: 46, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 245.372 sec - 
in org.apache.phoenix.end2end.RowValueConstructorIT
Tests run: 25, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 241.316 sec - 
in org.apache.phoenix.end2end.UpsertValuesIT
Tests run: 22, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 257.535 sec - 
in org.apache.phoenix.end2end.UpsertSelectIT

Results :

Tests run: 1359, Failures: 0, Errors: 0, Skipped: 0

[INFO] 
[INFO] --- maven-failsafe-plugin:2.19.1:integration-test 
(HBaseManagedTimeTests) @ phoenix-core ---

---
 T E S T S
---

Results :

Tests run: 0, Failures: 0, Errors: 0, Skipped: 0

[INFO] 
[INFO] --- maven-failsafe-plugin:2.19.1:integration-test 
(NeedTheirOwnClusterTests) @ phoenix-core ---

---
 T E S T S
---
Running 
org.apache.hadoop.hbase.regionserver.wal.WALReplayWithIndexWritesAndCompressedWALIT
Running org.apache.phoenix.end2end.ConnectionUtilIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 50.484 sec - in 
org.apache.hadoop.hbase.regionserver.wal.WALReplayWithIndexWritesAndCompressedWALIT
Running org.apache.phoenix.end2end.CountDistinctCompressionIT
Running org.apache.phoenix.end2end.CsvBulkLoadToolIT
Running org.apache.phoenix.end2end.ContextClassloaderIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 47.981 sec - in 
org.apache.phoenix.end2end.ConnectionUtilIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.498 sec - in 
org.apache.phoenix.end2end.CountDistinctCompressionIT
Running org.apache.phoenix.end2end.FlappingLocalIndexIT
Running org.apache.phoenix.end2end.IndexExtendedIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.936 sec - in 
org.apache.phoenix.end2end.ContextClassloaderIT
Running org.apache.phoenix.end2end.QueryWithLimitIT
Running org.apache.phoenix.end2end.QueryTimeoutIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.318 sec - in 
org.apache.phoenix.end2end.QueryWithLimitIT
Running org.apache.phoenix.end2end.RenewLeaseIT
Running org.apache.phoenix.end2end.SpillableGroupByIT
Running 

Apache-Phoenix | 4.x-HBase-0.98 | Build Successful

2016-12-25 Thread Apache Jenkins Server
4.x-HBase-0.98 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf?p=phoenix.git;a=shortlog;h=refs/heads/4.x-HBase-0.98

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.x-HBase-0.98/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.x-HBase-0.98/lastCompletedBuild/testReport/

Changes
[elserj] PHOENIX-3245 Support DATE and TIMESTAMP in CONVERT_TZ()



Build times for last couple of runsLatest build time is the right most | Legend blue: normal, red: test failure, gray: timeout


Apache-Phoenix | 4.8-HBase-1.0 | Build Successful

2016-12-25 Thread Apache Jenkins Server
4.8-HBase-1.0 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf?p=phoenix.git;a=shortlog;h=refs/heads/4.8-HBase-1.0

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.8-HBase-1.0/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.8-HBase-1.0/lastCompletedBuild/testReport/

Changes
[elserj] PHOENIX-3245 Support DATE and TIMESTAMP in CONVERT_TZ()



Build times for last couple of runsLatest build time is the right most | Legend blue: normal, red: test failure, gray: timeout


[1/7] phoenix git commit: PHOENIX-3245 Support DATE and TIMESTAMP in CONVERT_TZ()

2016-12-25 Thread elserj
Repository: phoenix
Updated Branches:
  refs/heads/4.8-HBase-0.98 d7d28f19b -> 28bf0ef55
  refs/heads/4.8-HBase-1.0 b7cd8b500 -> 265a5b10c
  refs/heads/4.8-HBase-1.1 d5d856391 -> c8499e536
  refs/heads/4.8-HBase-1.2 45b789223 -> e55a27901
  refs/heads/4.x-HBase-0.98 54b7c218d -> b477e5410
  refs/heads/4.x-HBase-1.1 cce9a9f35 -> c1b8d79c2
  refs/heads/master e23634a35 -> 70dc3836a


PHOENIX-3245 Support DATE and TIMESTAMP in CONVERT_TZ()


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/70dc3836
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/70dc3836
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/70dc3836

Branch: refs/heads/master
Commit: 70dc3836ac4d472bb55e79a723a1d2a1d30d75de
Parents: e23634a
Author: Josh Elser 
Authored: Wed Nov 9 13:30:15 2016 -0500
Committer: Josh Elser 
Committed: Sun Dec 25 21:44:03 2016 -0500

--
 .../end2end/ConvertTimezoneFunctionIT.java  | 22 +++-
 .../function/ConvertTimezoneFunction.java   |  5 +++--
 2 files changed, 24 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/70dc3836/phoenix-core/src/it/java/org/apache/phoenix/end2end/ConvertTimezoneFunctionIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ConvertTimezoneFunctionIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ConvertTimezoneFunctionIT.java
index 229e705..a51b6c9 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ConvertTimezoneFunctionIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ConvertTimezoneFunctionIT.java
@@ -39,7 +39,7 @@ import org.junit.Test;
 public class ConvertTimezoneFunctionIT extends ParallelStatsDisabledIT {
 
 @Test
-public void testConvertTimezoneEurope() throws Exception {
+public void testDateConvertTimezoneEurope() throws Exception {
 Connection conn = DriverManager.getConnection(getUrl());
 String timezone_offset_test = generateUniqueName();
 String ddl = "CREATE TABLE IF NOT EXISTS " + timezone_offset_test
@@ -59,6 +59,26 @@ public class ConvertTimezoneFunctionIT extends 
ParallelStatsDisabledIT {
 }
 
 @Test
+public void testTimestampConvertTimezoneEurope() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+String timezone_offset_test = generateUniqueName();
+String ddl = "CREATE TABLE IF NOT EXISTS " + timezone_offset_test
++ " (k1 INTEGER NOT NULL, timestamps TIMESTAMP CONSTRAINT pk 
PRIMARY KEY (k1))";
+conn.createStatement().execute(ddl);
+String dml = "UPSERT INTO " + timezone_offset_test
++ " (k1, timestamps) VALUES (1, TO_TIMESTAMP('2014-03-01 
00:00:00'))";
+conn.createStatement().execute(dml);
+conn.commit();
+
+ResultSet rs = conn.createStatement().executeQuery(
+"SELECT k1, timestamps, CONVERT_TZ(timestamps, 'UTC', 
'Europe/Prague') FROM "
++ timezone_offset_test);
+
+assertTrue(rs.next());
+assertEquals(1393635600000L, rs.getDate(3).getTime()); //Sat, 01 Mar 2014 01:00:00
+}
+
+@Test
 public void testConvertTimezoneAmerica() throws Exception {
 Connection conn = DriverManager.getConnection(getUrl());
 String timezone_offset_test = generateUniqueName();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/70dc3836/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ConvertTimezoneFunction.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ConvertTimezoneFunction.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ConvertTimezoneFunction.java
index f06ddbc..8d13ab6 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ConvertTimezoneFunction.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ConvertTimezoneFunction.java
@@ -23,10 +23,11 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.phoenix.cache.JodaTimezoneCache;
 import org.apache.phoenix.expression.Expression;
 import org.apache.phoenix.parse.FunctionParseNode;
+import org.apache.phoenix.schema.tuple.Tuple;
 import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.schema.types.PDate;
+import org.apache.phoenix.schema.types.PTimestamp;
 import org.apache.phoenix.schema.types.PVarchar;
-import org.apache.phoenix.schema.tuple.Tuple;
 import org.joda.time.DateTimeZone;
 
 /**
@@ -35,7 +36,7 @@ import org.joda.time.DateTimeZone;
  *
  */
 @FunctionParseNode.BuiltInFunction(name = 

[4/7] phoenix git commit: PHOENIX-3245 Support DATE and TIMESTAMP in CONVERT_TZ()

2016-12-25 Thread elserj
PHOENIX-3245 Support DATE and TIMESTAMP in CONVERT_TZ()


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/e55a2790
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/e55a2790
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/e55a2790

Branch: refs/heads/4.8-HBase-1.2
Commit: e55a2790156404b34c57548fd5605061bb9b64e6
Parents: 45b7892
Author: Josh Elser 
Authored: Wed Nov 9 13:30:15 2016 -0500
Committer: Josh Elser 
Committed: Sun Dec 25 22:00:47 2016 -0500

--
 .../end2end/ConvertTimezoneFunctionIT.java  | 22 +++-
 .../function/ConvertTimezoneFunction.java   |  5 +++--
 2 files changed, 24 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/e55a2790/phoenix-core/src/it/java/org/apache/phoenix/end2end/ConvertTimezoneFunctionIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ConvertTimezoneFunctionIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ConvertTimezoneFunctionIT.java
index 1fc4b2c..ea7a9b2 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ConvertTimezoneFunctionIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ConvertTimezoneFunctionIT.java
@@ -39,7 +39,7 @@ import org.junit.Test;
 public class ConvertTimezoneFunctionIT extends 
BaseHBaseManagedTimeTableReuseIT {
 
 @Test
-public void testConvertTimezoneEurope() throws Exception {
+public void testDateConvertTimezoneEurope() throws Exception {
 Connection conn = DriverManager.getConnection(getUrl());
 String timezone_offset_test = generateRandomString();
 String ddl = "CREATE TABLE IF NOT EXISTS " + timezone_offset_test
@@ -59,6 +59,26 @@ public class ConvertTimezoneFunctionIT extends 
BaseHBaseManagedTimeTableReuseIT
 }
 
 @Test
+public void testTimestampConvertTimezoneEurope() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+String timezone_offset_test = generateRandomString();
+String ddl = "CREATE TABLE IF NOT EXISTS " + timezone_offset_test
++ " (k1 INTEGER NOT NULL, timestamps TIMESTAMP CONSTRAINT pk 
PRIMARY KEY (k1))";
+conn.createStatement().execute(ddl);
+String dml = "UPSERT INTO " + timezone_offset_test
++ " (k1, timestamps) VALUES (1, TO_TIMESTAMP('2014-03-01 
00:00:00'))";
+conn.createStatement().execute(dml);
+conn.commit();
+
+ResultSet rs = conn.createStatement().executeQuery(
+"SELECT k1, timestamps, CONVERT_TZ(timestamps, 'UTC', 
'Europe/Prague') FROM "
++ timezone_offset_test);
+
+assertTrue(rs.next());
+assertEquals(1393635600000L, rs.getDate(3).getTime()); //Sat, 01 Mar 2014 01:00:00
+}
+
+@Test
 public void testConvertTimezoneAmerica() throws Exception {
 Connection conn = DriverManager.getConnection(getUrl());
 String timezone_offset_test = generateRandomString();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/e55a2790/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ConvertTimezoneFunction.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ConvertTimezoneFunction.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ConvertTimezoneFunction.java
index f06ddbc..8d13ab6 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ConvertTimezoneFunction.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ConvertTimezoneFunction.java
@@ -23,10 +23,11 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.phoenix.cache.JodaTimezoneCache;
 import org.apache.phoenix.expression.Expression;
 import org.apache.phoenix.parse.FunctionParseNode;
+import org.apache.phoenix.schema.tuple.Tuple;
 import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.schema.types.PDate;
+import org.apache.phoenix.schema.types.PTimestamp;
 import org.apache.phoenix.schema.types.PVarchar;
-import org.apache.phoenix.schema.tuple.Tuple;
 import org.joda.time.DateTimeZone;
 
 /**
@@ -35,7 +36,7 @@ import org.joda.time.DateTimeZone;
  *
  */
 @FunctionParseNode.BuiltInFunction(name = ConvertTimezoneFunction.NAME, args = 
{
-@FunctionParseNode.Argument(allowedTypes = { PDate.class }),
+@FunctionParseNode.Argument(allowedTypes = { PTimestamp.class }),
 @FunctionParseNode.Argument(allowedTypes = { PVarchar.class }),
 @FunctionParseNode.Argument(allowedTypes = { PVarchar.class })})
 public class ConvertTimezoneFunction extends 

[7/7] phoenix git commit: PHOENIX-3245 Support DATE and TIMESTAMP in CONVERT_TZ()

2016-12-25 Thread elserj
PHOENIX-3245 Support DATE and TIMESTAMP in CONVERT_TZ()


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/28bf0ef5
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/28bf0ef5
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/28bf0ef5

Branch: refs/heads/4.8-HBase-0.98
Commit: 28bf0ef55ee67e66e99958e4b9700d21de7e7d94
Parents: d7d28f1
Author: Josh Elser 
Authored: Wed Nov 9 13:30:15 2016 -0500
Committer: Josh Elser 
Committed: Sun Dec 25 22:16:09 2016 -0500

--
 .../end2end/ConvertTimezoneFunctionIT.java  | 22 +++-
 .../function/ConvertTimezoneFunction.java   |  5 +++--
 2 files changed, 24 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/28bf0ef5/phoenix-core/src/it/java/org/apache/phoenix/end2end/ConvertTimezoneFunctionIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ConvertTimezoneFunctionIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ConvertTimezoneFunctionIT.java
index 1fc4b2c..ea7a9b2 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ConvertTimezoneFunctionIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ConvertTimezoneFunctionIT.java
@@ -39,7 +39,7 @@ import org.junit.Test;
 public class ConvertTimezoneFunctionIT extends 
BaseHBaseManagedTimeTableReuseIT {
 
 @Test
-public void testConvertTimezoneEurope() throws Exception {
+public void testDateConvertTimezoneEurope() throws Exception {
 Connection conn = DriverManager.getConnection(getUrl());
 String timezone_offset_test = generateRandomString();
 String ddl = "CREATE TABLE IF NOT EXISTS " + timezone_offset_test
@@ -59,6 +59,26 @@ public class ConvertTimezoneFunctionIT extends 
BaseHBaseManagedTimeTableReuseIT
 }
 
 @Test
+public void testTimestampConvertTimezoneEurope() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+String timezone_offset_test = generateRandomString();
+String ddl = "CREATE TABLE IF NOT EXISTS " + timezone_offset_test
++ " (k1 INTEGER NOT NULL, timestamps TIMESTAMP CONSTRAINT pk 
PRIMARY KEY (k1))";
+conn.createStatement().execute(ddl);
+String dml = "UPSERT INTO " + timezone_offset_test
++ " (k1, timestamps) VALUES (1, TO_TIMESTAMP('2014-03-01 
00:00:00'))";
+conn.createStatement().execute(dml);
+conn.commit();
+
+ResultSet rs = conn.createStatement().executeQuery(
+"SELECT k1, timestamps, CONVERT_TZ(timestamps, 'UTC', 
'Europe/Prague') FROM "
++ timezone_offset_test);
+
+assertTrue(rs.next());
+assertEquals(1393635600000L, rs.getDate(3).getTime()); //Sat, 01 Mar 2014 01:00:00
+}
+
+@Test
 public void testConvertTimezoneAmerica() throws Exception {
 Connection conn = DriverManager.getConnection(getUrl());
 String timezone_offset_test = generateRandomString();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/28bf0ef5/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ConvertTimezoneFunction.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ConvertTimezoneFunction.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ConvertTimezoneFunction.java
index f06ddbc..8d13ab6 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ConvertTimezoneFunction.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/ConvertTimezoneFunction.java
@@ -23,10 +23,11 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.phoenix.cache.JodaTimezoneCache;
 import org.apache.phoenix.expression.Expression;
 import org.apache.phoenix.parse.FunctionParseNode;
+import org.apache.phoenix.schema.tuple.Tuple;
 import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.schema.types.PDate;
+import org.apache.phoenix.schema.types.PTimestamp;
 import org.apache.phoenix.schema.types.PVarchar;
-import org.apache.phoenix.schema.tuple.Tuple;
 import org.joda.time.DateTimeZone;
 
 /**
@@ -35,7 +36,7 @@ import org.joda.time.DateTimeZone;
  *
  */
 @FunctionParseNode.BuiltInFunction(name = ConvertTimezoneFunction.NAME, args = 
{
-@FunctionParseNode.Argument(allowedTypes = { PDate.class }),
+@FunctionParseNode.Argument(allowedTypes = { PTimestamp.class }),
 @FunctionParseNode.Argument(allowedTypes = { PVarchar.class }),
 @FunctionParseNode.Argument(allowedTypes = { PVarchar.class })})
 public class ConvertTimezoneFunction