(phoenix-queryserver) branch master updated: PHOENIX-7143 Detect JVM version and add the necessary module flags in startup scripts (#142)

2023-12-11 Thread ssa
This is an automated email from the ASF dual-hosted git repository.

ssa pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/phoenix-queryserver.git


The following commit(s) were added to refs/heads/master by this push:
 new da71f55  PHOENIX-7143 Detect JVM version and add the necessary module 
flags in startup scripts (#142)
da71f55 is described below

commit da71f55dcf4216024b659126d1258ded2e2d19ef
Author: Istvan Toth 
AuthorDate: Tue Dec 12 02:10:47 2023 +0100

PHOENIX-7143 Detect JVM version and add the necessary module flags in 
startup scripts (#142)
---
 bin/phoenix_queryserver_utils.py | 81 
 bin/queryserver.py   | 47 +--
 bin/sqlline-thin.py  |  3 +-
 3 files changed, 100 insertions(+), 31 deletions(-)

diff --git a/bin/phoenix_queryserver_utils.py b/bin/phoenix_queryserver_utils.py
index 840c1ad..2d66435 100755
--- a/bin/phoenix_queryserver_utils.py
+++ b/bin/phoenix_queryserver_utils.py
@@ -21,6 +21,7 @@
 
 import os
 import fnmatch
+import re
 import subprocess
 
 def find(pattern, classPaths):
@@ -172,6 +173,81 @@ def setPath():
 logging_jar += 
":"+findFileInPathWithoutRecursion(LOGGING_JAR_PATTERN2, 
os.path.join(current_dir, "..","lib"))
 logging_jar += 
":"+findFileInPathWithoutRecursion(LOGGING_JAR_PATTERN3, 
os.path.join(current_dir, "..","lib"))
 
+__set_java_home()
+__set_jvm_flags()
+return ""
+
+
+def __set_java_home():
+global hbase_env
+global java_home
+global java
+java_home = os.getenv('JAVA_HOME')
+java = 'java'
+
+# HBase configuration folder path (where hbase-site.xml resides) for
+# HBase/Phoenix client side property override
+hbase_config_path = hbase_conf_dir
+
+# load hbase-env.??? to extract JAVA_HOME, HBASE_PID_DIR, HBASE_LOG_DIR
+hbase_env_path = None
+hbase_env_cmd  = None
+if os.name == 'posix':
+hbase_env_path = os.path.join(hbase_config_path, 'hbase-env.sh')
+hbase_env_cmd = ['bash', '-c', 'source %s && env' % hbase_env_path]
+elif os.name == 'nt':
+hbase_env_path = os.path.join(hbase_config_path, 'hbase-env.cmd')
+hbase_env_cmd = ['cmd.exe', '/c', 'call %s & set' % hbase_env_path]
+if not hbase_env_path or not hbase_env_cmd:
+sys.stderr.write("hbase-env file unknown on platform 
{}{}".format(os.name, os.linesep))
+sys.exit(-1)
+
+hbase_env = {}
+if os.path.isfile(hbase_env_path):
+p = subprocess.Popen(hbase_env_cmd, stdout = subprocess.PIPE)
+for x in p.stdout:
+(k, _, v) = tryDecode(x).partition('=')
+hbase_env[k.strip()] = v.strip()
+
+if 'JAVA_HOME' in hbase_env:
+java_home = hbase_env['JAVA_HOME']
+
+if java_home:
+java = os.path.join(java_home, 'bin', 'java')
+
+return ""
+
+
+def __set_jvm_flags():
+global jvm_module_flags
+jvm_module_flags = ""
+# This should be ASCII
+version_output = subprocess.check_output([java, "-version"], 
stderr=subprocess.STDOUT).decode()
+version_output = tryDecode(version_output)
+m = re.search(r'version\s"(\d+)\.(\d+)', version_output)
+if (m is None):
+# Could not find version
+return ""
+major = m.group(1)
+minor = m.group(2)
+if (major is None or minor is None):
+# Could not identify version
+return ""
+if (minor == "1"):
+major = minor
+if (int(major) >= 11):
+# Copied from hbase startup script
+jvm_module_flags = 
"-Dorg.apache.hbase.thirdparty.io.netty.tryReflectionSetAccessible=true \
+--add-modules jdk.unsupported \
+--add-opens java.base/java.nio=ALL-UNNAMED \
+--add-opens java.base/sun.nio.ch=ALL-UNNAMED \
+--add-opens java.base/java.lang=ALL-UNNAMED \
+--add-opens java.base/jdk.internal.ref=ALL-UNNAMED \
+--add-opens java.base/java.lang.reflect=ALL-UNNAMED \
+--add-exports java.base/jdk.internal.misc=ALL-UNNAMED \
+--add-exports java.security.jgss/sun.security.krb5=ALL-UNNAMED \
+--add-exports java.base/sun.net.dns=ALL-UNNAMED \
+--add-exports java.base/sun.net.util=ALL-UNNAMED"
 return ""
 
 def shell_quote(args):
@@ -208,3 +284,8 @@ if __name__ == "__main__":
 print("phoenix_thin_client_jar:", phoenix_thin_client_jar)
 print("sqlline_with_deps_jar", sqlline_with_deps_jar)
 print("slf4j_backend_jar:", slf4j_backend_jar)
+print("java_home:", java_home)
+print("java:", java)
+print("jvm_module_flags:", jvm_module_flags)
+print("hbase_env:", hbase_env)
+
diff --git a/bin/queryserver.py b/bin/queryserver.py
index 9609157..8120d9b 100755
--- a/bin/queryserver.py
+++ b/bin/queryserver.py
@@ -82,33 +82,

[phoenix] branch 5.1 updated: PHOENIX-6855 Upgrade from 4.7 to 5+ fails if any of the local indexes exist.

2023-01-18 Thread ssa
This is an automated email from the ASF dual-hosted git repository.

ssa pushed a commit to branch 5.1
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/5.1 by this push:
 new 335c52fc07 PHOENIX-6855 Upgrade from 4.7 to 5+ fails if any of the 
local indexes exist.
335c52fc07 is described below

commit 335c52fc07e217388a12bfa0fab5388b4cc9cef9
Author: Sergey Soldatov 
AuthorDate: Mon Jan 16 21:53:01 2023 -0800

PHOENIX-6855 Upgrade from 4.7 to 5+ fails if any of the local indexes exist.
---
 .../org/apache/phoenix/query/ConnectionQueryServicesImpl.java  | 10 +-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index 99c434f924..86ed6713ee 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@ -374,6 +374,7 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 private final int maxInternalConnectionsAllowed;
 private final boolean shouldThrottleNumConnections;
 public static final byte[] MUTEX_LOCKED = 
"MUTEX_LOCKED".getBytes(StandardCharsets.UTF_8);
+private boolean localIndexUpgradeRequired;
 
 private static interface FeatureSupported {
 boolean isSupported(ConnectionQueryServices services);
@@ -3746,7 +3747,7 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 metaConnection = UpgradeUtil.disableViewIndexes(metaConnection);
 if 
(getProps().getBoolean(QueryServices.LOCAL_INDEX_CLIENT_UPGRADE_ATTRIB,
   QueryServicesOptions.DEFAULT_LOCAL_INDEX_CLIENT_UPGRADE)) {
-metaConnection = 
UpgradeUtil.upgradeLocalIndexes(metaConnection);
+localIndexUpgradeRequired = true;
 }
 ConnectionQueryServicesImpl.this.removeTable(null,
   PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME, null,
@@ -4004,6 +4005,13 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 // snapshot entries
 metaConnection = upgradeOtherSystemTablesIfRequired(metaConnection,
 moveChildLinks, systemTableToSnapshotMap);
+
+// Once the system tables are upgraded the local index upgrade can 
be done
+if (localIndexUpgradeRequired) {
+LOGGER.info("Upgrading local indexes");
+metaConnection = 
UpgradeUtil.upgradeLocalIndexes(metaConnection);
+}
+
 // Synchronize necessary properties amongst all column families of 
a base table
 // and its indexes. See PHOENIX-3955
 if (syncAllTableAndIndexProps) {



[phoenix] branch master updated: PHOENIX-6855 Upgrade from 4.7 to 5+ fails if any of the local indexes exist. (#1550)

2023-01-18 Thread ssa
This is an automated email from the ASF dual-hosted git repository.

ssa pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/master by this push:
 new d2acf9c262 PHOENIX-6855 Upgrade from 4.7 to 5+ fails if any of the 
local indexes exist. (#1550)
d2acf9c262 is described below

commit d2acf9c2620e2caf636022fc759844d5c46cb455
Author: Sergey Soldatov 
AuthorDate: Wed Jan 18 06:43:20 2023 -0800

PHOENIX-6855 Upgrade from 4.7 to 5+ fails if any of the local indexes 
exist. (#1550)
---
 .../org/apache/phoenix/query/ConnectionQueryServicesImpl.java  | 10 +-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index 84abaee0b6..4a1256f054 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@ -392,6 +392,7 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 private final boolean shouldThrottleNumConnections;
 public static final byte[] MUTEX_LOCKED = 
"MUTEX_LOCKED".getBytes(StandardCharsets.UTF_8);
 private ServerSideRPCControllerFactory serverSideRPCControllerFactory;
+private boolean localIndexUpgradeRequired;
 
 private static interface FeatureSupported {
 boolean isSupported(ConnectionQueryServices services);
@@ -3933,7 +3934,7 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 metaConnection = UpgradeUtil.disableViewIndexes(metaConnection);
 if 
(getProps().getBoolean(QueryServices.LOCAL_INDEX_CLIENT_UPGRADE_ATTRIB,
   QueryServicesOptions.DEFAULT_LOCAL_INDEX_CLIENT_UPGRADE)) {
-metaConnection = 
UpgradeUtil.upgradeLocalIndexes(metaConnection);
+localIndexUpgradeRequired = true;
 }
 ConnectionQueryServicesImpl.this.removeTable(null,
   PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME, null,
@@ -4207,6 +4208,13 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 // snapshot entries
 metaConnection = upgradeOtherSystemTablesIfRequired(metaConnection,
 moveChildLinks, systemTableToSnapshotMap);
+
+// Once the system tables are upgraded the local index upgrade can 
be done
+if (localIndexUpgradeRequired) {
+LOGGER.info("Upgrading local indexes");
+metaConnection = 
UpgradeUtil.upgradeLocalIndexes(metaConnection);
+}
+
 // Synchronize necessary properties amongst all column families of 
a base table
 // and its indexes. See PHOENIX-3955
 if (syncAllTableAndIndexProps) {



[phoenix] branch 5.1 updated: PHOENIX-6579 ACL check doesn't honor the namespace mapping for mapped views.

2022-01-11 Thread ssa
This is an automated email from the ASF dual-hosted git repository.

ssa pushed a commit to branch 5.1
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/5.1 by this push:
 new 78e9060  PHOENIX-6579 ACL check doesn't honor the namespace mapping 
for mapped views.
78e9060 is described below

commit 78e9060186d3db3b71436a181f59f378c7b9302f
Author: Sergey Soldatov 
AuthorDate: Fri Oct 29 20:29:56 2021 +0300

PHOENIX-6579 ACL check doesn't honor the namespace mapping for mapped views.
---
 .../apache/phoenix/end2end/BasePermissionsIT.java  |  6 +--
 .../phoenix/end2end/PermissionNSEnabledIT.java | 45 ++
 .../phoenix/coprocessor/MetaDataEndpointImpl.java  |  4 +-
 .../coprocessor/PhoenixAccessController.java   | 27 +++--
 4 files changed, 66 insertions(+), 16 deletions(-)

diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BasePermissionsIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BasePermissionsIT.java
index fe966dd..77ae600 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BasePermissionsIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BasePermissionsIT.java
@@ -99,7 +99,7 @@ public abstract class BasePermissionsIT extends BaseTest {
 
 private static final String SUPER_USER = System.getProperty("user.name");
 
-private static HBaseTestingUtility testUtil;
+static HBaseTestingUtility testUtil;
 private static final Set PHOENIX_SYSTEM_TABLES =
 new HashSet<>(Arrays.asList("SYSTEM.CATALOG", "SYSTEM.SEQUENCE", 
"SYSTEM.STATS",
 "SYSTEM.FUNCTION", "SYSTEM.MUTEX", "SYSTEM.CHILD_LINK"));
@@ -365,7 +365,7 @@ public abstract class BasePermissionsIT extends BaseTest {
 // UG Object
 // 1. Instance of String --> represents GROUP name
 // 2. Instance of User --> represents HBase user
-private AccessTestAction grantPermissions(final String actions, final 
Object ug,
+AccessTestAction grantPermissions(final String actions, final Object ug,
   final String tableOrSchemaList, final 
boolean isSchema) throws SQLException {
 return grantPermissions(actions, ug, 
Collections.singleton(tableOrSchemaList), isSchema);
 }
@@ -958,7 +958,7 @@ public abstract class BasePermissionsIT extends BaseTest {
 }
 }
 
-private String surroundWithDoubleQuotes(String input) {
+String surroundWithDoubleQuotes(String input) {
 return "\"" + input + "\"";
 }
 
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/PermissionNSEnabledIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/PermissionNSEnabledIT.java
index 7a2a995..292654f 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/PermissionNSEnabledIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/PermissionNSEnabledIT.java
@@ -18,8 +18,14 @@
 package org.apache.phoenix.end2end;
 
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.hbase.security.access.Permission;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.util.SchemaUtil;
 import org.junit.BeforeClass;
@@ -29,11 +35,13 @@ import org.junit.experimental.categories.Category;
 import java.security.PrivilegedExceptionAction;
 import java.sql.Connection;
 import java.sql.SQLException;
+import java.sql.Statement;
 import java.util.Collections;
 
 import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CATALOG_TABLE;
 import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CHILD_LINK_TABLE;
 import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_SCHEMA_NAME;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
@@ -48,6 +56,43 @@ public class PermissionNSEnabledIT extends BasePermissionsIT 
{
 public static synchronized void doSetup() throws Exception {
 BasePermissionsIT.initCluster(true);
 }
+private AccessTestAction createMappedView(final String schemaName, final 
String tableName) throws SQLException {
+return new AccessTestAction() {
+@Override
+public Object run() throws Exception {
+try (Connection conn = getConnection(); Statement stmt = 
conn.createStatement();) {
+String viewStmtSQL = "CREATE VIEW \&

[1/3] phoenix git commit: PHOENIX-3236 Problem with shading apache commons on Azure.

2018-08-24 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.3 a575ac04e -> 79e833750
  refs/heads/4.x-HBase-1.4 3b9a108f3 -> fd80cb87e
  refs/heads/master 5947b6ade -> 16c5570d3


PHOENIX-3236 Problem with shading apache commons on Azure.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/16c5570d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/16c5570d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/16c5570d

Branch: refs/heads/master
Commit: 16c5570d38a3aa0527cbbdef21fd3859be4e729f
Parents: 5947b6a
Author: Sergey Soldatov 
Authored: Thu Sep 1 00:08:14 2016 -0700
Committer: Sergey Soldatov 
Committed: Fri Aug 24 10:56:19 2018 -0700

--
 phoenix-client/pom.xml | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/16c5570d/phoenix-client/pom.xml
--
diff --git a/phoenix-client/pom.xml b/phoenix-client/pom.xml
index 6189bba..65c2298 100644
--- a/phoenix-client/pom.xml
+++ b/phoenix-client/pom.xml
@@ -275,6 +275,7 @@
   
 org.apache.commons.csv.**
 org.apache.commons.logging.**
+org.apache.commons.configuration.**
   
 
 



[3/3] phoenix git commit: PHOENIX-3236 Problem with shading apache commons on Azure.

2018-08-24 Thread ssa
PHOENIX-3236 Problem with shading apache commons on Azure.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/fd80cb87
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/fd80cb87
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/fd80cb87

Branch: refs/heads/4.x-HBase-1.4
Commit: fd80cb87e2821e8f88a5c9c5a860cf3bdfea1b70
Parents: 3b9a108
Author: Sergey Soldatov 
Authored: Thu Sep 1 00:08:14 2016 -0700
Committer: Sergey Soldatov 
Committed: Fri Aug 24 10:57:55 2018 -0700

--
 phoenix-client/pom.xml | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/fd80cb87/phoenix-client/pom.xml
--
diff --git a/phoenix-client/pom.xml b/phoenix-client/pom.xml
index 56cdfbf..8d37c8b 100644
--- a/phoenix-client/pom.xml
+++ b/phoenix-client/pom.xml
@@ -275,6 +275,7 @@
   
 org.apache.commons.csv.**
 org.apache.commons.logging.**
+org.apache.commons.configuration.**
   
 
 



[2/3] phoenix git commit: PHOENIX-3236 Problem with shading apache commons on Azure.

2018-08-24 Thread ssa
PHOENIX-3236 Problem with shading apache commons on Azure.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/79e83375
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/79e83375
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/79e83375

Branch: refs/heads/4.x-HBase-1.3
Commit: 79e8337500b171f02b8740c0a809a2ffb9d0d1cd
Parents: a575ac0
Author: Sergey Soldatov 
Authored: Thu Sep 1 00:08:14 2016 -0700
Committer: Sergey Soldatov 
Committed: Fri Aug 24 10:57:37 2018 -0700

--
 phoenix-client/pom.xml | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/79e83375/phoenix-client/pom.xml
--
diff --git a/phoenix-client/pom.xml b/phoenix-client/pom.xml
index c8fb6aa..b3cc9e5 100644
--- a/phoenix-client/pom.xml
+++ b/phoenix-client/pom.xml
@@ -275,6 +275,7 @@
   
 org.apache.commons.csv.**
 org.apache.commons.logging.**
+org.apache.commons.configuration.**
   
 
 



[1/5] phoenix git commit: PHOENIX-3991 ROW_TIMESTAMP on TIMESTAMP column type throws ArrayOutOfBound when upserting without providing a value.

2018-07-26 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.2 faf4fb264 -> 67731c4db
  refs/heads/4.x-HBase-1.3 1f7e3206b -> b37b7d750
  refs/heads/4.x-HBase-1.4 6b363b3a2 -> a1d036676
  refs/heads/5.x-HBase-2.0 8a874cc95 -> accde3c7c
  refs/heads/master 8a874cc95 -> 6641f42d1


PHOENIX-3991 ROW_TIMESTAMP on TIMESTAMP column type throws ArrayOutOfBound when 
upserting without providing a value.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6641f42d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6641f42d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6641f42d

Branch: refs/heads/master
Commit: 6641f42d1f71864ed8bb1d00a1c082bcbff25ad7
Parents: 8a874cc
Author: Sergey Soldatov 
Authored: Wed Jul 25 12:48:03 2018 -0700
Committer: Sergey Soldatov 
Committed: Thu Jul 26 13:37:23 2018 -0700

--
 .../apache/phoenix/end2end/RowTimestampIT.java  | 26 +---
 .../apache/phoenix/execute/MutationState.java   |  6 -
 2 files changed, 27 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6641f42d/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java
index 509e305..e873fdf 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java
@@ -75,13 +75,22 @@ public class RowTimestampIT extends ParallelStatsDisabledIT 
{
 }
 
 @Test
-public void testWithUpsertingRowTimestampColSpecified() throws Exception {
+public void testUpsertingRowTimestampColSpecifiedWithTimestamp() throws 
Exception {
+upsertingRowTimestampColSpecified("TIMESTAMP");
+}
+
+@Test
+public void testUpsertingRowTimestampColSpecifiedWithDate() throws 
Exception {
+upsertingRowTimestampColSpecified("DATE");
+}
+
+private void upsertingRowTimestampColSpecified(String type) throws 
Exception {
 String tableName = generateUniqueName();
 String indexName = generateUniqueName();
 try (Connection conn = DriverManager.getConnection(getUrl())) {
 conn.createStatement()
 .execute("CREATE TABLE IF NOT EXISTS " + tableName
-+ " (PK1 VARCHAR NOT NULL, PK2 DATE NOT NULL, KV1 
VARCHAR, KV2 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 "
++ " (PK1 VARCHAR NOT NULL, PK2 " + type + " NOT 
NULL, KV1 VARCHAR, KV2 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 "
 + sortOrder + " ROW_TIMESTAMP)) " + 
tableDDLOptions);
 }
 try (Connection conn = DriverManager.getConnection(getUrl())) {
@@ -195,14 +204,23 @@ public class RowTimestampIT extends 
ParallelStatsDisabledIT {
 }
 
 @Test
-public void 
testAutomaticallySettingRowTimestampForImmutableTableAndIndexes() throws 
Exception {
+public void testAutomaticallySettingRowTimestampWithTimestamp () throws 
Exception {
+
automaticallySettingRowTimestampForImmutableTableAndIndexes("TIMESTAMP");
+}
+
+@Test
+public void testAutomaticallySettingRowTimestampWithDate () throws 
Exception {
+automaticallySettingRowTimestampForImmutableTableAndIndexes("DATE");
+}
+
+private void 
automaticallySettingRowTimestampForImmutableTableAndIndexes(String type) throws 
Exception {
 long startTime = EnvironmentEdgeManager.currentTimeMillis();
 String tableName = generateUniqueName();
 String indexName = generateUniqueName();
 try (Connection conn = DriverManager.getConnection(getUrl())) {
 conn.createStatement()
 .execute("CREATE TABLE IF NOT EXISTS " + tableName
-+ " (PK1 VARCHAR NOT NULL, PK2 DATE NOT NULL, KV1 
VARCHAR, KV2 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 "
++ " (PK1 VARCHAR NOT NULL, PK2 " + type + " NOT 
NULL, KV1 VARCHAR, KV2 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 "
 + sortOrder + " ROW_TIMESTAMP)) " + 
tableDDLOptions);
 }
 try (Connection conn = DriverManager.getConnection(getUrl())) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/6641f42d/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java 
b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
index 35a844f..cf17b90 100644
--- 

[4/5] phoenix git commit: PHOENIX-3991 ROW_TIMESTAMP on TIMESTAMP column type throws ArrayOutOfBound when upserting without providing a value.

2018-07-26 Thread ssa
PHOENIX-3991 ROW_TIMESTAMP on TIMESTAMP column type throws ArrayOutOfBound when 
upserting without providing a value.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a1d03667
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a1d03667
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a1d03667

Branch: refs/heads/4.x-HBase-1.4
Commit: a1d036676f5608d50666085172eb780ea73ae6dd
Parents: 6b363b3
Author: Sergey Soldatov 
Authored: Wed Jul 25 12:48:03 2018 -0700
Committer: Sergey Soldatov 
Committed: Thu Jul 26 13:39:12 2018 -0700

--
 .../apache/phoenix/end2end/RowTimestampIT.java  | 26 +---
 .../apache/phoenix/execute/MutationState.java   |  6 -
 2 files changed, 27 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a1d03667/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java
index 458cc38..0457bf3 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java
@@ -73,13 +73,22 @@ public class RowTimestampIT extends ParallelStatsDisabledIT 
{
 }
 
 @Test
-public void testWithUpsertingRowTimestampColSpecified() throws Exception {
+public void testUpsertingRowTimestampColSpecifiedWithTimestamp() throws 
Exception {
+upsertingRowTimestampColSpecified("TIMESTAMP");
+}
+
+@Test
+public void testUpsertingRowTimestampColSpecifiedWithDate() throws 
Exception {
+upsertingRowTimestampColSpecified("DATE");
+}
+
+private void upsertingRowTimestampColSpecified(String type) throws 
Exception {
 String tableName = generateUniqueName();
 String indexName = generateUniqueName();
 try (Connection conn = DriverManager.getConnection(getUrl())) {
 conn.createStatement()
 .execute("CREATE TABLE IF NOT EXISTS " + tableName
-+ " (PK1 VARCHAR NOT NULL, PK2 DATE NOT NULL, KV1 
VARCHAR, KV2 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 "
++ " (PK1 VARCHAR NOT NULL, PK2 " + type + " NOT 
NULL, KV1 VARCHAR, KV2 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 "
 + sortOrder + " ROW_TIMESTAMP)) " + 
tableDDLOptions);
 }
 try (Connection conn = DriverManager.getConnection(getUrl())) {
@@ -192,14 +201,23 @@ public class RowTimestampIT extends 
ParallelStatsDisabledIT {
 }
 
 @Test
-public void 
testAutomaticallySettingRowTimestampForImmutableTableAndIndexes() throws 
Exception {
+public void testAutomaticallySettingRowTimestampWithTimestamp () throws 
Exception {
+
automaticallySettingRowTimestampForImmutableTableAndIndexes("TIMESTAMP");
+}
+
+@Test
+public void testAutomaticallySettingRowTimestampWithDate () throws 
Exception {
+automaticallySettingRowTimestampForImmutableTableAndIndexes("DATE");
+}
+
+private void 
automaticallySettingRowTimestampForImmutableTableAndIndexes(String type) throws 
Exception {
 long startTime = EnvironmentEdgeManager.currentTimeMillis();
 String tableName = generateUniqueName();
 String indexName = generateUniqueName();
 try (Connection conn = DriverManager.getConnection(getUrl())) {
 conn.createStatement()
 .execute("CREATE TABLE IF NOT EXISTS " + tableName
-+ " (PK1 VARCHAR NOT NULL, PK2 DATE NOT NULL, KV1 
VARCHAR, KV2 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 "
++ " (PK1 VARCHAR NOT NULL, PK2 " + type + " NOT 
NULL, KV1 VARCHAR, KV2 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 "
 + sortOrder + " ROW_TIMESTAMP)) " + 
tableDDLOptions);
 }
 try (Connection conn = DriverManager.getConnection(getUrl())) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a1d03667/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java 
b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
index c29d6b5..d2d1eea 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
@@ -25,6 +25,7 @@ import static 
org.apache.phoenix.monitoring.GlobalClientMetrics.GLOBAL_MUTATION_
 
 import java.io.IOException;
 

[3/5] phoenix git commit: PHOENIX-3991 ROW_TIMESTAMP on TIMESTAMP column type throws ArrayOutOfBound when upserting without providing a value.

2018-07-26 Thread ssa
PHOENIX-3991 ROW_TIMESTAMP on TIMESTAMP column type throws ArrayOutOfBound when 
upserting without providing a value.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b37b7d75
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b37b7d75
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b37b7d75

Branch: refs/heads/4.x-HBase-1.3
Commit: b37b7d7505570d8b198ecd4b6e5a626c73bf7ebe
Parents: 1f7e320
Author: Sergey Soldatov 
Authored: Wed Jul 25 12:48:03 2018 -0700
Committer: Sergey Soldatov 
Committed: Thu Jul 26 13:39:00 2018 -0700

--
 .../apache/phoenix/end2end/RowTimestampIT.java  | 26 +---
 .../apache/phoenix/execute/MutationState.java   |  6 -
 2 files changed, 27 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b37b7d75/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java
index 458cc38..0457bf3 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java
@@ -73,13 +73,22 @@ public class RowTimestampIT extends ParallelStatsDisabledIT 
{
 }
 
 @Test
-public void testWithUpsertingRowTimestampColSpecified() throws Exception {
+public void testUpsertingRowTimestampColSpecifiedWithTimestamp() throws 
Exception {
+upsertingRowTimestampColSpecified("TIMESTAMP");
+}
+
+@Test
+public void testUpsertingRowTimestampColSpecifiedWithDate() throws 
Exception {
+upsertingRowTimestampColSpecified("DATE");
+}
+
+private void upsertingRowTimestampColSpecified(String type) throws 
Exception {
 String tableName = generateUniqueName();
 String indexName = generateUniqueName();
 try (Connection conn = DriverManager.getConnection(getUrl())) {
 conn.createStatement()
 .execute("CREATE TABLE IF NOT EXISTS " + tableName
-+ " (PK1 VARCHAR NOT NULL, PK2 DATE NOT NULL, KV1 
VARCHAR, KV2 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 "
++ " (PK1 VARCHAR NOT NULL, PK2 " + type + " NOT 
NULL, KV1 VARCHAR, KV2 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 "
 + sortOrder + " ROW_TIMESTAMP)) " + 
tableDDLOptions);
 }
 try (Connection conn = DriverManager.getConnection(getUrl())) {
@@ -192,14 +201,23 @@ public class RowTimestampIT extends 
ParallelStatsDisabledIT {
 }
 
 @Test
-public void 
testAutomaticallySettingRowTimestampForImmutableTableAndIndexes() throws 
Exception {
+public void testAutomaticallySettingRowTimestampWithTimestamp () throws 
Exception {
+
automaticallySettingRowTimestampForImmutableTableAndIndexes("TIMESTAMP");
+}
+
+@Test
+public void testAutomaticallySettingRowTimestampWithDate () throws 
Exception {
+automaticallySettingRowTimestampForImmutableTableAndIndexes("DATE");
+}
+
+private void 
automaticallySettingRowTimestampForImmutableTableAndIndexes(String type) throws 
Exception {
 long startTime = EnvironmentEdgeManager.currentTimeMillis();
 String tableName = generateUniqueName();
 String indexName = generateUniqueName();
 try (Connection conn = DriverManager.getConnection(getUrl())) {
 conn.createStatement()
 .execute("CREATE TABLE IF NOT EXISTS " + tableName
-+ " (PK1 VARCHAR NOT NULL, PK2 DATE NOT NULL, KV1 
VARCHAR, KV2 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 "
++ " (PK1 VARCHAR NOT NULL, PK2 " + type + " NOT 
NULL, KV1 VARCHAR, KV2 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 "
 + sortOrder + " ROW_TIMESTAMP)) " + 
tableDDLOptions);
 }
 try (Connection conn = DriverManager.getConnection(getUrl())) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b37b7d75/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java 
b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
index c29d6b5..d2d1eea 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
@@ -25,6 +25,7 @@ import static 
org.apache.phoenix.monitoring.GlobalClientMetrics.GLOBAL_MUTATION_
 
 import java.io.IOException;
 

[2/5] phoenix git commit: PHOENIX-3991 ROW_TIMESTAMP on TIMESTAMP column type throws ArrayOutOfBound when upserting without providing a value.

2018-07-26 Thread ssa
PHOENIX-3991 ROW_TIMESTAMP on TIMESTAMP column type throws ArrayOutOfBound when 
upserting without providing a value.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/67731c4d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/67731c4d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/67731c4d

Branch: refs/heads/4.x-HBase-1.2
Commit: 67731c4dbde85d9bf0526296adc03e016a522c50
Parents: faf4fb2
Author: Sergey Soldatov 
Authored: Wed Jul 25 12:48:03 2018 -0700
Committer: Sergey Soldatov 
Committed: Thu Jul 26 13:38:41 2018 -0700

--
 .../apache/phoenix/end2end/RowTimestampIT.java  | 26 +---
 .../apache/phoenix/execute/MutationState.java   |  6 -
 2 files changed, 27 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/67731c4d/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java
index 09fd821..2cf4d9d 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java
@@ -73,13 +73,22 @@ public class RowTimestampIT extends ParallelStatsDisabledIT 
{
 }
 
 @Test
-public void testWithUpsertingRowTimestampColSpecified() throws Exception {
+public void testUpsertingRowTimestampColSpecifiedWithTimestamp() throws 
Exception {
+upsertingRowTimestampColSpecified("TIMESTAMP");
+}
+
+@Test
+public void testUpsertingRowTimestampColSpecifiedWithDate() throws 
Exception {
+upsertingRowTimestampColSpecified("DATE");
+}
+
+private void upsertingRowTimestampColSpecified(String type) throws 
Exception {
 String tableName = generateUniqueName();
 String indexName = generateUniqueName();
 try (Connection conn = DriverManager.getConnection(getUrl())) {
 conn.createStatement()
 .execute("CREATE TABLE IF NOT EXISTS " + tableName
-+ " (PK1 VARCHAR NOT NULL, PK2 DATE NOT NULL, KV1 
VARCHAR, KV2 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 "
++ " (PK1 VARCHAR NOT NULL, PK2 " + type + " NOT 
NULL, KV1 VARCHAR, KV2 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 "
 + sortOrder + " ROW_TIMESTAMP)) " + 
tableDDLOptions);
 }
 try (Connection conn = DriverManager.getConnection(getUrl())) {
@@ -195,14 +204,23 @@ public class RowTimestampIT extends 
ParallelStatsDisabledIT {
 }
 
 @Test
-public void 
testAutomaticallySettingRowTimestampForImmutableTableAndIndexes() throws 
Exception {
+public void testAutomaticallySettingRowTimestampWithTimestamp () throws 
Exception {
+
automaticallySettingRowTimestampForImmutableTableAndIndexes("TIMESTAMP");
+}
+
+@Test
+public void testAutomaticallySettingRowTimestampWithDate () throws 
Exception {
+automaticallySettingRowTimestampForImmutableTableAndIndexes("DATE");
+}
+
+private void 
automaticallySettingRowTimestampForImmutableTableAndIndexes(String type) throws 
Exception {
 long startTime = EnvironmentEdgeManager.currentTimeMillis();
 String tableName = generateUniqueName();
 String indexName = generateUniqueName();
 try (Connection conn = DriverManager.getConnection(getUrl())) {
 conn.createStatement()
 .execute("CREATE TABLE IF NOT EXISTS " + tableName
-+ " (PK1 VARCHAR NOT NULL, PK2 DATE NOT NULL, KV1 
VARCHAR, KV2 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 "
++ " (PK1 VARCHAR NOT NULL, PK2 " + type + " NOT 
NULL, KV1 VARCHAR, KV2 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 "
 + sortOrder + " ROW_TIMESTAMP)) " + 
tableDDLOptions);
 }
 try (Connection conn = DriverManager.getConnection(getUrl())) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/67731c4d/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java 
b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
index c29d6b5..d2d1eea 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
@@ -25,6 +25,7 @@ import static 
org.apache.phoenix.monitoring.GlobalClientMetrics.GLOBAL_MUTATION_
 
 import java.io.IOException;
 

[5/5] phoenix git commit: PHOENIX-3991 ROW_TIMESTAMP on TIMESTAMP column type throws ArrayOutOfBound when upserting without providing a value.

2018-07-26 Thread ssa
PHOENIX-3991 ROW_TIMESTAMP on TIMESTAMP column type throws ArrayOutOfBound when 
upserting without providing a value.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/accde3c7
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/accde3c7
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/accde3c7

Branch: refs/heads/5.x-HBase-2.0
Commit: accde3c7c40a85e084eca7cffb183638265a47c5
Parents: 8a874cc
Author: Sergey Soldatov 
Authored: Wed Jul 25 12:48:03 2018 -0700
Committer: Sergey Soldatov 
Committed: Thu Jul 26 13:39:34 2018 -0700

--
 .../apache/phoenix/end2end/RowTimestampIT.java  | 26 +---
 .../apache/phoenix/execute/MutationState.java   |  6 -
 2 files changed, 27 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/accde3c7/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java
index 509e305..e873fdf 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/RowTimestampIT.java
@@ -75,13 +75,22 @@ public class RowTimestampIT extends ParallelStatsDisabledIT 
{
 }
 
 @Test
-public void testWithUpsertingRowTimestampColSpecified() throws Exception {
+public void testUpsertingRowTimestampColSpecifiedWithTimestamp() throws 
Exception {
+upsertingRowTimestampColSpecified("TIMESTAMP");
+}
+
+@Test
+public void testUpsertingRowTimestampColSpecifiedWithDate() throws 
Exception {
+upsertingRowTimestampColSpecified("DATE");
+}
+
+private void upsertingRowTimestampColSpecified(String type) throws 
Exception {
 String tableName = generateUniqueName();
 String indexName = generateUniqueName();
 try (Connection conn = DriverManager.getConnection(getUrl())) {
 conn.createStatement()
 .execute("CREATE TABLE IF NOT EXISTS " + tableName
-+ " (PK1 VARCHAR NOT NULL, PK2 DATE NOT NULL, KV1 
VARCHAR, KV2 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 "
++ " (PK1 VARCHAR NOT NULL, PK2 " + type + " NOT 
NULL, KV1 VARCHAR, KV2 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 "
 + sortOrder + " ROW_TIMESTAMP)) " + 
tableDDLOptions);
 }
 try (Connection conn = DriverManager.getConnection(getUrl())) {
@@ -195,14 +204,23 @@ public class RowTimestampIT extends 
ParallelStatsDisabledIT {
 }
 
 @Test
-public void 
testAutomaticallySettingRowTimestampForImmutableTableAndIndexes() throws 
Exception {
+public void testAutomaticallySettingRowTimestampWithTimestamp () throws 
Exception {
+
automaticallySettingRowTimestampForImmutableTableAndIndexes("TIMESTAMP");
+}
+
+@Test
+public void testAutomaticallySettingRowTimestampWithDate () throws 
Exception {
+automaticallySettingRowTimestampForImmutableTableAndIndexes("DATE");
+}
+
+private void 
automaticallySettingRowTimestampForImmutableTableAndIndexes(String type) throws 
Exception {
 long startTime = EnvironmentEdgeManager.currentTimeMillis();
 String tableName = generateUniqueName();
 String indexName = generateUniqueName();
 try (Connection conn = DriverManager.getConnection(getUrl())) {
 conn.createStatement()
 .execute("CREATE TABLE IF NOT EXISTS " + tableName
-+ " (PK1 VARCHAR NOT NULL, PK2 DATE NOT NULL, KV1 
VARCHAR, KV2 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 "
++ " (PK1 VARCHAR NOT NULL, PK2 " + type + " NOT 
NULL, KV1 VARCHAR, KV2 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 "
 + sortOrder + " ROW_TIMESTAMP)) " + 
tableDDLOptions);
 }
 try (Connection conn = DriverManager.getConnection(getUrl())) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/accde3c7/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java 
b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
index 35a844f..cf17b90 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
@@ -25,6 +25,7 @@ import static 
org.apache.phoenix.monitoring.GlobalClientMetrics.GLOBAL_MUTATION_
 
 import java.io.IOException;
 

svn commit: r1835673 - in /phoenix/site: publish/update_statistics.html source/src/site/markdown/update_statistics.md

2018-07-11 Thread ssa
Author: ssa
Date: Wed Jul 11 21:00:47 2018
New Revision: 1835673

URL: http://svn.apache.org/viewvc?rev=1835673&view=rev
Log:
Updated statistics page for the known issue

Modified:
phoenix/site/publish/update_statistics.html
phoenix/site/source/src/site/markdown/update_statistics.md

Modified: phoenix/site/publish/update_statistics.html
URL: 
http://svn.apache.org/viewvc/phoenix/site/publish/update_statistics.html?rev=1835673&r1=1835672&r2=1835673&view=diff
==
--- phoenix/site/publish/update_statistics.html (original)
+++ phoenix/site/publish/update_statistics.html Wed Jul 11 21:00:47 2018
@@ -1,7 +1,7 @@
 
 
 
 
@@ -206,6 +206,13 @@
   
  
  
+ Known issues 
+  
+  Duplicated records (SQL count shows more rows than HBase 
row_count) for Phoenix versions earlier than 4.12  
+  
+ May happen for tables with several regions where guide posts were not 
generated for last region(s) because the region size is smaller than the guide 
post width. In that case, the parallel scans for those regions would start with 
the latest guide post instead of startkey of this region. Fixed in 4.12 
as part of https://issues.apache.org/jira/browse/PHOENIX-4007;>PHOENIX-4007  
+ 
+ 
  Configuration 
  The configuration parameters controlling statistics collection 
include: 
   

Modified: phoenix/site/source/src/site/markdown/update_statistics.md
URL: 
http://svn.apache.org/viewvc/phoenix/site/source/src/site/markdown/update_statistics.md?rev=1835673&r1=1835672&r2=1835673&view=diff
==
--- phoenix/site/source/src/site/markdown/update_statistics.md (original)
+++ phoenix/site/source/src/site/markdown/update_statistics.md Wed Jul 11 
21:00:47 2018
@@ -53,6 +53,14 @@ To remove the guidepost width, set the p
 
 ALTER TABLE my_table SET GUIDE_POSTS_WIDTH = null
 
+## Known issues
+
+* **Duplicated records** (SQL count shows more rows than HBase 
row_count) for Phoenix versions earlier than **4.12** 
+
+May happen for tables with several regions where guide posts were not 
generated for last region(s) because the region size is smaller than the guide 
post width.
+In that case, the parallel scans for those regions would start with the latest 
guide post instead of startkey of this region.
+**Fixed in 4.12** as part of 
[PHOENIX-4007](https://issues.apache.org/jira/browse/PHOENIX-4007) 
+
 ## Configuration
 
 The configuration parameters controlling statistics collection include:




[04/10] phoenix git commit: PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck

2018-05-31 Thread ssa
PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9ccc32dd
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9ccc32dd
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9ccc32dd

Branch: refs/heads/4.x-HBase-1.1
Commit: 9ccc32dd03b77fa4fc7ed177f5e3eb326bc5b6b3
Parents: e71e767
Author: Sergey Soldatov 
Authored: Thu May 31 12:07:29 2018 -0700
Committer: ss77892 
Committed: Thu May 31 12:48:12 2018 -0700

--
 .../phoenix/coprocessor/MetaDataEndpointImpl.java  |  6 +++---
 .../phoenix/coprocessor/MetaDataProtocol.java  | 16 +++-
 .../apache/phoenix/exception/SQLExceptionCode.java |  6 +++---
 .../index/write/ParallelWriterIndexCommitter.java  |  4 ++--
 .../TrackingParallelWriterIndexCommitter.java  |  4 ++--
 .../phoenix/index/PhoenixTransactionalIndexer.java |  4 ++--
 .../phoenix/jdbc/PhoenixDatabaseMetaData.java  | 17 +
 .../phoenix/query/ConnectionQueryServicesImpl.java |  4 ++--
 .../org/apache/phoenix/schema/MetaDataClient.java  |  4 ++--
 .../java/org/apache/phoenix/util/ScanUtil.java |  3 +--
 10 files changed, 33 insertions(+), 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/9ccc32dd/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index b751d6f..0e84465 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -919,12 +919,12 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 .getValueArray()[indexStateKv.getValueOffset()]);
 // If client is not yet up to 4.12, then translate PENDING_ACTIVE to 
ACTIVE (as would have been
 // the value in those versions) since the client won't have this index 
state in its enum.
-if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_ACTIVE_INDEX) {
+if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
MetaDataProtocol.MIN_PENDING_ACTIVE_INDEX) {
 indexState = PIndexState.ACTIVE;
 }
 // If client is not yet up to 4.14, then translate PENDING_DISABLE to 
DISABLE
 // since the client won't have this index state in its enum.
-if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_DISABLE_INDEX) {
+if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
MetaDataProtocol.MIN_PENDING_DISABLE_INDEX) {
 // note: for older clients, we have to rely on the rebuilder to 
transition PENDING_DISABLE -> DISABLE
 indexState = PIndexState.DISABLE;
 }
@@ -3547,7 +3547,7 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 boolean isTablesMappingEnabled = 
SchemaUtil.isNamespaceMappingEnabled(PTableType.TABLE,
 new ReadOnlyProps(config.iterator()));
 if (isTablesMappingEnabled
-&& 
PhoenixDatabaseMetaData.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
+&& MetaDataProtocol.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
 logger.error("Old client is not compatible when" + " system tables 
are upgraded to map to namespace");
 ProtobufUtil.setControllerException(controller,
 ServerUtil.createIOException(

http://git-wip-us.apache.org/repos/asf/phoenix/blob/9ccc32dd/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
index 62b701d..883f96d 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
@@ -93,7 +93,21 @@ public abstract class MetaDataProtocol extends 
MetaDataService {
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP_4_14_0 = 
MIN_TABLE_TIMESTAMP + 28;
 // MIN_SYSTEM_TABLE_TIMESTAMP needs to be set to the max of all the 
MIN_SYSTEM_TABLE_TIMESTAMP_* constants
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP = 

[10/10] phoenix git commit: PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck

2018-05-31 Thread ssa
PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f62cd681
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f62cd681
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f62cd681

Branch: refs/heads/4.x-cdh5.14
Commit: f62cd6816ff7599e301c7888222b3ac9ac3fa40a
Parents: 220845a
Author: Sergey Soldatov 
Authored: Thu May 31 12:07:29 2018 -0700
Committer: ss77892 
Committed: Thu May 31 13:00:24 2018 -0700

--
 .../phoenix/coprocessor/MetaDataEndpointImpl.java  |  6 +++---
 .../phoenix/coprocessor/MetaDataProtocol.java  | 16 +++-
 .../apache/phoenix/exception/SQLExceptionCode.java |  6 +++---
 .../index/write/ParallelWriterIndexCommitter.java  |  4 ++--
 .../TrackingParallelWriterIndexCommitter.java  |  4 ++--
 .../phoenix/index/PhoenixTransactionalIndexer.java |  4 ++--
 .../phoenix/jdbc/PhoenixDatabaseMetaData.java  | 17 +
 .../phoenix/query/ConnectionQueryServicesImpl.java |  4 ++--
 .../org/apache/phoenix/schema/MetaDataClient.java  |  4 ++--
 .../java/org/apache/phoenix/util/ScanUtil.java |  3 +--
 10 files changed, 33 insertions(+), 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f62cd681/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index ff62c92..5e2e4df 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -946,12 +946,12 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 .getValueArray()[indexStateKv.getValueOffset()]);
 // If client is not yet up to 4.12, then translate PENDING_ACTIVE to 
ACTIVE (as would have been
 // the value in those versions) since the client won't have this index 
state in its enum.
-if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_ACTIVE_INDEX) {
+if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
MetaDataProtocol.MIN_PENDING_ACTIVE_INDEX) {
 indexState = PIndexState.ACTIVE;
 }
 // If client is not yet up to 4.14, then translate PENDING_DISABLE to 
DISABLE
 // since the client won't have this index state in its enum.
-if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_DISABLE_INDEX) {
+if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
MetaDataProtocol.MIN_PENDING_DISABLE_INDEX) {
 // note: for older clients, we have to rely on the rebuilder to 
transition PENDING_DISABLE -> DISABLE
 indexState = PIndexState.DISABLE;
 }
@@ -3687,7 +3687,7 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 GetVersionResponse.Builder builder = GetVersionResponse.newBuilder();
 Configuration config = env.getConfiguration();
 if (isTablesMappingEnabled
-&& 
PhoenixDatabaseMetaData.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
+&& MetaDataProtocol.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
 logger.error("Old client is not compatible when" + " system tables 
are upgraded to map to namespace");
 ProtobufUtil.setControllerException(controller,
 ServerUtil.createIOException(

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f62cd681/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
index 62b701d..883f96d 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
@@ -93,7 +93,21 @@ public abstract class MetaDataProtocol extends 
MetaDataService {
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP_4_14_0 = 
MIN_TABLE_TIMESTAMP + 28;
 // MIN_SYSTEM_TABLE_TIMESTAMP needs to be set to the max of all the 
MIN_SYSTEM_TABLE_TIMESTAMP_* constants
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP = 

[09/10] phoenix git commit: PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck

2018-05-31 Thread ssa
PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6e762234
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6e762234
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6e762234

Branch: refs/heads/4.x-cdh5.13
Commit: 6e7622340a3ff1277d065434a9a700e41923418d
Parents: 465c486
Author: Sergey Soldatov 
Authored: Thu May 31 12:07:29 2018 -0700
Committer: ss77892 
Committed: Thu May 31 13:00:09 2018 -0700

--
 .../phoenix/coprocessor/MetaDataEndpointImpl.java  |  6 +++---
 .../phoenix/coprocessor/MetaDataProtocol.java  | 16 +++-
 .../apache/phoenix/exception/SQLExceptionCode.java |  6 +++---
 .../index/write/ParallelWriterIndexCommitter.java  |  4 ++--
 .../TrackingParallelWriterIndexCommitter.java  |  4 ++--
 .../phoenix/index/PhoenixTransactionalIndexer.java |  4 ++--
 .../phoenix/jdbc/PhoenixDatabaseMetaData.java  | 17 +
 .../phoenix/query/ConnectionQueryServicesImpl.java |  4 ++--
 .../org/apache/phoenix/schema/MetaDataClient.java  |  4 ++--
 .../java/org/apache/phoenix/util/ScanUtil.java |  3 +--
 10 files changed, 33 insertions(+), 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6e762234/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index ff62c92..5e2e4df 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -946,12 +946,12 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 .getValueArray()[indexStateKv.getValueOffset()]);
 // If client is not yet up to 4.12, then translate PENDING_ACTIVE to 
ACTIVE (as would have been
 // the value in those versions) since the client won't have this index 
state in its enum.
-if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_ACTIVE_INDEX) {
+if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
MetaDataProtocol.MIN_PENDING_ACTIVE_INDEX) {
 indexState = PIndexState.ACTIVE;
 }
 // If client is not yet up to 4.14, then translate PENDING_DISABLE to 
DISABLE
 // since the client won't have this index state in its enum.
-if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_DISABLE_INDEX) {
+if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
MetaDataProtocol.MIN_PENDING_DISABLE_INDEX) {
 // note: for older clients, we have to rely on the rebuilder to 
transition PENDING_DISABLE -> DISABLE
 indexState = PIndexState.DISABLE;
 }
@@ -3687,7 +3687,7 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 GetVersionResponse.Builder builder = GetVersionResponse.newBuilder();
 Configuration config = env.getConfiguration();
 if (isTablesMappingEnabled
-&& 
PhoenixDatabaseMetaData.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
+&& MetaDataProtocol.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
 logger.error("Old client is not compatible when" + " system tables 
are upgraded to map to namespace");
 ProtobufUtil.setControllerException(controller,
 ServerUtil.createIOException(

http://git-wip-us.apache.org/repos/asf/phoenix/blob/6e762234/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
index 62b701d..883f96d 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
@@ -93,7 +93,21 @@ public abstract class MetaDataProtocol extends 
MetaDataService {
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP_4_14_0 = 
MIN_TABLE_TIMESTAMP + 28;
 // MIN_SYSTEM_TABLE_TIMESTAMP needs to be set to the max of all the 
MIN_SYSTEM_TABLE_TIMESTAMP_* constants
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP = 

[05/10] phoenix git commit: PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck

2018-05-31 Thread ssa
PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/e84fd098
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/e84fd098
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/e84fd098

Branch: refs/heads/4.x-HBase-1.2
Commit: e84fd098bc94bb16903d9ba37f57b69adfd6e838
Parents: d4a5456
Author: Sergey Soldatov 
Authored: Thu May 31 12:07:29 2018 -0700
Committer: ss77892 
Committed: Thu May 31 12:58:25 2018 -0700

--
 .../phoenix/coprocessor/MetaDataEndpointImpl.java  |  6 +++---
 .../phoenix/coprocessor/MetaDataProtocol.java  | 16 +++-
 .../apache/phoenix/exception/SQLExceptionCode.java |  6 +++---
 .../index/write/ParallelWriterIndexCommitter.java  |  4 ++--
 .../TrackingParallelWriterIndexCommitter.java  |  4 ++--
 .../phoenix/index/PhoenixTransactionalIndexer.java |  4 ++--
 .../phoenix/jdbc/PhoenixDatabaseMetaData.java  | 17 +
 .../phoenix/query/ConnectionQueryServicesImpl.java |  4 ++--
 .../org/apache/phoenix/schema/MetaDataClient.java  |  4 ++--
 .../java/org/apache/phoenix/util/ScanUtil.java |  3 +--
 10 files changed, 33 insertions(+), 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/e84fd098/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index ff62c92..5e2e4df 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -946,12 +946,12 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 .getValueArray()[indexStateKv.getValueOffset()]);
 // If client is not yet up to 4.12, then translate PENDING_ACTIVE to 
ACTIVE (as would have been
 // the value in those versions) since the client won't have this index 
state in its enum.
-if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_ACTIVE_INDEX) {
+if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
MetaDataProtocol.MIN_PENDING_ACTIVE_INDEX) {
 indexState = PIndexState.ACTIVE;
 }
 // If client is not yet up to 4.14, then translate PENDING_DISABLE to 
DISABLE
 // since the client won't have this index state in its enum.
-if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_DISABLE_INDEX) {
+if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
MetaDataProtocol.MIN_PENDING_DISABLE_INDEX) {
 // note: for older clients, we have to rely on the rebuilder to 
transition PENDING_DISABLE -> DISABLE
 indexState = PIndexState.DISABLE;
 }
@@ -3687,7 +3687,7 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 GetVersionResponse.Builder builder = GetVersionResponse.newBuilder();
 Configuration config = env.getConfiguration();
 if (isTablesMappingEnabled
-&& 
PhoenixDatabaseMetaData.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
+&& MetaDataProtocol.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
 logger.error("Old client is not compatible when" + " system tables 
are upgraded to map to namespace");
 ProtobufUtil.setControllerException(controller,
 ServerUtil.createIOException(

http://git-wip-us.apache.org/repos/asf/phoenix/blob/e84fd098/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
index 62b701d..883f96d 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
@@ -93,7 +93,21 @@ public abstract class MetaDataProtocol extends 
MetaDataService {
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP_4_14_0 = 
MIN_TABLE_TIMESTAMP + 28;
 // MIN_SYSTEM_TABLE_TIMESTAMP needs to be set to the max of all the 
MIN_SYSTEM_TABLE_TIMESTAMP_* constants
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP = 

[02/10] phoenix git commit: PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck

2018-05-31 Thread ssa
PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/614c57d9
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/614c57d9
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/614c57d9

Branch: refs/heads/master
Commit: 614c57d91ba11bd8109dac769860e0186c8752bf
Parents: b19fde2
Author: Sergey Soldatov 
Authored: Thu May 31 12:07:29 2018 -0700
Committer: ss77892 
Committed: Thu May 31 12:36:29 2018 -0700

--
 .../phoenix/coprocessor/MetaDataEndpointImpl.java  |  6 +++---
 .../phoenix/coprocessor/MetaDataProtocol.java  | 16 +++-
 .../apache/phoenix/exception/SQLExceptionCode.java |  6 +++---
 .../index/write/ParallelWriterIndexCommitter.java  |  4 ++--
 .../TrackingParallelWriterIndexCommitter.java  |  4 ++--
 .../phoenix/index/PhoenixTransactionalIndexer.java |  4 ++--
 .../phoenix/jdbc/PhoenixDatabaseMetaData.java  | 17 +
 .../phoenix/query/ConnectionQueryServicesImpl.java |  4 ++--
 .../org/apache/phoenix/schema/MetaDataClient.java  |  4 ++--
 .../java/org/apache/phoenix/util/ScanUtil.java |  3 +--
 10 files changed, 33 insertions(+), 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/614c57d9/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index ff62c92..5e2e4df 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -946,12 +946,12 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 .getValueArray()[indexStateKv.getValueOffset()]);
 // If client is not yet up to 4.12, then translate PENDING_ACTIVE to 
ACTIVE (as would have been
 // the value in those versions) since the client won't have this index 
state in its enum.
-if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_ACTIVE_INDEX) {
+if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
MetaDataProtocol.MIN_PENDING_ACTIVE_INDEX) {
 indexState = PIndexState.ACTIVE;
 }
 // If client is not yet up to 4.14, then translate PENDING_DISABLE to 
DISABLE
 // since the client won't have this index state in its enum.
-if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_DISABLE_INDEX) {
+if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
MetaDataProtocol.MIN_PENDING_DISABLE_INDEX) {
 // note: for older clients, we have to rely on the rebuilder to 
transition PENDING_DISABLE -> DISABLE
 indexState = PIndexState.DISABLE;
 }
@@ -3687,7 +3687,7 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 GetVersionResponse.Builder builder = GetVersionResponse.newBuilder();
 Configuration config = env.getConfiguration();
 if (isTablesMappingEnabled
-&& 
PhoenixDatabaseMetaData.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
+&& MetaDataProtocol.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
 logger.error("Old client is not compatible when" + " system tables 
are upgraded to map to namespace");
 ProtobufUtil.setControllerException(controller,
 ServerUtil.createIOException(

http://git-wip-us.apache.org/repos/asf/phoenix/blob/614c57d9/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
index 62b701d..883f96d 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
@@ -93,7 +93,21 @@ public abstract class MetaDataProtocol extends 
MetaDataService {
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP_4_14_0 = 
MIN_TABLE_TIMESTAMP + 28;
 // MIN_SYSTEM_TABLE_TIMESTAMP needs to be set to the max of all the 
MIN_SYSTEM_TABLE_TIMESTAMP_* constants
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP = 

[01/10] phoenix git commit: PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck

2018-05-31 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 5b3fd4041 -> e6f9ce035
  refs/heads/4.x-HBase-1.1 e71e76780 -> 9ccc32dd0
  refs/heads/4.x-HBase-1.2 d4a545626 -> e84fd098b
  refs/heads/4.x-HBase-1.3 e2212e2ab -> 24af10405
  refs/heads/4.x-cdh5.11 58082f810 -> 5cc7575ef
  refs/heads/4.x-cdh5.12 9e80b8801 -> fc75ea703
  refs/heads/4.x-cdh5.13 465c486fb -> 6e7622340
  refs/heads/4.x-cdh5.14 220845a86 -> f62cd6816
  refs/heads/5.x-HBase-2.0 b21877d75 -> d79c30023
  refs/heads/master b19fde2c5 -> 614c57d91


PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d79c3002
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d79c3002
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d79c3002

Branch: refs/heads/5.x-HBase-2.0
Commit: d79c30023af47f32e2dac9d871aa75265cebc34f
Parents: b21877d
Author: Sergey Soldatov 
Authored: Thu May 31 12:07:29 2018 -0700
Committer: ss77892 
Committed: Thu May 31 12:35:38 2018 -0700

--
 .../phoenix/coprocessor/MetaDataEndpointImpl.java|  6 +++---
 .../apache/phoenix/coprocessor/MetaDataProtocol.java | 15 +++
 .../apache/phoenix/exception/SQLExceptionCode.java   |  6 +++---
 .../index/write/ParallelWriterIndexCommitter.java|  4 ++--
 .../write/TrackingParallelWriterIndexCommitter.java  |  6 +++---
 .../phoenix/index/PhoenixTransactionalIndexer.java   |  4 ++--
 .../apache/phoenix/jdbc/PhoenixDatabaseMetaData.java |  1 +
 .../phoenix/query/ConnectionQueryServicesImpl.java   |  4 ++--
 .../org/apache/phoenix/schema/MetaDataClient.java|  4 ++--
 .../main/java/org/apache/phoenix/util/ScanUtil.java  |  3 +--
 10 files changed, 34 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/d79c3002/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index a9b8267..50a1714 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -947,12 +947,12 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements RegionCopr
 .getValueArray()[indexStateKv.getValueOffset()]);
 // If client is not yet up to 4.12, then translate PENDING_ACTIVE to 
ACTIVE (as would have been
 // the value in those versions) since the client won't have this index 
state in its enum.
-if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_ACTIVE_INDEX) {
+if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
MetaDataProtocol.MIN_PENDING_ACTIVE_INDEX) {
 indexState = PIndexState.ACTIVE;
 }
 // If client is not yet up to 4.14, then translate PENDING_DISABLE to 
DISABLE
 // since the client won't have this index state in its enum.
-if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_DISABLE_INDEX) {
+if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
MetaDataProtocol.MIN_PENDING_DISABLE_INDEX) {
 // note: for older clients, we have to rely on the rebuilder to 
transition PENDING_DISABLE -> DISABLE
 indexState = PIndexState.DISABLE;
 }
@@ -3694,7 +3694,7 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements RegionCopr
 GetVersionResponse.Builder builder = GetVersionResponse.newBuilder();
 Configuration config = env.getConfiguration();
 if (isTablesMappingEnabled
-&& 
PhoenixDatabaseMetaData.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
+&& MetaDataProtocol.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
 logger.error("Old client is not compatible when" + " system tables 
are upgraded to map to namespace");
 ProtobufUtil.setControllerException(controller,
 ServerUtil.createIOException(

http://git-wip-us.apache.org/repos/asf/phoenix/blob/d79c3002/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
index 2a1c3a5..0bd1f8c 100644
--- 

[07/10] phoenix git commit: PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck

2018-05-31 Thread ssa
PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/5cc7575e
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/5cc7575e
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/5cc7575e

Branch: refs/heads/4.x-cdh5.11
Commit: 5cc7575ef4a996930f1adaef3a50fd447c87ae7b
Parents: 58082f8
Author: Sergey Soldatov 
Authored: Thu May 31 12:07:29 2018 -0700
Committer: ss77892 
Committed: Thu May 31 12:59:24 2018 -0700

--
 .../phoenix/coprocessor/MetaDataEndpointImpl.java  |  6 +++---
 .../phoenix/coprocessor/MetaDataProtocol.java  | 16 +++-
 .../apache/phoenix/exception/SQLExceptionCode.java |  6 +++---
 .../index/write/ParallelWriterIndexCommitter.java  |  4 ++--
 .../TrackingParallelWriterIndexCommitter.java  |  4 ++--
 .../phoenix/index/PhoenixTransactionalIndexer.java |  4 ++--
 .../phoenix/jdbc/PhoenixDatabaseMetaData.java  | 17 +
 .../phoenix/query/ConnectionQueryServicesImpl.java |  4 ++--
 .../org/apache/phoenix/schema/MetaDataClient.java  |  4 ++--
 .../java/org/apache/phoenix/util/ScanUtil.java |  3 +--
 10 files changed, 33 insertions(+), 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/5cc7575e/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index ff62c92..5e2e4df 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -946,12 +946,12 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 .getValueArray()[indexStateKv.getValueOffset()]);
 // If client is not yet up to 4.12, then translate PENDING_ACTIVE to 
ACTIVE (as would have been
 // the value in those versions) since the client won't have this index 
state in its enum.
-if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_ACTIVE_INDEX) {
+if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
MetaDataProtocol.MIN_PENDING_ACTIVE_INDEX) {
 indexState = PIndexState.ACTIVE;
 }
 // If client is not yet up to 4.14, then translate PENDING_DISABLE to 
DISABLE
 // since the client won't have this index state in its enum.
-if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_DISABLE_INDEX) {
+if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
MetaDataProtocol.MIN_PENDING_DISABLE_INDEX) {
 // note: for older clients, we have to rely on the rebuilder to 
transition PENDING_DISABLE -> DISABLE
 indexState = PIndexState.DISABLE;
 }
@@ -3687,7 +3687,7 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 GetVersionResponse.Builder builder = GetVersionResponse.newBuilder();
 Configuration config = env.getConfiguration();
 if (isTablesMappingEnabled
-&& 
PhoenixDatabaseMetaData.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
+&& MetaDataProtocol.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
 logger.error("Old client is not compatible when" + " system tables 
are upgraded to map to namespace");
 ProtobufUtil.setControllerException(controller,
 ServerUtil.createIOException(

http://git-wip-us.apache.org/repos/asf/phoenix/blob/5cc7575e/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
index 62b701d..883f96d 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
@@ -93,7 +93,21 @@ public abstract class MetaDataProtocol extends 
MetaDataService {
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP_4_14_0 = 
MIN_TABLE_TIMESTAMP + 28;
 // MIN_SYSTEM_TABLE_TIMESTAMP needs to be set to the max of all the 
MIN_SYSTEM_TABLE_TIMESTAMP_* constants
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP = 

[03/10] phoenix git commit: PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck

2018-05-31 Thread ssa
PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/e6f9ce03
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/e6f9ce03
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/e6f9ce03

Branch: refs/heads/4.x-HBase-0.98
Commit: e6f9ce0352dbf4f5b1a2d086c1c6068426afc1ac
Parents: 5b3fd40
Author: Sergey Soldatov 
Authored: Thu May 31 12:07:29 2018 -0700
Committer: ss77892 
Committed: Thu May 31 12:38:28 2018 -0700

--
 .../phoenix/coprocessor/MetaDataEndpointImpl.java  |  6 +++---
 .../phoenix/coprocessor/MetaDataProtocol.java  | 16 +++-
 .../apache/phoenix/exception/SQLExceptionCode.java |  6 +++---
 .../index/write/ParallelWriterIndexCommitter.java  |  4 ++--
 .../TrackingParallelWriterIndexCommitter.java  |  4 ++--
 .../phoenix/index/PhoenixTransactionalIndexer.java |  4 ++--
 .../phoenix/jdbc/PhoenixDatabaseMetaData.java  | 17 +
 .../phoenix/query/ConnectionQueryServicesImpl.java |  4 ++--
 .../org/apache/phoenix/schema/MetaDataClient.java  |  4 ++--
 .../java/org/apache/phoenix/util/ScanUtil.java |  3 +--
 10 files changed, 33 insertions(+), 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/e6f9ce03/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index 88d7ccd..05b7f41 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -929,12 +929,12 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 .getValueArray()[indexStateKv.getValueOffset()]);
 // If client is not yet up to 4.12, then translate PENDING_ACTIVE to 
ACTIVE (as would have been
 // the value in those versions) since the client won't have this index 
state in its enum.
-if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_ACTIVE_INDEX) {
+if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
MetaDataProtocol.MIN_PENDING_ACTIVE_INDEX) {
 indexState = PIndexState.ACTIVE;
 }
 // If client is not yet up to 4.14, then translate PENDING_DISABLE to 
DISABLE
 // since the client won't have this index state in its enum.
-if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_DISABLE_INDEX) {
+if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
MetaDataProtocol.MIN_PENDING_DISABLE_INDEX) {
 // note: for older clients, we have to rely on the rebuilder to 
transition PENDING_DISABLE -> DISABLE
 indexState = PIndexState.DISABLE;
 }
@@ -3548,7 +3548,7 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 GetVersionResponse.Builder builder = GetVersionResponse.newBuilder();
 Configuration config = env.getConfiguration();
 if (isTablesMappingEnabled
-&& 
PhoenixDatabaseMetaData.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
+&& MetaDataProtocol.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
 logger.error("Old client is not compatible when" + " system tables 
are upgraded to map to namespace");
 ProtobufUtil.setControllerException(controller,
 ServerUtil.createIOException(

http://git-wip-us.apache.org/repos/asf/phoenix/blob/e6f9ce03/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
index 4b3f34d..e287de2 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
@@ -93,7 +93,21 @@ public abstract class MetaDataProtocol extends 
MetaDataService {
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP_4_14_0 = 
MIN_TABLE_TIMESTAMP + 28;
 // MIN_SYSTEM_TABLE_TIMESTAMP needs to be set to the max of all the 
MIN_SYSTEM_TABLE_TIMESTAMP_* constants
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP = 

[08/10] phoenix git commit: PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck

2018-05-31 Thread ssa
PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/fc75ea70
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/fc75ea70
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/fc75ea70

Branch: refs/heads/4.x-cdh5.12
Commit: fc75ea70359d5197016a87bf290e94de769f6ad3
Parents: 9e80b88
Author: Sergey Soldatov 
Authored: Thu May 31 12:07:29 2018 -0700
Committer: ss77892 
Committed: Thu May 31 12:59:47 2018 -0700

--
 .../phoenix/coprocessor/MetaDataEndpointImpl.java  |  6 +++---
 .../phoenix/coprocessor/MetaDataProtocol.java  | 16 +++-
 .../apache/phoenix/exception/SQLExceptionCode.java |  6 +++---
 .../index/write/ParallelWriterIndexCommitter.java  |  4 ++--
 .../TrackingParallelWriterIndexCommitter.java  |  4 ++--
 .../phoenix/index/PhoenixTransactionalIndexer.java |  4 ++--
 .../phoenix/jdbc/PhoenixDatabaseMetaData.java  | 17 +
 .../phoenix/query/ConnectionQueryServicesImpl.java |  4 ++--
 .../org/apache/phoenix/schema/MetaDataClient.java  |  4 ++--
 .../java/org/apache/phoenix/util/ScanUtil.java |  3 +--
 10 files changed, 33 insertions(+), 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/fc75ea70/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index ff62c92..5e2e4df 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -946,12 +946,12 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 .getValueArray()[indexStateKv.getValueOffset()]);
 // If client is not yet up to 4.12, then translate PENDING_ACTIVE to 
ACTIVE (as would have been
 // the value in those versions) since the client won't have this index 
state in its enum.
-if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_ACTIVE_INDEX) {
+if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
MetaDataProtocol.MIN_PENDING_ACTIVE_INDEX) {
 indexState = PIndexState.ACTIVE;
 }
 // If client is not yet up to 4.14, then translate PENDING_DISABLE to 
DISABLE
 // since the client won't have this index state in its enum.
-if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_DISABLE_INDEX) {
+if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
MetaDataProtocol.MIN_PENDING_DISABLE_INDEX) {
 // note: for older clients, we have to rely on the rebuilder to 
transition PENDING_DISABLE -> DISABLE
 indexState = PIndexState.DISABLE;
 }
@@ -3687,7 +3687,7 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 GetVersionResponse.Builder builder = GetVersionResponse.newBuilder();
 Configuration config = env.getConfiguration();
 if (isTablesMappingEnabled
-&& 
PhoenixDatabaseMetaData.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
+&& MetaDataProtocol.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
 logger.error("Old client is not compatible when" + " system tables 
are upgraded to map to namespace");
 ProtobufUtil.setControllerException(controller,
 ServerUtil.createIOException(

http://git-wip-us.apache.org/repos/asf/phoenix/blob/fc75ea70/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
index 62b701d..883f96d 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
@@ -93,7 +93,21 @@ public abstract class MetaDataProtocol extends 
MetaDataService {
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP_4_14_0 = 
MIN_TABLE_TIMESTAMP + 28;
 // MIN_SYSTEM_TABLE_TIMESTAMP needs to be set to the max of all the 
MIN_SYSTEM_TABLE_TIMESTAMP_* constants
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP = 

[06/10] phoenix git commit: PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck

2018-05-31 Thread ssa
PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/24af1040
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/24af1040
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/24af1040

Branch: refs/heads/4.x-HBase-1.3
Commit: 24af1040591e088a9b4722ae825762d331d102c2
Parents: e2212e2
Author: Sergey Soldatov 
Authored: Thu May 31 12:07:29 2018 -0700
Committer: ss77892 
Committed: Thu May 31 12:58:36 2018 -0700

--
 .../phoenix/coprocessor/MetaDataEndpointImpl.java  |  6 +++---
 .../phoenix/coprocessor/MetaDataProtocol.java  | 16 +++-
 .../apache/phoenix/exception/SQLExceptionCode.java |  6 +++---
 .../index/write/ParallelWriterIndexCommitter.java  |  4 ++--
 .../TrackingParallelWriterIndexCommitter.java  |  4 ++--
 .../phoenix/index/PhoenixTransactionalIndexer.java |  4 ++--
 .../phoenix/jdbc/PhoenixDatabaseMetaData.java  | 17 +
 .../phoenix/query/ConnectionQueryServicesImpl.java |  4 ++--
 .../org/apache/phoenix/schema/MetaDataClient.java  |  4 ++--
 .../java/org/apache/phoenix/util/ScanUtil.java |  3 +--
 10 files changed, 33 insertions(+), 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/24af1040/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index ff62c92..5e2e4df 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -946,12 +946,12 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 .getValueArray()[indexStateKv.getValueOffset()]);
 // If client is not yet up to 4.12, then translate PENDING_ACTIVE to 
ACTIVE (as would have been
 // the value in those versions) since the client won't have this index 
state in its enum.
-if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_ACTIVE_INDEX) {
+if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
MetaDataProtocol.MIN_PENDING_ACTIVE_INDEX) {
 indexState = PIndexState.ACTIVE;
 }
 // If client is not yet up to 4.14, then translate PENDING_DISABLE to 
DISABLE
 // since the client won't have this index state in its enum.
-if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_DISABLE_INDEX) {
+if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
MetaDataProtocol.MIN_PENDING_DISABLE_INDEX) {
 // note: for older clients, we have to rely on the rebuilder to 
transition PENDING_DISABLE -> DISABLE
 indexState = PIndexState.DISABLE;
 }
@@ -3687,7 +3687,7 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 GetVersionResponse.Builder builder = GetVersionResponse.newBuilder();
 Configuration config = env.getConfiguration();
 if (isTablesMappingEnabled
-&& 
PhoenixDatabaseMetaData.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
+&& MetaDataProtocol.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
 logger.error("Old client is not compatible when" + " system tables 
are upgraded to map to namespace");
 ProtobufUtil.setControllerException(controller,
 ServerUtil.createIOException(

http://git-wip-us.apache.org/repos/asf/phoenix/blob/24af1040/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
index 62b701d..883f96d 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
@@ -93,7 +93,21 @@ public abstract class MetaDataProtocol extends 
MetaDataService {
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP_4_14_0 = 
MIN_TABLE_TIMESTAMP + 28;
 // MIN_SYSTEM_TABLE_TIMESTAMP needs to be set to the max of all the 
MIN_SYSTEM_TABLE_TIMESTAMP_* constants
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP = 

phoenix git commit: PHOENIX-4756 Integration tests for PhoenixStorageHandler doesn't work on 5.x branch

2018-05-30 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/5.x-HBase-2.0 8a955d47d -> b21877d75


PHOENIX-4756 Integration tests for PhoenixStorageHandler doesn't work on 5.x 
branch


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b21877d7
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b21877d7
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b21877d7

Branch: refs/heads/5.x-HBase-2.0
Commit: b21877d75b66cb2c738f4949b444623981f30d95
Parents: 8a955d4
Author: Sergey Soldatov 
Authored: Sun May 27 23:05:00 2018 -0700
Committer: ss77892 
Committed: Wed May 30 21:03:44 2018 -0700

--
 phoenix-hive/pom.xml| 29 
 .../apache/phoenix/hive/HivePhoenixStoreIT.java |  9 --
 .../PhoenixStorageHandlerConstants.java |  4 +--
 .../PhoenixByteObjectInspector.java |  2 +-
 .../PhoenixDoubleObjectInspector.java   |  2 +-
 5 files changed, 39 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b21877d7/phoenix-hive/pom.xml
--
diff --git a/phoenix-hive/pom.xml b/phoenix-hive/pom.xml
index 8b9b4c1..0bc582c 100644
--- a/phoenix-hive/pom.xml
+++ b/phoenix-hive/pom.xml
@@ -78,6 +78,12 @@
 
   org.apache.hadoop
   hadoop-mapreduce-client-core
+  
+
+  io.netty
+  netty
+
+  
 
 
 
@@ -105,6 +111,11 @@
   test
 
 
+  io.netty
+  netty-all
+  4.1.17.Final
+
+
   org.apache.hadoop
   hadoop-hdfs
   test-jar
@@ -156,6 +167,24 @@
 
   
 
+  
+org.eclipse.jetty
+jetty-util
+test
+9.3.8.v20160314
+  
+  
+org.eclipse.jetty
+jetty-http
+test
+9.3.8.v20160314
+  
+  
+org.eclipse.jetty
+jetty-server
+test
+9.3.8.v20160314
+  
 
   org.mockito
   mockito-all

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b21877d7/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
--
diff --git 
a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
index a62d780..ecb2003 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
@@ -315,7 +315,7 @@ public class HivePhoenixStoreIT  extends 
BaseHivePhoenixStoreIT {
 public void testTimestampPredicate() throws Exception {
 String testName = "testTimeStampPredicate";
 hbaseTestUtil.getTestFileSystem().createNewFile(new Path(hiveLogDir, 
testName + ".out"));
-createFile("10\t2013-01-02 01:01:01.123456\n", new Path(hiveOutputDir, 
testName + ".out").toString());
+createFile("10\t2013-01-02 01:01:01.123\n", new Path(hiveOutputDir, 
testName + ".out").toString());
 createFile(StringUtil.EMPTY_STRING, new Path(hiveLogDir, testName + 
".out").toString());
 
 StringBuilder sb = new StringBuilder();
@@ -330,9 +330,12 @@ public class HivePhoenixStoreIT  extends 
BaseHivePhoenixStoreIT {
 hbaseTestUtil.getZkCluster().getClientPort() + "'," + 
HiveTestUtil.CRLF +
 "   'phoenix.column.mapping' = 'id:ID, ts:TS'," + 
HiveTestUtil.CRLF +
 "   'phoenix.rowkeys'='id');" + HiveTestUtil.CRLF);
+/*
+Following query only for check that nanoseconds are correctly parsed 
with over 3 digits.
+ */
 sb.append("INSERT INTO TABLE timeStampTable VALUES (10, \"2013-01-02 
01:01:01.123456\");" + HiveTestUtil.CRLF);
-sb.append("SELECT * from timeStampTable WHERE ts between '2013-01-02 
01:01:01.123455' and " +
-" '2013-01-02 12:01:02.123457789' AND id = 10;" + 
HiveTestUtil.CRLF);
+sb.append("SELECT * from timeStampTable WHERE ts between '2012-01-02 
01:01:01.123455' and " +
+" '2015-01-02 12:01:02.123457789' AND id = 10;" + 
HiveTestUtil.CRLF);
 
 String fullPath = new Path(hbaseTestUtil.getDataTestDir(), 
testName).toString();
 createFile(sb.toString(), fullPath);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b21877d7/phoenix-hive/src/main/java/org/apache/phoenix/hive/constants/PhoenixStorageHandlerConstants.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/constants/PhoenixStorageHandlerConstants.java
 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/constants/PhoenixStorageHandlerConstants.java
index 

phoenix git commit: PHOENIX-4534 upsert/delete/upsert for the same row corrupts the indexes

2018-05-10 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/master 9566c78bd -> 0bcb872ba


PHOENIX-4534 upsert/delete/upsert for the same row corrupts the indexes


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/0bcb872b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/0bcb872b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/0bcb872b

Branch: refs/heads/master
Commit: 0bcb872ba7bb74e3131a339440e38f9689471388
Parents: 9566c78
Author: Rajeshbabu Chintaguntla 
Authored: Thu May 10 10:25:38 2018 -0700
Committer: ss77892 
Committed: Thu May 10 10:29:59 2018 -0700

--
 .../phoenix/end2end/index/MutableIndexIT.java   | 41 
 .../filter/ApplyAndFilterDeletesFilter.java |  7 
 .../index/scanner/FilteredKeyValueScanner.java  |  7 +++-
 3 files changed, 47 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/0bcb872b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java
index e968e99..cfaed72 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java
@@ -910,6 +910,47 @@ public class MutableIndexIT extends 
ParallelStatsDisabledIT {
   }
   }
 
+
+  @Test
+  public void testUpsertingDeletedRowShouldGiveProperDataWithIndexes() throws 
Exception {
+  testUpsertingDeletedRowShouldGiveProperDataWithIndexes(false);
+  }
+
+  @Test
+  public void testUpsertingDeletedRowShouldGiveProperDataWithMultiCFIndexes() 
throws Exception {
+  testUpsertingDeletedRowShouldGiveProperDataWithIndexes(true);
+  }
+
+  private void testUpsertingDeletedRowShouldGiveProperDataWithIndexes(boolean 
multiCf) throws Exception {
+  String tableName = "TBL_" + generateUniqueName();
+  String indexName = "IDX_" + generateUniqueName();
+  String columnFamily1 = "cf1";
+  String columnFamily2 = "cf2";
+  String fullTableName = 
SchemaUtil.getTableName(TestUtil.DEFAULT_SCHEMA_NAME, tableName);
+  try (Connection conn = getConnection()) {
+conn.createStatement().execute(
+"create table " + fullTableName + " (id integer primary key, "
++ (multiCf ? columnFamily1 : "") + "f float, "
++ (multiCf ? columnFamily2 : "") + "s varchar)" + 
tableDDLOptions);
+conn.createStatement().execute(
+"create index " + indexName + " on " + fullTableName + " ("
++ (multiCf ? columnFamily1 : "") + "f) include 
("+(multiCf ? columnFamily2 : "") +"s)");
+conn.createStatement().execute(
+"upsert into " + fullTableName + " values (1, 0.5, 'foo')");
+  conn.commit();
+  conn.createStatement().execute("delete from  " + fullTableName + " 
where id = 1");
+  conn.commit();
+conn.createStatement().execute(
+"upsert into  " + fullTableName + " values (1, 0.5, 'foo')");
+  conn.commit();
+  ResultSet rs = conn.createStatement().executeQuery("select * from 
"+indexName);
+  assertTrue(rs.next());
+  assertEquals(1, rs.getInt(2));
+  assertEquals(0.5F, rs.getFloat(1), 0.0);
+  assertEquals("foo", rs.getString(3));
+  } 
+  }
+
 private void upsertRow(String dml, Connection tenantConn, int i) throws 
SQLException {
 PreparedStatement stmt = tenantConn.prepareStatement(dml);
   stmt.setString(1, "00" + String.valueOf(i));

http://git-wip-us.apache.org/repos/asf/phoenix/blob/0bcb872b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/filter/ApplyAndFilterDeletesFilter.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/filter/ApplyAndFilterDeletesFilter.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/filter/ApplyAndFilterDeletesFilter.java
index 17779ba..2e9878d 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/filter/ApplyAndFilterDeletesFilter.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/filter/ApplyAndFilterDeletesFilter.java
@@ -105,11 +105,6 @@ public class ApplyAndFilterDeletesFilter extends 
FilterBase {
 
   @Override
   public ReturnCode filterKeyValue(Cell next) {
-// we marked ourselves done, but the END_ROW_KEY didn't manage to seek to 
the very last key
-

phoenix git commit: PHOENIX-4534 upsert/delete/upsert for the same row corrupts the indexes-addendum(Rajeshbabu)

2018-05-10 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/master 6157c13ee -> 9566c78bd


PHOENIX-4534 upsert/delete/upsert for the same row corrupts the 
indexes-addendum(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9566c78b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9566c78b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9566c78b

Branch: refs/heads/master
Commit: 9566c78bd8aaace54aaebf34171f0d837ad7c0b7
Parents: 6157c13
Author: Rajeshbabu Chintaguntla 
Authored: Thu Jan 25 17:58:40 2018 +0530
Committer: ss77892 
Committed: Thu May 10 10:06:48 2018 -0700

--
 .../hbase/index/covered/filter/ApplyAndFilterDeletesFilter.java | 1 -
 1 file changed, 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/9566c78b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/filter/ApplyAndFilterDeletesFilter.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/filter/ApplyAndFilterDeletesFilter.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/filter/ApplyAndFilterDeletesFilter.java
index a1f01ed..17779ba 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/filter/ApplyAndFilterDeletesFilter.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/filter/ApplyAndFilterDeletesFilter.java
@@ -95,7 +95,6 @@ public class ApplyAndFilterDeletesFilter extends FilterBase {
   @Override
   public void reset(){
 this.coveringDelete.reset();
-this.done = false;
   }
   
   



[2/6] phoenix git commit: PHOENIX-4669 NoSuchColumnFamilyException when creating index on views that are built on tables which have named column family

2018-04-09 Thread ssa
PHOENIX-4669 NoSuchColumnFamilyException when creating index on views that are 
built on tables which have named column family

Signed-off-by: ss77892 


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/697d871a
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/697d871a
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/697d871a

Branch: refs/heads/4.x-HBase-1.1
Commit: 697d871a7a02bf0380c26733670840d39d303c79
Parents: 9905126
Author: Toshihiro Suzuki 
Authored: Thu Mar 29 17:17:37 2018 +0900
Committer: ss77892 
Committed: Mon Apr 9 21:50:09 2018 -0700

--
 .../phoenix/end2end/index/ViewIndexIT.java  | 45 +++-
 .../query/ConnectionQueryServicesImpl.java  | 27 ++--
 2 files changed, 57 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/697d871a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java
index 53bb550..8ffd798 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java
@@ -31,6 +31,7 @@ import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.sql.Statement;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
@@ -443,5 +444,47 @@ public class ViewIndexIT extends ParallelStatsDisabledIT {
 assertFalse(rs.next());
 assertEquals(indexName, 
stmt.getQueryPlan().getContext().getCurrentTable().getTable().getName().getString());
 }
-
+
+@Test
+public void 
testCreatingIndexOnViewBuiltOnTableWithOnlyNamedColumnFamilies() throws 
Exception {
+try (Connection c = getConnection(); Statement s = 
c.createStatement()) {
+String tableName = generateUniqueName();
+String viewName = generateUniqueName();
+String indexName = generateUniqueName();
+
+c.setAutoCommit(true);
+s.execute("CREATE TABLE " + tableName + " (COL1 VARCHAR PRIMARY 
KEY, CF.COL2 VARCHAR)");
+s.executeUpdate("UPSERT INTO " + tableName + " VALUES ('AAA', 
'BBB')");
+s.execute("CREATE VIEW " + viewName + " AS SELECT * FROM " + 
tableName);
+s.execute("CREATE INDEX " + indexName + " ON " + viewName + " 
(CF.COL2)");
+
+try (ResultSet rs = s.executeQuery("SELECT * FROM " + viewName + " 
WHERE CF.COL2 = 'BBB'")) {
+assertTrue(rs.next());
+assertEquals("AAA", rs.getString("COL1"));
+assertEquals("BBB", rs.getString("COL2"));
+}
+}
+try (Connection c = getConnection(); Statement s = 
c.createStatement()) {
+String tableName = generateUniqueName();
+String viewName = generateUniqueName();
+String index1Name = generateUniqueName();
+String index2Name = generateUniqueName();
+
+c.setAutoCommit(true);
+s.execute("create table " + tableName + " (i1 integer primary key, 
c2.i2 integer, c3.i3 integer, c4.i4 integer)");
+s.execute("create view " + viewName + " as select * from " + 
tableName + " where c2.i2 = 1");
+s.executeUpdate("upsert into " + viewName + "(i1, c3.i3, c4.i4) 
VALUES (1, 1, 1)");
+s.execute("create index " + index1Name + " ON " + viewName + " 
(c3.i3)");
+s.execute("create index " + index2Name + " ON " + viewName + " 
(c3.i3) include (c4.i4)");
+s.executeUpdate("upsert into " + viewName + "(i1, c3.i3, c4.i4) 
VALUES (2, 2, 2)");
+
+try (ResultSet rs = s.executeQuery("select * from " + viewName + " 
WHERE c3.i3 = 1")) {
+assertTrue(rs.next());
+assertEquals(1, rs.getInt("i1"));
+assertEquals(1, rs.getInt("i2"));
+assertEquals(1, rs.getInt("i3"));
+assertEquals(1, rs.getInt("i4"));
+}
+}
+}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/697d871a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index 77aa376..e383e7d 100644
--- 

[4/6] phoenix git commit: PHOENIX-4669 NoSuchColumnFamilyException when creating index on views that are built on tables which have named column family

2018-04-09 Thread ssa
PHOENIX-4669 NoSuchColumnFamilyException when creating index on views that are 
built on tables which have named column family

Signed-off-by: ss77892 


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/753e7842
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/753e7842
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/753e7842

Branch: refs/heads/4.x-HBase-1.3
Commit: 753e78423e9784c62843b097824667ccf8c35eba
Parents: 775c046
Author: Toshihiro Suzuki 
Authored: Thu Mar 29 17:17:37 2018 +0900
Committer: ss77892 
Committed: Mon Apr 9 22:01:01 2018 -0700

--
 .../phoenix/end2end/index/ViewIndexIT.java  | 45 +++-
 .../query/ConnectionQueryServicesImpl.java  | 27 ++--
 2 files changed, 57 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/753e7842/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java
index 53bb550..8ffd798 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java
@@ -31,6 +31,7 @@ import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.sql.Statement;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
@@ -443,5 +444,47 @@ public class ViewIndexIT extends ParallelStatsDisabledIT {
 assertFalse(rs.next());
 assertEquals(indexName, 
stmt.getQueryPlan().getContext().getCurrentTable().getTable().getName().getString());
 }
-
+
+@Test
+public void 
testCreatingIndexOnViewBuiltOnTableWithOnlyNamedColumnFamilies() throws 
Exception {
+try (Connection c = getConnection(); Statement s = 
c.createStatement()) {
+String tableName = generateUniqueName();
+String viewName = generateUniqueName();
+String indexName = generateUniqueName();
+
+c.setAutoCommit(true);
+s.execute("CREATE TABLE " + tableName + " (COL1 VARCHAR PRIMARY 
KEY, CF.COL2 VARCHAR)");
+s.executeUpdate("UPSERT INTO " + tableName + " VALUES ('AAA', 
'BBB')");
+s.execute("CREATE VIEW " + viewName + " AS SELECT * FROM " + 
tableName);
+s.execute("CREATE INDEX " + indexName + " ON " + viewName + " 
(CF.COL2)");
+
+try (ResultSet rs = s.executeQuery("SELECT * FROM " + viewName + " 
WHERE CF.COL2 = 'BBB'")) {
+assertTrue(rs.next());
+assertEquals("AAA", rs.getString("COL1"));
+assertEquals("BBB", rs.getString("COL2"));
+}
+}
+try (Connection c = getConnection(); Statement s = 
c.createStatement()) {
+String tableName = generateUniqueName();
+String viewName = generateUniqueName();
+String index1Name = generateUniqueName();
+String index2Name = generateUniqueName();
+
+c.setAutoCommit(true);
+s.execute("create table " + tableName + " (i1 integer primary key, 
c2.i2 integer, c3.i3 integer, c4.i4 integer)");
+s.execute("create view " + viewName + " as select * from " + 
tableName + " where c2.i2 = 1");
+s.executeUpdate("upsert into " + viewName + "(i1, c3.i3, c4.i4) 
VALUES (1, 1, 1)");
+s.execute("create index " + index1Name + " ON " + viewName + " 
(c3.i3)");
+s.execute("create index " + index2Name + " ON " + viewName + " 
(c3.i3) include (c4.i4)");
+s.executeUpdate("upsert into " + viewName + "(i1, c3.i3, c4.i4) 
VALUES (2, 2, 2)");
+
+try (ResultSet rs = s.executeQuery("select * from " + viewName + " 
WHERE c3.i3 = 1")) {
+assertTrue(rs.next());
+assertEquals(1, rs.getInt("i1"));
+assertEquals(1, rs.getInt("i2"));
+assertEquals(1, rs.getInt("i3"));
+assertEquals(1, rs.getInt("i4"));
+}
+}
+}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/753e7842/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index eff406d..6df2f80 100644
--- 

[6/6] phoenix git commit: PHOENIX-4669 NoSuchColumnFamilyException when creating index on views that are built on tables which have named column family

2018-04-09 Thread ssa
PHOENIX-4669 NoSuchColumnFamilyException when creating index on views that are 
built on tables which have named column family

Signed-off-by: ss77892 


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c22e4c39
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c22e4c39
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c22e4c39

Branch: refs/heads/4.x-HBase-0.98
Commit: c22e4c39c60cee88930366d1544353dd7a87e037
Parents: a9ddf17
Author: Toshihiro Suzuki 
Authored: Thu Mar 29 17:17:37 2018 +0900
Committer: ss77892 
Committed: Mon Apr 9 22:18:33 2018 -0700

--
 .../phoenix/end2end/index/ViewIndexIT.java  | 45 +++-
 .../query/ConnectionQueryServicesImpl.java  | 27 ++--
 2 files changed, 57 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c22e4c39/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java
index 53bb550..8ffd798 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java
@@ -31,6 +31,7 @@ import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.sql.Statement;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
@@ -443,5 +444,47 @@ public class ViewIndexIT extends ParallelStatsDisabledIT {
 assertFalse(rs.next());
 assertEquals(indexName, 
stmt.getQueryPlan().getContext().getCurrentTable().getTable().getName().getString());
 }
-
+
+@Test
+public void 
testCreatingIndexOnViewBuiltOnTableWithOnlyNamedColumnFamilies() throws 
Exception {
+try (Connection c = getConnection(); Statement s = 
c.createStatement()) {
+String tableName = generateUniqueName();
+String viewName = generateUniqueName();
+String indexName = generateUniqueName();
+
+c.setAutoCommit(true);
+s.execute("CREATE TABLE " + tableName + " (COL1 VARCHAR PRIMARY 
KEY, CF.COL2 VARCHAR)");
+s.executeUpdate("UPSERT INTO " + tableName + " VALUES ('AAA', 
'BBB')");
+s.execute("CREATE VIEW " + viewName + " AS SELECT * FROM " + 
tableName);
+s.execute("CREATE INDEX " + indexName + " ON " + viewName + " 
(CF.COL2)");
+
+try (ResultSet rs = s.executeQuery("SELECT * FROM " + viewName + " 
WHERE CF.COL2 = 'BBB'")) {
+assertTrue(rs.next());
+assertEquals("AAA", rs.getString("COL1"));
+assertEquals("BBB", rs.getString("COL2"));
+}
+}
+try (Connection c = getConnection(); Statement s = 
c.createStatement()) {
+String tableName = generateUniqueName();
+String viewName = generateUniqueName();
+String index1Name = generateUniqueName();
+String index2Name = generateUniqueName();
+
+c.setAutoCommit(true);
+s.execute("create table " + tableName + " (i1 integer primary key, 
c2.i2 integer, c3.i3 integer, c4.i4 integer)");
+s.execute("create view " + viewName + " as select * from " + 
tableName + " where c2.i2 = 1");
+s.executeUpdate("upsert into " + viewName + "(i1, c3.i3, c4.i4) 
VALUES (1, 1, 1)");
+s.execute("create index " + index1Name + " ON " + viewName + " 
(c3.i3)");
+s.execute("create index " + index2Name + " ON " + viewName + " 
(c3.i3) include (c4.i4)");
+s.executeUpdate("upsert into " + viewName + "(i1, c3.i3, c4.i4) 
VALUES (2, 2, 2)");
+
+try (ResultSet rs = s.executeQuery("select * from " + viewName + " 
WHERE c3.i3 = 1")) {
+assertTrue(rs.next());
+assertEquals(1, rs.getInt("i1"));
+assertEquals(1, rs.getInt("i2"));
+assertEquals(1, rs.getInt("i3"));
+assertEquals(1, rs.getInt("i4"));
+}
+}
+}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c22e4c39/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index 9c17f03..1989c54 100644
--- 

[1/6] phoenix git commit: PHOENIX-4669 NoSuchColumnFamilyException when creating index on views that are built on tables which have named column family

2018-04-09 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 a9ddf1709 -> c22e4c39c
  refs/heads/4.x-HBase-1.1 990512606 -> 697d871a7
  refs/heads/4.x-HBase-1.2 18c778388 -> aec5101d9
  refs/heads/4.x-HBase-1.3 775c046ea -> 753e78423
  refs/heads/5.x-HBase-2.0 d653973a2 -> 880cda16f
  refs/heads/master 28c11fe3f -> 98a8ddda4


PHOENIX-4669 NoSuchColumnFamilyException when creating index on views that are 
built on tables which have named column family

Signed-off-by: ss77892 


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/98a8ddda
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/98a8ddda
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/98a8ddda

Branch: refs/heads/master
Commit: 98a8ddda49eb38b72984ea3800c4ed37299f0254
Parents: 28c11fe
Author: Toshihiro Suzuki 
Authored: Thu Mar 29 17:17:37 2018 +0900
Committer: ss77892 
Committed: Mon Apr 9 21:39:40 2018 -0700

--
 .../phoenix/end2end/index/ViewIndexIT.java  | 45 +++-
 .../query/ConnectionQueryServicesImpl.java  | 27 ++--
 2 files changed, 57 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/98a8ddda/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java
index 53bb550..8ffd798 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java
@@ -31,6 +31,7 @@ import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.sql.Statement;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
@@ -443,5 +444,47 @@ public class ViewIndexIT extends ParallelStatsDisabledIT {
 assertFalse(rs.next());
 assertEquals(indexName, 
stmt.getQueryPlan().getContext().getCurrentTable().getTable().getName().getString());
 }
-
+
+@Test
+public void 
testCreatingIndexOnViewBuiltOnTableWithOnlyNamedColumnFamilies() throws 
Exception {
+try (Connection c = getConnection(); Statement s = 
c.createStatement()) {
+String tableName = generateUniqueName();
+String viewName = generateUniqueName();
+String indexName = generateUniqueName();
+
+c.setAutoCommit(true);
+s.execute("CREATE TABLE " + tableName + " (COL1 VARCHAR PRIMARY 
KEY, CF.COL2 VARCHAR)");
+s.executeUpdate("UPSERT INTO " + tableName + " VALUES ('AAA', 
'BBB')");
+s.execute("CREATE VIEW " + viewName + " AS SELECT * FROM " + 
tableName);
+s.execute("CREATE INDEX " + indexName + " ON " + viewName + " 
(CF.COL2)");
+
+try (ResultSet rs = s.executeQuery("SELECT * FROM " + viewName + " 
WHERE CF.COL2 = 'BBB'")) {
+assertTrue(rs.next());
+assertEquals("AAA", rs.getString("COL1"));
+assertEquals("BBB", rs.getString("COL2"));
+}
+}
+try (Connection c = getConnection(); Statement s = 
c.createStatement()) {
+String tableName = generateUniqueName();
+String viewName = generateUniqueName();
+String index1Name = generateUniqueName();
+String index2Name = generateUniqueName();
+
+c.setAutoCommit(true);
+s.execute("create table " + tableName + " (i1 integer primary key, 
c2.i2 integer, c3.i3 integer, c4.i4 integer)");
+s.execute("create view " + viewName + " as select * from " + 
tableName + " where c2.i2 = 1");
+s.executeUpdate("upsert into " + viewName + "(i1, c3.i3, c4.i4) 
VALUES (1, 1, 1)");
+s.execute("create index " + index1Name + " ON " + viewName + " 
(c3.i3)");
+s.execute("create index " + index2Name + " ON " + viewName + " 
(c3.i3) include (c4.i4)");
+s.executeUpdate("upsert into " + viewName + "(i1, c3.i3, c4.i4) 
VALUES (2, 2, 2)");
+
+try (ResultSet rs = s.executeQuery("select * from " + viewName + " 
WHERE c3.i3 = 1")) {
+assertTrue(rs.next());
+assertEquals(1, rs.getInt("i1"));
+assertEquals(1, rs.getInt("i2"));
+assertEquals(1, rs.getInt("i3"));
+assertEquals(1, rs.getInt("i4"));
+}
+}
+}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/98a8ddda/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java

[3/6] phoenix git commit: PHOENIX-4669 NoSuchColumnFamilyException when creating index on views that are built on tables which have named column family

2018-04-09 Thread ssa
PHOENIX-4669 NoSuchColumnFamilyException when creating index on views that are 
built on tables which have named column family

Signed-off-by: ss77892 


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/aec5101d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/aec5101d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/aec5101d

Branch: refs/heads/4.x-HBase-1.2
Commit: aec5101d92b2be20ad227ddf9c41b6d1c9feada6
Parents: 18c7783
Author: Toshihiro Suzuki 
Authored: Thu Mar 29 17:17:37 2018 +0900
Committer: ss77892 
Committed: Mon Apr 9 22:00:46 2018 -0700

--
 .../phoenix/end2end/index/ViewIndexIT.java  | 45 +++-
 .../query/ConnectionQueryServicesImpl.java  | 27 ++--
 2 files changed, 57 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/aec5101d/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java
index 53bb550..8ffd798 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ViewIndexIT.java
@@ -31,6 +31,7 @@ import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.sql.Statement;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
@@ -443,5 +444,47 @@ public class ViewIndexIT extends ParallelStatsDisabledIT {
 assertFalse(rs.next());
 assertEquals(indexName, 
stmt.getQueryPlan().getContext().getCurrentTable().getTable().getName().getString());
 }
-
+
+@Test
+public void 
testCreatingIndexOnViewBuiltOnTableWithOnlyNamedColumnFamilies() throws 
Exception {
+try (Connection c = getConnection(); Statement s = 
c.createStatement()) {
+String tableName = generateUniqueName();
+String viewName = generateUniqueName();
+String indexName = generateUniqueName();
+
+c.setAutoCommit(true);
+s.execute("CREATE TABLE " + tableName + " (COL1 VARCHAR PRIMARY 
KEY, CF.COL2 VARCHAR)");
+s.executeUpdate("UPSERT INTO " + tableName + " VALUES ('AAA', 
'BBB')");
+s.execute("CREATE VIEW " + viewName + " AS SELECT * FROM " + 
tableName);
+s.execute("CREATE INDEX " + indexName + " ON " + viewName + " 
(CF.COL2)");
+
+try (ResultSet rs = s.executeQuery("SELECT * FROM " + viewName + " 
WHERE CF.COL2 = 'BBB'")) {
+assertTrue(rs.next());
+assertEquals("AAA", rs.getString("COL1"));
+assertEquals("BBB", rs.getString("COL2"));
+}
+}
+try (Connection c = getConnection(); Statement s = 
c.createStatement()) {
+String tableName = generateUniqueName();
+String viewName = generateUniqueName();
+String index1Name = generateUniqueName();
+String index2Name = generateUniqueName();
+
+c.setAutoCommit(true);
+s.execute("create table " + tableName + " (i1 integer primary key, 
c2.i2 integer, c3.i3 integer, c4.i4 integer)");
+s.execute("create view " + viewName + " as select * from " + 
tableName + " where c2.i2 = 1");
+s.executeUpdate("upsert into " + viewName + "(i1, c3.i3, c4.i4) 
VALUES (1, 1, 1)");
+s.execute("create index " + index1Name + " ON " + viewName + " 
(c3.i3)");
+s.execute("create index " + index2Name + " ON " + viewName + " 
(c3.i3) include (c4.i4)");
+s.executeUpdate("upsert into " + viewName + "(i1, c3.i3, c4.i4) 
VALUES (2, 2, 2)");
+
+try (ResultSet rs = s.executeQuery("select * from " + viewName + " 
WHERE c3.i3 = 1")) {
+assertTrue(rs.next());
+assertEquals(1, rs.getInt("i1"));
+assertEquals(1, rs.getInt("i2"));
+assertEquals(1, rs.getInt("i3"));
+assertEquals(1, rs.getInt("i4"));
+}
+}
+}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/aec5101d/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index eff406d..6df2f80 100644
--- 

phoenix git commit: PHOENIX-4536 Change getWAL usage due HBASE-19751

2018-01-19 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/5.x-HBase-2.0 7fa371977 -> 8ac5ec08d


PHOENIX-4536 Change getWAL usage due HBASE-19751


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8ac5ec08
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8ac5ec08
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8ac5ec08

Branch: refs/heads/5.x-HBase-2.0
Commit: 8ac5ec08d6dac484a80d453b44ee2f69609ca6a5
Parents: 7fa3719
Author: Sergey Soldatov 
Authored: Thu Jan 18 13:06:34 2018 -0800
Committer: Sergey Soldatov 
Committed: Thu Jan 18 13:06:34 2018 -0800

--
 .../wal/WALReplayWithIndexWritesAndCompressedWALIT.java| 2 +-
 .../hbase/index/write/recovery/TestPerRegionIndexWriteCache.java   | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/8ac5ec08/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
 
b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
index c8accfd..c0e822f 100644
--- 
a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
@@ -267,7 +267,7 @@ public class WALReplayWithIndexWritesAndCompressedWALIT {
* @throws IOException
*/
   private WAL createWAL(final Configuration c, WALFactory walFactory) throws 
IOException {
-WAL wal = walFactory.getWAL(new byte[]{}, null);
+WAL wal = walFactory.getWAL(null);
 
 // Set down maximum recovery so we dfsclient doesn't linger retrying 
something
 // long gone.

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8ac5ec08/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/recovery/TestPerRegionIndexWriteCache.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/recovery/TestPerRegionIndexWriteCache.java
 
b/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/recovery/TestPerRegionIndexWriteCache.java
index c51ac99..2ec1a8d 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/recovery/TestPerRegionIndexWriteCache.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/recovery/TestPerRegionIndexWriteCache.java
@@ -106,7 +106,7 @@ public class TestPerRegionIndexWriteCache {
   Random rn = new Random();
   tableName = TableName.valueOf("TestPerRegion" + rn.nextInt());
   WALFactory walFactory = new WALFactory(TEST_UTIL.getConfiguration(), 
null, getClass().getSimpleName());
-  wal = walFactory.getWAL(Bytes.toBytes("logs"), null);
+  wal = 
walFactory.getWAL(RegionInfoBuilder.newBuilder(TableName.valueOf("logs")).build());
 TableDescriptor htd =
 TableDescriptorBuilder
 .newBuilder(tableName)



[3/6] phoenix git commit: PHOENIX-4525 Integer overflow in GroupBy execution

2018-01-11 Thread ssa
PHOENIX-4525 Integer overflow in GroupBy execution


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/964a96d2
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/964a96d2
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/964a96d2

Branch: refs/heads/4.x-HBase-0.98
Commit: 964a96d29249ec7b61acee0c6607d44671323d27
Parents: f0939f2
Author: Sergey Soldatov 
Authored: Wed Jan 10 13:04:00 2018 -0800
Committer: Sergey Soldatov 
Committed: Thu Jan 11 15:58:37 2018 -0800

--
 .../main/java/org/apache/phoenix/util/SizedUtil.java |  2 +-
 .../org/apache/phoenix/memory/MemoryManagerTest.java | 15 +++
 2 files changed, 16 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/964a96d2/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
index f82c1b8..d67ed7f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
@@ -67,7 +67,7 @@ public class SizedUtil {
 
 public static long sizeOfMap(int nRows, int keySize, int valueSize) {
 return SizedUtil.OBJECT_SIZE * 4 + sizeOfArrayList(nRows) /* key set 
*/ + nRows * (
-SizedUtil.MAP_ENTRY_SIZE + /* entry set */
+SizedUtil.MAP_ENTRY_SIZE * 1L + /* entry set */
 keySize + // key size
 valueSize); // value size
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/964a96d2/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
index 6da2526..897bb5b 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
@@ -26,6 +26,7 @@ import java.util.List;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver;
 import org.apache.phoenix.memory.MemoryManager.MemoryChunk;
 import org.junit.Test;
 
@@ -177,4 +178,18 @@ public class MemoryManagerTest {
 // make sure all memory is freed
 assertTrue(gmm.getAvailableMemory() == gmm.getMaxMemory());
 }
+
+/**
+ * Test for SpillableGroupByCache which is using MemoryManager to allocate 
chunks for GroupBy execution
+ * @throws Exception
+ */
+@Test
+public void testCorrectnessOfChunkAllocation() throws Exception {
+for(int i = 1000;i < Integer.MAX_VALUE;) {
+i *=1.5f;
+long result = 
GroupedAggregateRegionObserver.sizeOfUnorderedGroupByMap(i, 100);
+assertTrue("Size for GroupByMap is negative" , result > 0);
+}
+}
+
 }



[2/6] phoenix git commit: PHOENIX-4525 Integer overflow in GroupBy execution

2018-01-11 Thread ssa
PHOENIX-4525 Integer overflow in GroupBy execution


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/7fa37197
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/7fa37197
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/7fa37197

Branch: refs/heads/5.x-HBase-2.0
Commit: 7fa3719775ee28941f5322063f6a13a17b97be28
Parents: 3d6ef85
Author: Sergey Soldatov 
Authored: Wed Jan 10 13:04:00 2018 -0800
Committer: Sergey Soldatov 
Committed: Thu Jan 11 15:58:21 2018 -0800

--
 .../main/java/org/apache/phoenix/util/SizedUtil.java |  2 +-
 .../org/apache/phoenix/memory/MemoryManagerTest.java | 15 +++
 2 files changed, 16 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/7fa37197/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
index f82c1b8..d67ed7f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
@@ -67,7 +67,7 @@ public class SizedUtil {
 
 public static long sizeOfMap(int nRows, int keySize, int valueSize) {
 return SizedUtil.OBJECT_SIZE * 4 + sizeOfArrayList(nRows) /* key set 
*/ + nRows * (
-SizedUtil.MAP_ENTRY_SIZE + /* entry set */
+SizedUtil.MAP_ENTRY_SIZE * 1L + /* entry set */
 keySize + // key size
 valueSize); // value size
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/7fa37197/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
index 6da2526..897bb5b 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
@@ -26,6 +26,7 @@ import java.util.List;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver;
 import org.apache.phoenix.memory.MemoryManager.MemoryChunk;
 import org.junit.Test;
 
@@ -177,4 +178,18 @@ public class MemoryManagerTest {
 // make sure all memory is freed
 assertTrue(gmm.getAvailableMemory() == gmm.getMaxMemory());
 }
+
+/**
+ * Test for SpillableGroupByCache which is using MemoryManager to allocate 
chunks for GroupBy execution
+ * @throws Exception
+ */
+@Test
+public void testCorrectnessOfChunkAllocation() throws Exception {
+for(int i = 1000;i < Integer.MAX_VALUE;) {
+i *=1.5f;
+long result = 
GroupedAggregateRegionObserver.sizeOfUnorderedGroupByMap(i, 100);
+assertTrue("Size for GroupByMap is negative" , result > 0);
+}
+}
+
 }



[5/6] phoenix git commit: PHOENIX-4525 Integer overflow in GroupBy execution

2018-01-11 Thread ssa
PHOENIX-4525 Integer overflow in GroupBy execution


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/bf0c6599
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/bf0c6599
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/bf0c6599

Branch: refs/heads/4.x-HBase-1.2
Commit: bf0c659902df34465ff00193beea2dade891c7c8
Parents: a90c154
Author: Sergey Soldatov 
Authored: Wed Jan 10 13:04:00 2018 -0800
Committer: Sergey Soldatov 
Committed: Thu Jan 11 15:59:11 2018 -0800

--
 .../main/java/org/apache/phoenix/util/SizedUtil.java |  2 +-
 .../org/apache/phoenix/memory/MemoryManagerTest.java | 15 +++
 2 files changed, 16 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/bf0c6599/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
index f82c1b8..d67ed7f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
@@ -67,7 +67,7 @@ public class SizedUtil {
 
 public static long sizeOfMap(int nRows, int keySize, int valueSize) {
 return SizedUtil.OBJECT_SIZE * 4 + sizeOfArrayList(nRows) /* key set 
*/ + nRows * (
-SizedUtil.MAP_ENTRY_SIZE + /* entry set */
+SizedUtil.MAP_ENTRY_SIZE * 1L + /* entry set */
 keySize + // key size
 valueSize); // value size
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/bf0c6599/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
index 6da2526..897bb5b 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
@@ -26,6 +26,7 @@ import java.util.List;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver;
 import org.apache.phoenix.memory.MemoryManager.MemoryChunk;
 import org.junit.Test;
 
@@ -177,4 +178,18 @@ public class MemoryManagerTest {
 // make sure all memory is freed
 assertTrue(gmm.getAvailableMemory() == gmm.getMaxMemory());
 }
+
+/**
+ * Test for SpillableGroupByCache which is using MemoryManager to allocate 
chunks for GroupBy execution
+ * @throws Exception
+ */
+@Test
+public void testCorrectnessOfChunkAllocation() throws Exception {
+for(int i = 1000;i < Integer.MAX_VALUE;) {
+i *=1.5f;
+long result = 
GroupedAggregateRegionObserver.sizeOfUnorderedGroupByMap(i, 100);
+assertTrue("Size for GroupByMap is negative" , result > 0);
+}
+}
+
 }



[4/6] phoenix git commit: PHOENIX-4525 Integer overflow in GroupBy execution

2018-01-11 Thread ssa
PHOENIX-4525 Integer overflow in GroupBy execution


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/24288ab3
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/24288ab3
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/24288ab3

Branch: refs/heads/4.x-HBase-1.1
Commit: 24288ab31bd1e4248974be38ccc8635cd93c7204
Parents: a16abdf
Author: Sergey Soldatov 
Authored: Wed Jan 10 13:04:00 2018 -0800
Committer: Sergey Soldatov 
Committed: Thu Jan 11 15:59:01 2018 -0800

--
 .../main/java/org/apache/phoenix/util/SizedUtil.java |  2 +-
 .../org/apache/phoenix/memory/MemoryManagerTest.java | 15 +++
 2 files changed, 16 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/24288ab3/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
index f82c1b8..d67ed7f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
@@ -67,7 +67,7 @@ public class SizedUtil {
 
 public static long sizeOfMap(int nRows, int keySize, int valueSize) {
 return SizedUtil.OBJECT_SIZE * 4 + sizeOfArrayList(nRows) /* key set 
*/ + nRows * (
-SizedUtil.MAP_ENTRY_SIZE + /* entry set */
+SizedUtil.MAP_ENTRY_SIZE * 1L + /* entry set */
 keySize + // key size
 valueSize); // value size
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/24288ab3/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
index 6da2526..897bb5b 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
@@ -26,6 +26,7 @@ import java.util.List;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver;
 import org.apache.phoenix.memory.MemoryManager.MemoryChunk;
 import org.junit.Test;
 
@@ -177,4 +178,18 @@ public class MemoryManagerTest {
 // make sure all memory is freed
 assertTrue(gmm.getAvailableMemory() == gmm.getMaxMemory());
 }
+
+/**
+ * Test for SpillableGroupByCache which is using MemoryManager to allocate 
chunks for GroupBy execution
+ * @throws Exception
+ */
+@Test
+public void testCorrectnessOfChunkAllocation() throws Exception {
+for(int i = 1000;i < Integer.MAX_VALUE;) {
+i *=1.5f;
+long result = 
GroupedAggregateRegionObserver.sizeOfUnorderedGroupByMap(i, 100);
+assertTrue("Size for GroupByMap is negative" , result > 0);
+}
+}
+
 }



[6/6] phoenix git commit: PHOENIX-4525 Integer overflow in GroupBy execution

2018-01-11 Thread ssa
PHOENIX-4525 Integer overflow in GroupBy execution


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/25ded7c4
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/25ded7c4
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/25ded7c4

Branch: refs/heads/4.x-cdh5.11.2
Commit: 25ded7c462b04af1b757bf086314debe0ad01845
Parents: 3990741
Author: Sergey Soldatov 
Authored: Wed Jan 10 13:04:00 2018 -0800
Committer: Sergey Soldatov 
Committed: Thu Jan 11 15:59:21 2018 -0800

--
 .../main/java/org/apache/phoenix/util/SizedUtil.java |  2 +-
 .../org/apache/phoenix/memory/MemoryManagerTest.java | 15 +++
 2 files changed, 16 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/25ded7c4/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
index f82c1b8..d67ed7f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
@@ -67,7 +67,7 @@ public class SizedUtil {
 
 public static long sizeOfMap(int nRows, int keySize, int valueSize) {
 return SizedUtil.OBJECT_SIZE * 4 + sizeOfArrayList(nRows) /* key set 
*/ + nRows * (
-SizedUtil.MAP_ENTRY_SIZE + /* entry set */
+SizedUtil.MAP_ENTRY_SIZE * 1L + /* entry set */
 keySize + // key size
 valueSize); // value size
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/25ded7c4/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
index 6da2526..897bb5b 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
@@ -26,6 +26,7 @@ import java.util.List;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver;
 import org.apache.phoenix.memory.MemoryManager.MemoryChunk;
 import org.junit.Test;
 
@@ -177,4 +178,18 @@ public class MemoryManagerTest {
 // make sure all memory is freed
 assertTrue(gmm.getAvailableMemory() == gmm.getMaxMemory());
 }
+
+/**
+ * Test for SpillableGroupByCache which is using MemoryManager to allocate 
chunks for GroupBy execution
+ * @throws Exception
+ */
+@Test
+public void testCorrectnessOfChunkAllocation() throws Exception {
+for(int i = 1000;i < Integer.MAX_VALUE;) {
+i *=1.5f;
+long result = 
GroupedAggregateRegionObserver.sizeOfUnorderedGroupByMap(i, 100);
+assertTrue("Size for GroupByMap is negative" , result > 0);
+}
+}
+
 }



[1/6] phoenix git commit: PHOENIX-4525 Integer overflow in GroupBy execution

2018-01-11 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 f0939f235 -> 964a96d29
  refs/heads/4.x-HBase-1.1 a16abdf8c -> 24288ab31
  refs/heads/4.x-HBase-1.2 a90c1547f -> bf0c65990
  refs/heads/4.x-cdh5.11.2 3990741b9 -> 25ded7c46
  refs/heads/5.x-HBase-2.0 3d6ef8525 -> 7fa371977
  refs/heads/master 83adf0d1a -> 3035fb11b


PHOENIX-4525 Integer overflow in GroupBy execution


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/3035fb11
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/3035fb11
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/3035fb11

Branch: refs/heads/master
Commit: 3035fb11b8523c68b70e55d9a0fd1646eb6d15cf
Parents: 83adf0d
Author: Sergey Soldatov 
Authored: Wed Jan 10 13:04:00 2018 -0800
Committer: Sergey Soldatov 
Committed: Thu Jan 11 15:57:52 2018 -0800

--
 .../main/java/org/apache/phoenix/util/SizedUtil.java |  2 +-
 .../org/apache/phoenix/memory/MemoryManagerTest.java | 15 +++
 2 files changed, 16 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/3035fb11/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
index f82c1b8..d67ed7f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/SizedUtil.java
@@ -67,7 +67,7 @@ public class SizedUtil {
 
 public static long sizeOfMap(int nRows, int keySize, int valueSize) {
 return SizedUtil.OBJECT_SIZE * 4 + sizeOfArrayList(nRows) /* key set 
*/ + nRows * (
-SizedUtil.MAP_ENTRY_SIZE + /* entry set */
+SizedUtil.MAP_ENTRY_SIZE * 1L + /* entry set */
 keySize + // key size
 valueSize); // value size
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3035fb11/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
index 6da2526..897bb5b 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/memory/MemoryManagerTest.java
@@ -26,6 +26,7 @@ import java.util.List;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver;
 import org.apache.phoenix.memory.MemoryManager.MemoryChunk;
 import org.junit.Test;
 
@@ -177,4 +178,18 @@ public class MemoryManagerTest {
 // make sure all memory is freed
 assertTrue(gmm.getAvailableMemory() == gmm.getMaxMemory());
 }
+
+/**
+ * Test for SpillableGroupByCache which is using MemoryManager to allocate 
chunks for GroupBy execution
+ * @throws Exception
+ */
+@Test
+public void testCorrectnessOfChunkAllocation() throws Exception {
+for(int i = 1000;i < Integer.MAX_VALUE;) {
+i *=1.5f;
+long result = 
GroupedAggregateRegionObserver.sizeOfUnorderedGroupByMap(i, 100);
+assertTrue("Size for GroupByMap is negative" , result > 0);
+}
+}
+
 }



[1/6] phoenix git commit: PHOENIX-4456 queryserver script doesn't perform as expected.

2017-12-13 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 c4706109e -> 361c2da6a
  refs/heads/4.x-HBase-1.1 441539d1b -> 9d0a6bb7e
  refs/heads/4.x-HBase-1.2 f93443c5d -> 1625dd620
  refs/heads/4.x-cdh5.11.2 c49de1c5f -> bea0592c2
  refs/heads/5.x-HBase-2.0 a54fd4aa6 -> 5019be449
  refs/heads/master 1a19d1ecb -> 90c724161


PHOENIX-4456 queryserver script doesn't perform as expected.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/5019be44
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/5019be44
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/5019be44

Branch: refs/heads/5.x-HBase-2.0
Commit: 5019be449d3431b71e8245688369fa90a6bf4a82
Parents: a54fd4a
Author: Sergey Soldatov 
Authored: Tue Dec 12 23:20:48 2017 -0800
Committer: Sergey Soldatov 
Committed: Wed Dec 13 10:15:36 2017 -0800

--
 bin/daemon.py | 16 +---
 1 file changed, 13 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/5019be44/bin/daemon.py
--
diff --git a/bin/daemon.py b/bin/daemon.py
index aeebae3..bb64148 100644
--- a/bin/daemon.py
+++ b/bin/daemon.py
@@ -57,6 +57,7 @@ import signal
 import socket
 import atexit
 import fcntl
+import time
 try:
 # Python 2 has both ‘str’ (bytes) and ‘unicode’ (text).
 basestring = basestring
@@ -386,7 +387,7 @@ class DaemonContext:
 change_process_owner(self.uid, self.gid)
 
 if self.detach_process:
-detach_process_context()
+detach_process_context(self.pidfile)
 
 signal_handler_map = self._make_signal_handler_map()
 set_signal_handlers(signal_handler_map)
@@ -657,7 +658,7 @@ def prevent_core_dump():
 resource.setrlimit(core_resource, core_limit)
 
 
-def detach_process_context():
+def detach_process_context(pidfile):
 """ Detach the process context from parent and session.
 
 :return: ``None``.
@@ -683,6 +684,8 @@ def detach_process_context():
 try:
 pid = os.fork()
 if pid > 0:
+while not os.path.exists(pidfile.path):
+time.sleep(0.1)
 os._exit(0)
 except OSError as exc:
 error = DaemonProcessDetachError(
@@ -959,7 +962,14 @@ found at [1].
 def __init__(self, path, enter_err_msg=None):
 self.path = path
 self.enter_err_msg = enter_err_msg
-self.pidfile = None
+self.pidfile = open(self.path, 'a+')
+try:
+fcntl.flock(self.pidfile.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
+fcntl.flock(self.pidfile.fileno(), fcntl.LOCK_UN)
+self.pidfile.close()
+os.remove(self.path)
+except IOError:
+sys.exit(self.enter_err_msg)
 
 def __enter__(self):
 self.pidfile = open(self.path, 'a+')



[3/6] phoenix git commit: PHOENIX-4456 queryserver script doesn't perform as expected.

2017-12-13 Thread ssa
PHOENIX-4456 queryserver script doesn't perform as expected.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/90c72416
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/90c72416
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/90c72416

Branch: refs/heads/master
Commit: 90c7241611667e3cd3689ce6a72762c6315231ef
Parents: 1a19d1e
Author: Sergey Soldatov 
Authored: Tue Dec 12 23:20:48 2017 -0800
Committer: Sergey Soldatov 
Committed: Wed Dec 13 10:16:54 2017 -0800

--
 bin/daemon.py | 16 +---
 1 file changed, 13 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/90c72416/bin/daemon.py
--
diff --git a/bin/daemon.py b/bin/daemon.py
index aeebae3..bb64148 100644
--- a/bin/daemon.py
+++ b/bin/daemon.py
@@ -57,6 +57,7 @@ import signal
 import socket
 import atexit
 import fcntl
+import time
 try:
 # Python 2 has both ‘str’ (bytes) and ‘unicode’ (text).
 basestring = basestring
@@ -386,7 +387,7 @@ class DaemonContext:
 change_process_owner(self.uid, self.gid)
 
 if self.detach_process:
-detach_process_context()
+detach_process_context(self.pidfile)
 
 signal_handler_map = self._make_signal_handler_map()
 set_signal_handlers(signal_handler_map)
@@ -657,7 +658,7 @@ def prevent_core_dump():
 resource.setrlimit(core_resource, core_limit)
 
 
-def detach_process_context():
+def detach_process_context(pidfile):
 """ Detach the process context from parent and session.
 
 :return: ``None``.
@@ -683,6 +684,8 @@ def detach_process_context():
 try:
 pid = os.fork()
 if pid > 0:
+while not os.path.exists(pidfile.path):
+time.sleep(0.1)
 os._exit(0)
 except OSError as exc:
 error = DaemonProcessDetachError(
@@ -959,7 +962,14 @@ found at [1].
 def __init__(self, path, enter_err_msg=None):
 self.path = path
 self.enter_err_msg = enter_err_msg
-self.pidfile = None
+self.pidfile = open(self.path, 'a+')
+try:
+fcntl.flock(self.pidfile.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
+fcntl.flock(self.pidfile.fileno(), fcntl.LOCK_UN)
+self.pidfile.close()
+os.remove(self.path)
+except IOError:
+sys.exit(self.enter_err_msg)
 
 def __enter__(self):
 self.pidfile = open(self.path, 'a+')



[5/6] phoenix git commit: PHOENIX-4456 queryserver script doesn't perform as expected.

2017-12-13 Thread ssa
PHOENIX-4456 queryserver script doesn't perform as expected.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9d0a6bb7
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9d0a6bb7
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9d0a6bb7

Branch: refs/heads/4.x-HBase-1.1
Commit: 9d0a6bb7eed0b804f85b3a866b82332278c7161d
Parents: 441539d
Author: Sergey Soldatov 
Authored: Tue Dec 12 23:20:48 2017 -0800
Committer: Sergey Soldatov 
Committed: Wed Dec 13 10:18:15 2017 -0800

--
 bin/daemon.py | 16 +---
 1 file changed, 13 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/9d0a6bb7/bin/daemon.py
--
diff --git a/bin/daemon.py b/bin/daemon.py
index aeebae3..bb64148 100644
--- a/bin/daemon.py
+++ b/bin/daemon.py
@@ -57,6 +57,7 @@ import signal
 import socket
 import atexit
 import fcntl
+import time
 try:
 # Python 2 has both ‘str’ (bytes) and ‘unicode’ (text).
 basestring = basestring
@@ -386,7 +387,7 @@ class DaemonContext:
 change_process_owner(self.uid, self.gid)
 
 if self.detach_process:
-detach_process_context()
+detach_process_context(self.pidfile)
 
 signal_handler_map = self._make_signal_handler_map()
 set_signal_handlers(signal_handler_map)
@@ -657,7 +658,7 @@ def prevent_core_dump():
 resource.setrlimit(core_resource, core_limit)
 
 
-def detach_process_context():
+def detach_process_context(pidfile):
 """ Detach the process context from parent and session.
 
 :return: ``None``.
@@ -683,6 +684,8 @@ def detach_process_context():
 try:
 pid = os.fork()
 if pid > 0:
+while not os.path.exists(pidfile.path):
+time.sleep(0.1)
 os._exit(0)
 except OSError as exc:
 error = DaemonProcessDetachError(
@@ -959,7 +962,14 @@ found at [1].
 def __init__(self, path, enter_err_msg=None):
 self.path = path
 self.enter_err_msg = enter_err_msg
-self.pidfile = None
+self.pidfile = open(self.path, 'a+')
+try:
+fcntl.flock(self.pidfile.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
+fcntl.flock(self.pidfile.fileno(), fcntl.LOCK_UN)
+self.pidfile.close()
+os.remove(self.path)
+except IOError:
+sys.exit(self.enter_err_msg)
 
 def __enter__(self):
 self.pidfile = open(self.path, 'a+')



[6/6] phoenix git commit: PHOENIX-4456 queryserver script doesn't perform as expected.

2017-12-13 Thread ssa
PHOENIX-4456 queryserver script doesn't perform as expected.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/361c2da6
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/361c2da6
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/361c2da6

Branch: refs/heads/4.x-HBase-0.98
Commit: 361c2da6abdc10908021a807c7022b60a3e444d1
Parents: c470610
Author: Sergey Soldatov 
Authored: Tue Dec 12 23:20:48 2017 -0800
Committer: Sergey Soldatov 
Committed: Wed Dec 13 10:18:45 2017 -0800

--
 bin/daemon.py | 16 +---
 1 file changed, 13 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/361c2da6/bin/daemon.py
--
diff --git a/bin/daemon.py b/bin/daemon.py
index aeebae3..bb64148 100644
--- a/bin/daemon.py
+++ b/bin/daemon.py
@@ -57,6 +57,7 @@ import signal
 import socket
 import atexit
 import fcntl
+import time
 try:
 # Python 2 has both ‘str’ (bytes) and ‘unicode’ (text).
 basestring = basestring
@@ -386,7 +387,7 @@ class DaemonContext:
 change_process_owner(self.uid, self.gid)
 
 if self.detach_process:
-detach_process_context()
+detach_process_context(self.pidfile)
 
 signal_handler_map = self._make_signal_handler_map()
 set_signal_handlers(signal_handler_map)
@@ -657,7 +658,7 @@ def prevent_core_dump():
 resource.setrlimit(core_resource, core_limit)
 
 
-def detach_process_context():
+def detach_process_context(pidfile):
 """ Detach the process context from parent and session.
 
 :return: ``None``.
@@ -683,6 +684,8 @@ def detach_process_context():
 try:
 pid = os.fork()
 if pid > 0:
+while not os.path.exists(pidfile.path):
+time.sleep(0.1)
 os._exit(0)
 except OSError as exc:
 error = DaemonProcessDetachError(
@@ -959,7 +962,14 @@ found at [1].
 def __init__(self, path, enter_err_msg=None):
 self.path = path
 self.enter_err_msg = enter_err_msg
-self.pidfile = None
+self.pidfile = open(self.path, 'a+')
+try:
+fcntl.flock(self.pidfile.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
+fcntl.flock(self.pidfile.fileno(), fcntl.LOCK_UN)
+self.pidfile.close()
+os.remove(self.path)
+except IOError:
+sys.exit(self.enter_err_msg)
 
 def __enter__(self):
 self.pidfile = open(self.path, 'a+')



[4/6] phoenix git commit: PHOENIX-4456 queryserver script doesn't perform as expected.

2017-12-13 Thread ssa
PHOENIX-4456 queryserver script doesn't perform as expected.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/1625dd62
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/1625dd62
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/1625dd62

Branch: refs/heads/4.x-HBase-1.2
Commit: 1625dd62006301a612dde3b8fbfcbc15642a24dc
Parents: f93443c
Author: Sergey Soldatov 
Authored: Tue Dec 12 23:20:48 2017 -0800
Committer: Sergey Soldatov 
Committed: Wed Dec 13 10:17:24 2017 -0800

--
 bin/daemon.py | 16 +---
 1 file changed, 13 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/1625dd62/bin/daemon.py
--
diff --git a/bin/daemon.py b/bin/daemon.py
index aeebae3..bb64148 100644
--- a/bin/daemon.py
+++ b/bin/daemon.py
@@ -57,6 +57,7 @@ import signal
 import socket
 import atexit
 import fcntl
+import time
 try:
 # Python 2 has both ‘str’ (bytes) and ‘unicode’ (text).
 basestring = basestring
@@ -386,7 +387,7 @@ class DaemonContext:
 change_process_owner(self.uid, self.gid)
 
 if self.detach_process:
-detach_process_context()
+detach_process_context(self.pidfile)
 
 signal_handler_map = self._make_signal_handler_map()
 set_signal_handlers(signal_handler_map)
@@ -657,7 +658,7 @@ def prevent_core_dump():
 resource.setrlimit(core_resource, core_limit)
 
 
-def detach_process_context():
+def detach_process_context(pidfile):
 """ Detach the process context from parent and session.
 
 :return: ``None``.
@@ -683,6 +684,8 @@ def detach_process_context():
 try:
 pid = os.fork()
 if pid > 0:
+while not os.path.exists(pidfile.path):
+time.sleep(0.1)
 os._exit(0)
 except OSError as exc:
 error = DaemonProcessDetachError(
@@ -959,7 +962,14 @@ found at [1].
 def __init__(self, path, enter_err_msg=None):
 self.path = path
 self.enter_err_msg = enter_err_msg
-self.pidfile = None
+self.pidfile = open(self.path, 'a+')
+try:
+fcntl.flock(self.pidfile.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
+fcntl.flock(self.pidfile.fileno(), fcntl.LOCK_UN)
+self.pidfile.close()
+os.remove(self.path)
+except IOError:
+sys.exit(self.enter_err_msg)
 
 def __enter__(self):
 self.pidfile = open(self.path, 'a+')



[2/6] phoenix git commit: PHOENIX-4456 queryserver script doesn't perform as expected.

2017-12-13 Thread ssa
PHOENIX-4456 queryserver script doesn't perform as expected.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/bea0592c
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/bea0592c
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/bea0592c

Branch: refs/heads/4.x-cdh5.11.2
Commit: bea0592c267ba40e3a8f115af19772c1ff82af1d
Parents: c49de1c
Author: Sergey Soldatov 
Authored: Tue Dec 12 23:20:48 2017 -0800
Committer: Sergey Soldatov 
Committed: Wed Dec 13 10:16:43 2017 -0800

--
 bin/daemon.py | 16 +---
 1 file changed, 13 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/bea0592c/bin/daemon.py
--
diff --git a/bin/daemon.py b/bin/daemon.py
index aeebae3..bb64148 100644
--- a/bin/daemon.py
+++ b/bin/daemon.py
@@ -57,6 +57,7 @@ import signal
 import socket
 import atexit
 import fcntl
+import time
 try:
 # Python 2 has both ‘str’ (bytes) and ‘unicode’ (text).
 basestring = basestring
@@ -386,7 +387,7 @@ class DaemonContext:
 change_process_owner(self.uid, self.gid)
 
 if self.detach_process:
-detach_process_context()
+detach_process_context(self.pidfile)
 
 signal_handler_map = self._make_signal_handler_map()
 set_signal_handlers(signal_handler_map)
@@ -657,7 +658,7 @@ def prevent_core_dump():
 resource.setrlimit(core_resource, core_limit)
 
 
-def detach_process_context():
+def detach_process_context(pidfile):
 """ Detach the process context from parent and session.
 
 :return: ``None``.
@@ -683,6 +684,8 @@ def detach_process_context():
 try:
 pid = os.fork()
 if pid > 0:
+while not os.path.exists(pidfile.path):
+time.sleep(0.1)
 os._exit(0)
 except OSError as exc:
 error = DaemonProcessDetachError(
@@ -959,7 +962,14 @@ found at [1].
 def __init__(self, path, enter_err_msg=None):
 self.path = path
 self.enter_err_msg = enter_err_msg
-self.pidfile = None
+self.pidfile = open(self.path, 'a+')
+try:
+fcntl.flock(self.pidfile.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
+fcntl.flock(self.pidfile.fileno(), fcntl.LOCK_UN)
+self.pidfile.close()
+os.remove(self.path)
+except IOError:
+sys.exit(self.enter_err_msg)
 
 def __enter__(self):
 self.pidfile = open(self.path, 'a+')



phoenix git commit: PHOENIX-4457 Account for the Table interface addition of checkAndMutate

2017-12-13 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/5.x-HBase-2.0 b3edf3903 -> a54fd4aa6


PHOENIX-4457 Account for the Table interface addition of checkAndMutate


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a54fd4aa
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a54fd4aa
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a54fd4aa

Branch: refs/heads/5.x-HBase-2.0
Commit: a54fd4aa6c6ebe86f1ca7dd996a715b44c9a0948
Parents: b3edf39
Author: Sergey Soldatov 
Authored: Wed Dec 13 09:52:57 2017 -0800
Committer: Sergey Soldatov 
Committed: Wed Dec 13 09:52:57 2017 -0800

--
 .../java/org/apache/phoenix/execute/DelegateHTable.java | 5 +
 .../apache/phoenix/transaction/OmidTransactionTable.java| 6 ++
 .../apache/phoenix/transaction/TephraTransactionTable.java  | 5 +
 .../apache/phoenix/transaction/TransactionAwareHTable.java  | 9 +
 4 files changed, 25 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/a54fd4aa/phoenix-core/src/main/java/org/apache/phoenix/execute/DelegateHTable.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/execute/DelegateHTable.java 
b/phoenix-core/src/main/java/org/apache/phoenix/execute/DelegateHTable.java
index 15d5cf6..c953c8d 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/execute/DelegateHTable.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/DelegateHTable.java
@@ -271,6 +271,11 @@ public class DelegateHTable implements Table {
 }
 
 @Override
+public CheckAndMutateBuilder checkAndMutate(byte[] row, byte[] family) {
+return delegate.checkAndMutate(row, family);
+}
+
+@Override
 public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, 
CompareOperator op,
 byte[] value, RowMutations mutation) throws IOException {
 return delegate.checkAndMutate(row, family, qualifier, op, value, 
mutation);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a54fd4aa/phoenix-core/src/main/java/org/apache/phoenix/transaction/OmidTransactionTable.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/transaction/OmidTransactionTable.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/transaction/OmidTransactionTable.java
index 78d7e4c..0874e45 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/transaction/OmidTransactionTable.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/transaction/OmidTransactionTable.java
@@ -317,6 +317,12 @@ public class OmidTransactionTable implements 
PhoenixTransactionalTable {
 }
 
 @Override
+public CheckAndMutateBuilder checkAndMutate(byte[] row, byte[] family) {
+// TODO Auto-generated method stub
+return null;
+}
+
+@Override
 public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, 
CompareOperator op, byte[] value,
 RowMutations mutation) throws IOException {
 // TODO Auto-generated method stub

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a54fd4aa/phoenix-core/src/main/java/org/apache/phoenix/transaction/TephraTransactionTable.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/transaction/TephraTransactionTable.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/transaction/TephraTransactionTable.java
index f9de869..e28e98b 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/transaction/TephraTransactionTable.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/transaction/TephraTransactionTable.java
@@ -302,6 +302,11 @@ public class TephraTransactionTable implements 
PhoenixTransactionalTable {
 }
 
 @Override
+public CheckAndMutateBuilder checkAndMutate(byte[] row, byte[] family) {
+return transactionAwareHTable.checkAndMutate(row, family);
+}
+
+@Override
 public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, 
CompareOperator op,
 byte[] value, RowMutations mutation) throws IOException {
 return transactionAwareHTable.checkAndMutate(row, family, qualifier, 
op, value, mutation);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a54fd4aa/phoenix-core/src/main/java/org/apache/phoenix/transaction/TransactionAwareHTable.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/transaction/TransactionAwareHTable.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/transaction/TransactionAwareHTable.java
index 62d3286..8499ac2 100644
--- 

phoenix git commit: PHOENIX-4452 change usage of WALKey to WALKeyImpl due HBASE-19134

2017-12-12 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/5.x-HBase-2.0 25d3c1369 -> b3edf3903


PHOENIX-4452 change usage of WALKey to WALKeyImpl due HBASE-19134


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b3edf390
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b3edf390
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b3edf390

Branch: refs/heads/5.x-HBase-2.0
Commit: b3edf390353c19b8b4c5de2cca35a97457743768
Parents: 25d3c13
Author: Sergey Soldatov 
Authored: Tue Dec 12 10:12:49 2017 -0800
Committer: Sergey Soldatov 
Committed: Tue Dec 12 13:45:30 2017 -0800

--
 .../replication/SystemCatalogWALEntryFilterIT.java| 10 +-
 1 file changed, 5 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b3edf390/phoenix-core/src/it/java/org/apache/phoenix/replication/SystemCatalogWALEntryFilterIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/replication/SystemCatalogWALEntryFilterIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/replication/SystemCatalogWALEntryFilterIT.java
index bb10ae0..ae8b797 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/replication/SystemCatalogWALEntryFilterIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/replication/SystemCatalogWALEntryFilterIT.java
@@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.wal.WAL;
 import org.apache.hadoop.hbase.wal.WALEdit;
-import org.apache.hadoop.hbase.wal.WALKey;
+import org.apache.hadoop.hbase.wal.WALKeyImpl;
 import org.apache.phoenix.end2end.ParallelStatsDisabledIT;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
 import org.apache.phoenix.mapreduce.util.ConnectionUtil;
@@ -73,7 +73,7 @@ public class SystemCatalogWALEntryFilterIT extends 
ParallelStatsDisabledIT {
   private static final String DROP_TENANT_VIEW_SQL = "DROP VIEW IF EXISTS " + 
TENANT_VIEW_NAME;
   private static final String DROP_NONTENANT_VIEW_SQL = "DROP VIEW IF EXISTS " 
+ NONTENANT_VIEW_NAME;
   private static PTable catalogTable;
-  private static WALKey walKey = null;
+  private static WALKeyImpl walKey = null;
   private static TableName systemCatalogTableName =
   TableName.valueOf(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME);
 
@@ -89,7 +89,7 @@ public class SystemCatalogWALEntryFilterIT extends 
ParallelStatsDisabledIT {
   ensureTableCreated(getUrl(), TestUtil.ENTITY_HISTORY_TABLE_NAME);
   connection.createStatement().execute(CREATE_TENANT_VIEW_SQL);
   catalogTable = PhoenixRuntime.getTable(connection, 
PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME);
-  walKey = new WALKey(REGION, 
TableName.valueOf(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME), 0, 0, uuid);
+  walKey = new WALKeyImpl(REGION, 
TableName.valueOf(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME), 0, 0, uuid);
 };
 Assert.assertNotNull(catalogTable);
 try (java.sql.Connection connection =
@@ -115,7 +115,7 @@ public class SystemCatalogWALEntryFilterIT extends 
ParallelStatsDisabledIT {
   @Test
   public void testOtherTablesAutoPass() throws Exception {
 //Cell is nonsense but we should auto pass because the table name's not 
System.Catalog
-WAL.Entry entry = new WAL.Entry(new WALKey(REGION,
+WAL.Entry entry = new WAL.Entry(new WALKeyImpl(REGION,
 TableName.valueOf(TestUtil.ENTITY_HISTORY_TABLE_NAME), 
System.currentTimeMillis()), new WALEdit());
 entry.getEdit().add(CellUtil.createCell(Bytes.toBytes("foo")));
 SystemCatalogWALEntryFilter filter = new SystemCatalogWALEntryFilter();
@@ -232,7 +232,7 @@ public class SystemCatalogWALEntryFilterIT extends 
ParallelStatsDisabledIT {
   }
   Assert.assertTrue("Didn't retrieve any cells from SYSTEM.CATALOG",
   edit.getCells().size() > 0);
-  WALKey key = new WALKey(REGION, tableName, 0, 0, uuid);
+  WALKeyImpl key = new WALKeyImpl(REGION, tableName, 0, 0, uuid);
   entry = new WAL.Entry(key, edit);
 }
 return entry;



phoenix git commit: PHOENIX-4441 Reflect changes that were made recently in HBase branch-2 Changes required after : HBASE-19417. Changed method signature in RegionObserver HBASE-19430. Removed setTime

2017-12-07 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/5.x-HBase-2.0 491486954 -> 04b27d542


PHOENIX-4441 Reflect changes that were made recently in HBase branch-2. Changes
required after: HBASE-19417 (changed method signature in RegionObserver) and
HBASE-19430 (removed setTimestamp(byte[], int) method from ExtendedCell).


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/04b27d54
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/04b27d54
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/04b27d54

Branch: refs/heads/5.x-HBase-2.0
Commit: 04b27d542120707446a70575d02cbd31b63ea050
Parents: 4914869
Author: Sergey Soldatov 
Authored: Wed Dec 6 23:27:37 2017 -0800
Committer: Sergey Soldatov 
Committed: Thu Dec 7 14:09:23 2017 -0800

--
 .../org/apache/phoenix/coprocessor/DelegateRegionObserver.java | 6 +++---
 .../org/apache/phoenix/coprocessor/SequenceRegionObserver.java | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/04b27d54/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/DelegateRegionObserver.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/DelegateRegionObserver.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/DelegateRegionObserver.java
index a65f78f..94cdb1e 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/DelegateRegionObserver.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/DelegateRegionObserver.java
@@ -320,10 +320,10 @@ public class DelegateRegionObserver implements 
RegionObserver {
 }
 
 @Override
-public boolean 
postBulkLoadHFile(ObserverContext ctx,
-List> stagingFamilyPaths, Map finalPaths, boolean hasLoaded)
+public void 
postBulkLoadHFile(ObserverContext ctx,
+List> stagingFamilyPaths, Map finalPaths)
 throws IOException {
-return delegate.postBulkLoadHFile(ctx, stagingFamilyPaths, finalPaths, 
hasLoaded);
+delegate.postBulkLoadHFile(ctx, stagingFamilyPaths, finalPaths);
 }
 


http://git-wip-us.apache.org/repos/asf/phoenix/blob/04b27d54/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
index 75ab61b..4c2ec1a 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
@@ -422,7 +422,7 @@ public class SequenceRegionObserver implements 
RegionObserver, RegionCoprocessor
 if (!hadClientTimestamp) {
 for (List kvs : m.getFamilyCellMap().values()) {
 for (Cell kv : kvs) {
-
((ExtendedCell)kv).setTimestamp(clientTimestampBuf, 0);
+
((ExtendedCell)kv).setTimestamp(clientTimestampBuf);
 }
 }
 }



phoenix git commit: PHOENIX-4438 IT tests require additional configuration for HBase 2.0

2017-12-06 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/5.x-HBase-2.0 b973717fe -> 491486954


PHOENIX-4438 IT tests require additional configuration for HBase 2.0


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/49148695
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/49148695
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/49148695

Branch: refs/heads/5.x-HBase-2.0
Commit: 4914869545791af1efc011ae1b01e26af0db6c47
Parents: b973717
Author: Sergey Soldatov 
Authored: Wed Dec 6 23:26:52 2017 -0800
Committer: Sergey Soldatov 
Committed: Wed Dec 6 23:38:31 2017 -0800

--
 pom.xml | 10 +-
 1 file changed, 5 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/49148695/pom.xml
--
diff --git a/pom.xml b/pom.xml
index e7ccf0c..295c94d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -120,7 +120,7 @@
 
 
 8
-8
+4
 false
 false
 
@@ -242,7 +242,7 @@
 alphabetical
 
--Xmx2000m -XX:MaxPermSize=256m 
-Djava.security.egd=file:/dev/./urandom 
"-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}"
 -XX:NewRatio=4 -XX:SurvivorRatio=8 -XX:+UseCompressedOops 
-XX:+UseConcMarkSweepGC -XX:+UseParNewGC -XX:+DisableExplicitGC 
-XX:+UseCMSInitiatingOccupancyOnly -XX:+CMSClassUnloadingEnabled 
-XX:+CMSScavengeBeforeRemark -XX:CMSInitiatingOccupancyFraction=68 
-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/
+-Xmx2000m -XX:MaxPermSize=256m 
-Djava.security.egd=file:/dev/./urandom 
"-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}"
 -XX:NewRatio=4 -XX:SurvivorRatio=8 -XX:+UseCompressedOops 
-XX:+UseConcMarkSweepGC -XX:+UseParNewGC -XX:+DisableExplicitGC 
-XX:+UseCMSInitiatingOccupancyOnly -XX:+CMSClassUnloadingEnabled 
-XX:+CMSScavengeBeforeRemark -XX:CMSInitiatingOccupancyFraction=68 
-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ 
-Dorg.apache.hadoop.hbase.shaded.io.netty.packagePrefix=org.apache.hadoop.hbase.shaded.
 
${test.output.tofile}
 kill
 
${basedir}/src/it/java
@@ -271,7 +271,7 @@
 at 
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:2835)
 at 
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:490)
 -->

--Xmx3000m -XX:MaxPermSize=256m 
-Djava.security.egd=file:/dev/./urandom 
"-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}"
 -XX:NewRatio=4 -XX:SurvivorRatio=8 -XX:+UseCompressedOops 
-XX:+UseConcMarkSweepGC -XX:+UseParNewGC -XX:+DisableExplicitGC 
-XX:+UseCMSInitiatingOccupancyOnly -XX:+CMSClassUnloadingEnabled 
-XX:+CMSScavengeBeforeRemark -XX:CMSInitiatingOccupancyFraction=68 
-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/
+-Xmx3000m -XX:MaxPermSize=256m 
-Djava.security.egd=file:/dev/./urandom 
"-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}"
 -XX:NewRatio=4 -XX:SurvivorRatio=8 -XX:+UseCompressedOops 
-XX:+UseConcMarkSweepGC -XX:+UseParNewGC -XX:+DisableExplicitGC 
-XX:+UseCMSInitiatingOccupancyOnly -XX:+CMSClassUnloadingEnabled 
-XX:+CMSScavengeBeforeRemark -XX:CMSInitiatingOccupancyFraction=68 
-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ 
-Dorg.apache.hadoop.hbase.shaded.io.netty.packagePrefix=org.apache.hadoop.hbase.shaded.
 
${test.output.tofile}
 kill
 
${basedir}/src/it/java
@@ -289,7 +289,7 @@
 ${numForkedIT}
 alphabetical
 true
--enableassertions -Xmx2000m -XX:MaxPermSize=128m 
-Djava.security.egd=file:/dev/./urandom 
"-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}"
 -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/
+-enableassertions -Xmx2000m -XX:MaxPermSize=128m 
-Djava.security.egd=file:/dev/./urandom 
"-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}"
 -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ 
-Dorg.apache.hadoop.hbase.shaded.io.netty.packagePrefix=org.apache.hadoop.hbase.shaded.
 
${test.output.tofile}
 
${basedir}/src/it/java
 
org.apache.phoenix.end2end.HBaseManagedTimeTest
@@ -307,7 +307,7 @@
  ${numForkedIT}
  alphabetical
  false
- -enableassertions -Xmx2000m -XX:MaxPermSize=256m 
-Djava.security.egd=file:/dev/./urandom 

[3/5] phoenix git commit: PHOENIX-4439 QueryServer pid file name doesn't comply with the usual schema we are using in the hadoop ecosystem

2017-12-06 Thread ssa
PHOENIX-4439 QueryServer pid file name doesn't comply with the usual schema we
are using in the hadoop ecosystem


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/bfec658d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/bfec658d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/bfec658d

Branch: refs/heads/4.x-HBase-1.2
Commit: bfec658d0c54f07970c6a96867e8950e0bd69d71
Parents: c9a695a
Author: Sergey Soldatov 
Authored: Wed Dec 6 11:23:48 2017 -0800
Committer: Sergey Soldatov 
Committed: Wed Dec 6 14:04:20 2017 -0800

--
 bin/queryserver.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/bfec658d/bin/queryserver.py
--
diff --git a/bin/queryserver.py b/bin/queryserver.py
index fefe0a5..711bcc4 100755
--- a/bin/queryserver.py
+++ b/bin/queryserver.py
@@ -76,7 +76,7 @@ hadoop_config_path = phoenix_utils.hadoop_conf
 hadoop_classpath = phoenix_utils.hadoop_classpath
 
 # TODO: add windows support
-phoenix_file_basename = '%s-queryserver' % getpass.getuser()
+phoenix_file_basename = 'phoenix-%s-queryserver' % getpass.getuser()
 phoenix_log_file = '%s.log' % phoenix_file_basename
 phoenix_out_file = '%s.out' % phoenix_file_basename
 phoenix_pid_file = '%s.pid' % phoenix_file_basename



[5/5] phoenix git commit: PHOENIX-4439 QueryServer pid file name doesn't comply with the usual schema we are using in the hadoop ecosystem

2017-12-06 Thread ssa
PHOENIX-4439 QueryServer pid file name doesn't comply with the usual schema we
are using in the hadoop ecosystem


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/3634aac1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/3634aac1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/3634aac1

Branch: refs/heads/4.x-HBase-1.1
Commit: 3634aac10a1171a98c5e4e5d843547d6a818c146
Parents: 21437b1
Author: Sergey Soldatov 
Authored: Wed Dec 6 11:23:48 2017 -0800
Committer: Sergey Soldatov 
Committed: Wed Dec 6 14:13:57 2017 -0800

--
 bin/queryserver.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/3634aac1/bin/queryserver.py
--
diff --git a/bin/queryserver.py b/bin/queryserver.py
index fefe0a5..711bcc4 100755
--- a/bin/queryserver.py
+++ b/bin/queryserver.py
@@ -76,7 +76,7 @@ hadoop_config_path = phoenix_utils.hadoop_conf
 hadoop_classpath = phoenix_utils.hadoop_classpath
 
 # TODO: add windows support
-phoenix_file_basename = '%s-queryserver' % getpass.getuser()
+phoenix_file_basename = 'phoenix-%s-queryserver' % getpass.getuser()
 phoenix_log_file = '%s.log' % phoenix_file_basename
 phoenix_out_file = '%s.out' % phoenix_file_basename
 phoenix_pid_file = '%s.pid' % phoenix_file_basename



[2/5] phoenix git commit: PHOENIX-4439 QueryServer pid file name doesn't comply with the usual schema we are using in the hadoop ecosystem

2017-12-06 Thread ssa
PHOENIX-4439 QueryServer pid file name doesn't comply with the usual schema we
are using in the hadoop ecosystem


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ee728a4d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ee728a4d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ee728a4d

Branch: refs/heads/master
Commit: ee728a4d19c004ad456b24cd228fb2351362472d
Parents: 25359a9
Author: Sergey Soldatov 
Authored: Wed Dec 6 11:23:48 2017 -0800
Committer: Sergey Soldatov 
Committed: Wed Dec 6 14:03:26 2017 -0800

--
 bin/queryserver.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ee728a4d/bin/queryserver.py
--
diff --git a/bin/queryserver.py b/bin/queryserver.py
index fefe0a5..711bcc4 100755
--- a/bin/queryserver.py
+++ b/bin/queryserver.py
@@ -76,7 +76,7 @@ hadoop_config_path = phoenix_utils.hadoop_conf
 hadoop_classpath = phoenix_utils.hadoop_classpath
 
 # TODO: add windows support
-phoenix_file_basename = '%s-queryserver' % getpass.getuser()
+phoenix_file_basename = 'phoenix-%s-queryserver' % getpass.getuser()
 phoenix_log_file = '%s.log' % phoenix_file_basename
 phoenix_out_file = '%s.out' % phoenix_file_basename
 phoenix_pid_file = '%s.pid' % phoenix_file_basename



[4/5] phoenix git commit: PHOENIX-4439 QueryServer pid file name doesn't comply with the usual schema we are using in the hadoop ecosystem

2017-12-06 Thread ssa
PHOENIX-4439 QueryServer pid file name doesn't comply with the usual schema we
are using in the hadoop ecosystem


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/1334b26b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/1334b26b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/1334b26b

Branch: refs/heads/4.x-cdh5.11.2
Commit: 1334b26b839652bb0bdebee56c81664706a2fc62
Parents: a99dc69
Author: Sergey Soldatov 
Authored: Wed Dec 6 11:23:48 2017 -0800
Committer: Sergey Soldatov 
Committed: Wed Dec 6 14:13:22 2017 -0800

--
 bin/queryserver.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/1334b26b/bin/queryserver.py
--
diff --git a/bin/queryserver.py b/bin/queryserver.py
index fefe0a5..711bcc4 100755
--- a/bin/queryserver.py
+++ b/bin/queryserver.py
@@ -76,7 +76,7 @@ hadoop_config_path = phoenix_utils.hadoop_conf
 hadoop_classpath = phoenix_utils.hadoop_classpath
 
 # TODO: add windows support
-phoenix_file_basename = '%s-queryserver' % getpass.getuser()
+phoenix_file_basename = 'phoenix-%s-queryserver' % getpass.getuser()
 phoenix_log_file = '%s.log' % phoenix_file_basename
 phoenix_out_file = '%s.out' % phoenix_file_basename
 phoenix_pid_file = '%s.pid' % phoenix_file_basename



[1/5] phoenix git commit: PHOENIX-4439 QueryServer pid file name doesn't comply with the usual schema we are using in the hadoop ecosystem

2017-12-06 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.1 21437b151 -> 3634aac10
  refs/heads/4.x-HBase-1.2 c9a695a38 -> bfec658d0
  refs/heads/4.x-cdh5.11.2 a99dc69ce -> 1334b26b8
  refs/heads/5.x-HBase-2.0 9855dc931 -> b973717fe
  refs/heads/master 25359a95f -> ee728a4d1


PHOENIX-4439 QueryServer pid file name doesn't comply with the usual schema we
are using in the hadoop ecosystem


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b973717f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b973717f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b973717f

Branch: refs/heads/5.x-HBase-2.0
Commit: b973717fe69fcc0976e46ce4bcab1dee42b22946
Parents: 9855dc9
Author: Sergey Soldatov 
Authored: Wed Dec 6 11:23:48 2017 -0800
Committer: Sergey Soldatov 
Committed: Wed Dec 6 14:01:56 2017 -0800

--
 bin/queryserver.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b973717f/bin/queryserver.py
--
diff --git a/bin/queryserver.py b/bin/queryserver.py
index fefe0a5..711bcc4 100755
--- a/bin/queryserver.py
+++ b/bin/queryserver.py
@@ -76,7 +76,7 @@ hadoop_config_path = phoenix_utils.hadoop_conf
 hadoop_classpath = phoenix_utils.hadoop_classpath
 
 # TODO: add windows support
-phoenix_file_basename = '%s-queryserver' % getpass.getuser()
+phoenix_file_basename = 'phoenix-%s-queryserver' % getpass.getuser()
 phoenix_log_file = '%s.log' % phoenix_file_basename
 phoenix_out_file = '%s.out' % phoenix_file_basename
 phoenix_pid_file = '%s.pid' % phoenix_file_basename



phoenix git commit: PHOENIX-4429 QueryServer fails to start with missing jetty Locker class

2017-12-04 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/5.x-HBase-2.0 321cd47d8 -> 9855dc931


PHOENIX-4429 QueryServer fails to start with missing jetty Locker class


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9855dc93
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9855dc93
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9855dc93

Branch: refs/heads/5.x-HBase-2.0
Commit: 9855dc93145b4d1e5c3c9c92e7993ee9ec42c3bb
Parents: 321cd47
Author: Sergey Soldatov 
Authored: Mon Dec 4 17:40:15 2017 -0800
Committer: Sergey Soldatov 
Committed: Mon Dec 4 17:40:15 2017 -0800

--
 pom.xml | 5 +
 1 file changed, 5 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/9855dc93/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 4bb8390..e7ccf0c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -941,6 +941,11 @@
   
   
 org.eclipse.jetty
+jetty-util
+${jetty.version}
+  
+  
+org.eclipse.jetty
 jetty-security
 ${jetty.version}
   



[1/3] phoenix git commit: PHOENIX-4321 Replace deprecated HBaseAdmin with Admin

2017-11-14 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/5.x-HBase-2.0 c85e06581 -> 693fa6598


http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestIndexWriter.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestIndexWriter.java
 
b/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestIndexWriter.java
index f6e00cc..b302210 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestIndexWriter.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestIndexWriter.java
@@ -47,8 +47,8 @@ import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.util.VersionInfo;
+import org.apache.phoenix.hbase.index.IndexTableName;
 import org.apache.phoenix.hbase.index.StubAbortable;
-import org.apache.phoenix.hbase.index.TableName;
 import org.apache.phoenix.hbase.index.exception.IndexWriteException;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 import org.junit.Rule;
@@ -60,7 +60,7 @@ import org.mockito.stubbing.Answer;
 public class TestIndexWriter {
   private static final Log LOG = LogFactory.getLog(TestIndexWriter.class);
   @Rule
-  public TableName testName = new TableName();
+  public IndexTableName testName = new IndexTableName();
   private final byte[] row = Bytes.toBytes("row");
 
   @Test

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestParalleIndexWriter.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestParalleIndexWriter.java
 
b/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestParalleIndexWriter.java
index 8573fb1..1fe0342 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestParalleIndexWriter.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestParalleIndexWriter.java
@@ -37,8 +37,8 @@ import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.VersionInfo;
+import org.apache.phoenix.hbase.index.IndexTableName;
 import org.apache.phoenix.hbase.index.StubAbortable;
-import org.apache.phoenix.hbase.index.TableName;
 import org.apache.phoenix.hbase.index.table.HTableInterfaceReference;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 import org.junit.Rule;
@@ -54,7 +54,7 @@ public class TestParalleIndexWriter {
 
   private static final Log LOG = 
LogFactory.getLog(TestParalleIndexWriter.class);
   @Rule
-  public TableName test = new TableName();
+  public IndexTableName test = new IndexTableName();
   private final byte[] row = Bytes.toBytes("row");
 
   @Test

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestParalleWriterIndexCommitter.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestParalleWriterIndexCommitter.java
 
b/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestParalleWriterIndexCommitter.java
index 2377ff1..79bc295 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestParalleWriterIndexCommitter.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestParalleWriterIndexCommitter.java
@@ -37,8 +37,8 @@ import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.VersionInfo;
+import org.apache.phoenix.hbase.index.IndexTableName;
 import org.apache.phoenix.hbase.index.StubAbortable;
-import org.apache.phoenix.hbase.index.TableName;
 import org.apache.phoenix.hbase.index.table.HTableInterfaceReference;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 import org.junit.Rule;
@@ -54,7 +54,7 @@ public class TestParalleWriterIndexCommitter {
 
   private static final Log LOG = 
LogFactory.getLog(TestParalleWriterIndexCommitter.class);
   @Rule
-  public TableName test = new TableName();
+  public IndexTableName test = new IndexTableName();
   private final byte[] row = Bytes.toBytes("row");
 
   @Test

http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestWALRecoveryCaching.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestWALRecoveryCaching.java
 

[3/3] phoenix git commit: PHOENIX-4321 Replace deprecated HBaseAdmin with Admin

2017-11-14 Thread ssa
PHOENIX-4321 Replace deprecated HBaseAdmin with Admin


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/693fa659
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/693fa659
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/693fa659

Branch: refs/heads/5.x-HBase-2.0
Commit: 693fa6598df41c2cbd8111e465cf98d1b3ba1ec4
Parents: c85e065
Author: Sergey Soldatov 
Authored: Thu Nov 9 13:29:50 2017 -0800
Committer: Sergey Soldatov 
Committed: Tue Nov 14 22:12:35 2017 -0800

--
 ...ReplayWithIndexWritesAndCompressedWALIT.java | 13 +--
 .../StatisticsCollectionRunTrackerIT.java   | 11 +--
 .../phoenix/end2end/AggregateQueryIT.java   |  8 +-
 .../apache/phoenix/end2end/AlterTableIT.java| 11 ++-
 .../end2end/ColumnProjectionOptimizationIT.java |  9 +-
 .../apache/phoenix/end2end/CreateSchemaIT.java  |  4 +-
 .../apache/phoenix/end2end/CreateTableIT.java   | 45 -
 .../phoenix/end2end/DisableLocalIndexIT.java|  7 +-
 .../apache/phoenix/end2end/DropSchemaIT.java|  4 +-
 .../apache/phoenix/end2end/DynamicColumnIT.java |  4 +-
 .../phoenix/end2end/FlappingAlterTableIT.java   | 12 +--
 .../phoenix/end2end/FlappingLocalIndexIT.java   |  8 +-
 .../phoenix/end2end/LocalIndexSplitMergeIT.java |  6 +-
 .../phoenix/end2end/MappingTableDataTypeIT.java |  6 +-
 .../end2end/NamespaceSchemaMappingIT.java   |  8 +-
 .../phoenix/end2end/NativeHBaseTypesIT.java |  5 +-
 .../phoenix/end2end/ProductMetricsIT.java   |  7 +-
 .../end2end/QueryDatabaseMetaDataIT.java| 21 +++--
 .../apache/phoenix/end2end/ReverseScanIT.java   |  8 --
 .../apache/phoenix/end2end/SetPropertyIT.java   | 96 +--
 .../end2end/SkipScanAfterManualSplitIT.java | 11 ++-
 .../apache/phoenix/end2end/SkipScanQueryIT.java |  7 +-
 .../end2end/TableSnapshotReadsMapReduceIT.java  |  8 +-
 .../end2end/TenantSpecificTablesDDLIT.java  |  4 +-
 .../org/apache/phoenix/end2end/UpgradeIT.java   | 20 ++--
 .../org/apache/phoenix/end2end/UseSchemaIT.java |  3 +-
 .../java/org/apache/phoenix/end2end/ViewIT.java |  6 +-
 .../phoenix/end2end/index/BaseIndexIT.java  |  4 +-
 .../phoenix/end2end/index/DropMetadataIT.java   |  4 +-
 .../phoenix/end2end/index/LocalIndexIT.java | 19 ++--
 .../phoenix/end2end/index/MutableIndexIT.java   | 10 +-
 .../index/MutableIndexReplicationIT.java|  9 +-
 .../end2end/index/MutableIndexSplitIT.java  | 10 +-
 .../end2end/index/PartialIndexRebuilderIT.java  |  9 +-
 .../end2end/index/txn/MutableRollbackIT.java|  6 +-
 .../UpsertSelectOverlappingBatchesIT.java   |  6 +-
 .../FailForUnsupportedHBaseVersionsIT.java  |  4 +-
 .../iterate/RoundRobinResultIteratorIT.java | 11 ++-
 .../apache/phoenix/rpc/PhoenixServerRpcIT.java  | 13 +--
 .../phoenix/tx/ParameterizedTransactionIT.java  | 10 +-
 .../hbase/index/write/RecoveryIndexWriter.java  | 10 +-
 .../mapreduce/index/IndexScrutinyTool.java  |  4 +-
 .../phoenix/mapreduce/index/IndexTool.java  |  4 +-
 .../phoenix/query/ConnectionQueryServices.java  |  4 +-
 .../query/ConnectionQueryServicesImpl.java  | 98 ++--
 .../query/ConnectionlessQueryServicesImpl.java  |  4 +-
 .../query/DelegateConnectionQueryServices.java  |  4 +-
 .../apache/phoenix/schema/MetaDataClient.java   |  6 +-
 .../org/apache/phoenix/util/UpgradeUtil.java| 57 ++--
 .../phoenix/hbase/index/IndexTableName.java | 45 +
 .../apache/phoenix/hbase/index/TableName.java   | 45 -
 .../index/parallel/TestThreadPoolBuilder.java   |  6 +-
 .../index/parallel/TestThreadPoolManager.java   |  7 +-
 .../index/util/TestIndexManagementUtil.java |  8 +-
 .../hbase/index/write/TestIndexWriter.java  |  4 +-
 .../index/write/TestParalleIndexWriter.java |  4 +-
 .../write/TestParalleWriterIndexCommitter.java  |  4 +-
 .../index/write/TestWALRecoveryCaching.java |  8 +-
 .../java/org/apache/phoenix/query/BaseTest.java |  6 +-
 .../query/ConnectionQueryServicesImplTest.java  |  6 +-
 .../java/org/apache/phoenix/util/TestUtil.java  | 25 ++---
 .../org/apache/phoenix/flume/PhoenixSinkIT.java |  7 +-
 .../apache/phoenix/hive/util/PhoenixUtil.java   |  4 +-
 63 files changed, 422 insertions(+), 415 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/693fa659/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
 
b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/WALReplayWithIndexWritesAndCompressedWALIT.java
index 9566e48..67b7df3 100644
--- 

[2/3] phoenix git commit: PHOENIX-3112 Partial row scan not handled correctly

2017-10-03 Thread ssa
PHOENIX-3112 Partial row scan not handled correctly


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/e5a9c72e
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/e5a9c72e
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/e5a9c72e

Branch: refs/heads/4.x-HBase-1.2
Commit: e5a9c72e368378f35fec8df3abdeb77cae3d9f64
Parents: d231db8
Author: Sergey Soldatov 
Authored: Wed Aug 2 16:56:04 2017 -0700
Committer: Sergey Soldatov 
Committed: Tue Oct 3 00:44:01 2017 -0700

--
 .../PartialResultServerConfigurationIT.java | 148 ++
 .../PartialScannerResultsDisabledIT.java| 193 +++
 .../DataTableLocalIndexRegionScanner.java   |   7 +-
 .../hbase/regionserver/ScannerContextUtil.java  |  41 
 .../phoenix/coprocessor/BaseRegionScanner.java  |   4 +-
 .../coprocessor/BaseScannerRegionObserver.java  |  11 +-
 .../coprocessor/DelegateRegionScanner.java  |   4 +-
 .../coprocessor/HashJoinRegionScanner.java  |  38 +---
 .../phoenix/iterate/RegionScannerFactory.java   |  51 +
 .../phoenix/schema/stats/StatisticsScanner.java |   4 +-
 10 files changed, 411 insertions(+), 90 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/e5a9c72e/phoenix-core/src/it/java/org/apache/phoenix/end2end/PartialResultServerConfigurationIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/PartialResultServerConfigurationIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/PartialResultServerConfigurationIT.java
new file mode 100644
index 000..1c9ac38
--- /dev/null
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/PartialResultServerConfigurationIT.java
@@ -0,0 +1,148 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.end2end;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
+import org.apache.phoenix.jdbc.PhoenixConnection;
+import org.apache.phoenix.jdbc.PhoenixDriver;
+import org.apache.phoenix.query.ConnectionQueryServices.Feature;
+import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.junit.AfterClass;
+import org.junit.Assume;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.Statement;
+import java.util.Properties;
+import java.util.Random;
+import java.util.UUID;
+
+import static 
org.apache.hadoop.hbase.HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD;
+import static org.apache.phoenix.query.BaseTest.generateUniqueName;
+import static org.apache.phoenix.query.BaseTest.setUpConfigForMiniCluster;
+import static org.apache.phoenix.query.QueryServices.*;
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+
+/**
+ * This is a separate from @PartialResultDisabledIT because it requires server 
side configuration
+ */
+@Category(NeedsOwnMiniClusterTest.class)
+public class PartialResultServerConfigurationIT {
+private static HBaseTestingUtility hbaseTestUtil;
+private static String zkQuorum;
+private static String url;
+
+@BeforeClass
+public static void setUp() throws Exception {
+Configuration conf = HBaseConfiguration.create();
+hbaseTestUtil = new HBaseTestingUtility(conf);
+setUpConfigForMiniCluster(conf);
+
+//Enforce the limit of the result, so scans will stop between cells.
+

[3/3] phoenix git commit: PHOENIX-3112 Partial row scan not handled correctly

2017-10-03 Thread ssa
PHOENIX-3112 Partial row scan not handled correctly


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/aaa41a33
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/aaa41a33
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/aaa41a33

Branch: refs/heads/master
Commit: aaa41a33d025ad6daa832fe8b42fc235e7154648
Parents: bd21ed3
Author: Sergey Soldatov 
Authored: Wed Aug 2 16:56:04 2017 -0700
Committer: Sergey Soldatov 
Committed: Tue Oct 3 00:44:26 2017 -0700

--
 .../PartialResultServerConfigurationIT.java | 148 ++
 .../PartialScannerResultsDisabledIT.java| 193 +++
 .../DataTableLocalIndexRegionScanner.java   |   7 +-
 .../hbase/regionserver/ScannerContextUtil.java  |  41 
 .../phoenix/coprocessor/BaseRegionScanner.java  |   4 +-
 .../coprocessor/BaseScannerRegionObserver.java  |  11 +-
 .../coprocessor/DelegateRegionScanner.java  |   4 +-
 .../coprocessor/HashJoinRegionScanner.java  |  38 +---
 .../phoenix/iterate/RegionScannerFactory.java   |  51 +
 .../phoenix/schema/stats/StatisticsScanner.java |   4 +-
 10 files changed, 411 insertions(+), 90 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/aaa41a33/phoenix-core/src/it/java/org/apache/phoenix/end2end/PartialResultServerConfigurationIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/PartialResultServerConfigurationIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/PartialResultServerConfigurationIT.java
new file mode 100644
index 000..1c9ac38
--- /dev/null
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/PartialResultServerConfigurationIT.java
@@ -0,0 +1,148 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.end2end;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
+import org.apache.phoenix.jdbc.PhoenixConnection;
+import org.apache.phoenix.jdbc.PhoenixDriver;
+import org.apache.phoenix.query.ConnectionQueryServices.Feature;
+import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.junit.AfterClass;
+import org.junit.Assume;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.Statement;
+import java.util.Properties;
+import java.util.Random;
+import java.util.UUID;
+
+import static 
org.apache.hadoop.hbase.HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD;
+import static org.apache.phoenix.query.BaseTest.generateUniqueName;
+import static org.apache.phoenix.query.BaseTest.setUpConfigForMiniCluster;
+import static org.apache.phoenix.query.QueryServices.*;
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+
+/**
+ * This is a separate from @PartialResultDisabledIT because it requires server 
side configuration
+ */
+@Category(NeedsOwnMiniClusterTest.class)
+public class PartialResultServerConfigurationIT {
+private static HBaseTestingUtility hbaseTestUtil;
+private static String zkQuorum;
+private static String url;
+
+@BeforeClass
+public static void setUp() throws Exception {
+Configuration conf = HBaseConfiguration.create();
+hbaseTestUtil = new HBaseTestingUtility(conf);
+setUpConfigForMiniCluster(conf);
+
+//Enforce the limit of the result, so scans will stop between cells.
+

[1/4] phoenix git commit: PHOENIX-4224 Automatic resending cache for HashJoin doesn't work when cache has expired on server side

2017-09-27 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 ca664fe10 -> 51fae875a
  refs/heads/4.x-HBase-1.1 4e51fe772 -> 8a7ba9dde
  refs/heads/4.x-HBase-1.2 d714afcec -> 53016519d
  refs/heads/master 764eb8f13 -> 033a2fc2a


PHOENIX-4224 Automatic resending cache for HashJoin doesn't work when cache has 
expired on server side


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/51fae875
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/51fae875
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/51fae875

Branch: refs/heads/4.x-HBase-0.98
Commit: 51fae875a97e143a923671314ee11205be6223cf
Parents: ca664fe
Author: Sergey Soldatov 
Authored: Mon Sep 25 19:57:49 2017 -0700
Committer: Sergey Soldatov 
Committed: Wed Sep 27 12:07:56 2017 -0700

--
 .../apache/phoenix/end2end/HashJoinCacheIT.java | 32 +-
 .../apache/phoenix/cache/ServerCacheClient.java | 46 
 2 files changed, 68 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/51fae875/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinCacheIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinCacheIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinCacheIT.java
index 3f60d9b..f6ca749 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinCacheIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinCacheIT.java
@@ -19,9 +19,13 @@ package org.apache.phoenix.end2end;
 
 import java.io.IOException;
 import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
+import java.util.Properties;
 import java.util.Random;
 
 import org.apache.hadoop.hbase.client.Scan;
@@ -31,9 +35,12 @@ import 
org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver;
 import org.apache.hadoop.hbase.regionserver.RegionScanner;
 import org.apache.phoenix.cache.GlobalCache;
 import org.apache.phoenix.cache.TenantCache;
+import org.apache.phoenix.coprocessor.HashJoinCacheNotFoundException;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 import org.apache.phoenix.join.HashJoinInfo;
+import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.util.ByteUtil;
+import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.TestUtil;
 import org.junit.Test;
@@ -43,6 +50,9 @@ import org.junit.runners.Parameterized.Parameters;
 
 import com.google.common.collect.Lists;
 
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.fail;
+
 @RunWith(Parameterized.class)
 public class HashJoinCacheIT extends HashJoinIT {
 
@@ -427,7 +437,27 @@ public class HashJoinCacheIT extends HashJoinIT {
public void testUpsertWithJoin() throws Exception {
// TODO: We will enable this test once PHOENIX-3163
}
-
+
+@Test
+public void testExpiredCache() throws Exception {
+Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+
props.setProperty(QueryServices.MAX_SERVER_CACHE_TIME_TO_LIVE_MS_ATTRIB, "1");
+Connection conn = DriverManager.getConnection(getUrl(), props);
+String tableName1 = getTableName(conn, JOIN_SUPPLIER_TABLE_FULL_NAME);
+String tableName2 = getTableName(conn, JOIN_ITEM_TABLE_FULL_NAME);
+String query = "SELECT item.\"item_id\", item.name, 
supp.\"supplier_id\", supp.name FROM " +
+tableName1 + " supp RIGHT JOIN " + tableName2 +
+" item ON item.\"supplier_id\" = supp.\"supplier_id\" ORDER BY 
\"item_id\"";
+try {
+PreparedStatement statement = conn.prepareStatement(query);
+ResultSet rs = statement.executeQuery();
+rs.next();
+fail("HashJoinCacheNotFoundException was not thrown or incorrectly 
handled");
+} catch (HashJoinCacheNotFoundException e) {
+//Expected exception
+}
+}
+
 public static class InvalidateHashCache extends SimpleRegionObserver {
 public static Random rand= new Random();
 public static List lastRemovedJoinIds=new 
ArrayList();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/51fae875/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java 

[3/4] phoenix git commit: PHOENIX-4224 Automatic resending cache for HashJoin doesn't work when cache has expired on server side

2017-09-27 Thread ssa
PHOENIX-4224 Automatic resending cache for HashJoin doesn't work when cache has 
expired on server side


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8a7ba9dd
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8a7ba9dd
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8a7ba9dd

Branch: refs/heads/4.x-HBase-1.1
Commit: 8a7ba9ddef5b2c3fea7fc5d57d4185b11c3267e4
Parents: 4e51fe7
Author: Sergey Soldatov 
Authored: Mon Sep 25 19:57:49 2017 -0700
Committer: Sergey Soldatov 
Committed: Wed Sep 27 12:09:17 2017 -0700

--
 .../apache/phoenix/end2end/HashJoinCacheIT.java | 32 +-
 .../apache/phoenix/cache/ServerCacheClient.java | 46 
 2 files changed, 68 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/8a7ba9dd/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinCacheIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinCacheIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinCacheIT.java
index 76f45e2..cebb9ad 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinCacheIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinCacheIT.java
@@ -19,9 +19,13 @@ package org.apache.phoenix.end2end;
 
 import java.io.IOException;
 import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
+import java.util.Properties;
 import java.util.Random;
 
 import org.apache.hadoop.hbase.client.Scan;
@@ -31,9 +35,12 @@ import 
org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver;
 import org.apache.hadoop.hbase.regionserver.RegionScanner;
 import org.apache.phoenix.cache.GlobalCache;
 import org.apache.phoenix.cache.TenantCache;
+import org.apache.phoenix.coprocessor.HashJoinCacheNotFoundException;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 import org.apache.phoenix.join.HashJoinInfo;
+import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.util.ByteUtil;
+import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.TestUtil;
 import org.junit.Test;
@@ -43,6 +50,9 @@ import org.junit.runners.Parameterized.Parameters;
 
 import com.google.common.collect.Lists;
 
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.fail;
+
 @RunWith(Parameterized.class)
 public class HashJoinCacheIT extends HashJoinIT {
 
@@ -426,7 +436,27 @@ public class HashJoinCacheIT extends HashJoinIT {
public void testUpsertWithJoin() throws Exception {
// TODO: We will enable this test once PHOENIX-3163
}
-
+
+@Test
+public void testExpiredCache() throws Exception {
+Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+
props.setProperty(QueryServices.MAX_SERVER_CACHE_TIME_TO_LIVE_MS_ATTRIB, "1");
+Connection conn = DriverManager.getConnection(getUrl(), props);
+String tableName1 = getTableName(conn, JOIN_SUPPLIER_TABLE_FULL_NAME);
+String tableName2 = getTableName(conn, JOIN_ITEM_TABLE_FULL_NAME);
+String query = "SELECT item.\"item_id\", item.name, 
supp.\"supplier_id\", supp.name FROM " +
+tableName1 + " supp RIGHT JOIN " + tableName2 +
+" item ON item.\"supplier_id\" = supp.\"supplier_id\" ORDER BY 
\"item_id\"";
+try {
+PreparedStatement statement = conn.prepareStatement(query);
+ResultSet rs = statement.executeQuery();
+rs.next();
+fail("HashJoinCacheNotFoundException was not thrown or incorrectly 
handled");
+} catch (HashJoinCacheNotFoundException e) {
+//Expected exception
+}
+}
+
 public static class InvalidateHashCache extends SimpleRegionObserver {
 public static Random rand= new Random();
 public static List lastRemovedJoinIds=new 
ArrayList();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8a7ba9dd/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java 
b/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
index ce46a3e..28a42fa 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
@@ -28,6 +28,7 @@ import 

[2/4] phoenix git commit: PHOENIX-4224 Automatic resending cache for HashJoin doesn't work when cache has expired on server side

2017-09-27 Thread ssa
PHOENIX-4224 Automatic resending cache for HashJoin doesn't work when cache has 
expired on server side


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/53016519
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/53016519
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/53016519

Branch: refs/heads/4.x-HBase-1.2
Commit: 53016519df73606f49433470768c5037b69ea185
Parents: d714afc
Author: Sergey Soldatov 
Authored: Mon Sep 25 19:57:49 2017 -0700
Committer: Sergey Soldatov 
Committed: Wed Sep 27 12:09:08 2017 -0700

--
 .../apache/phoenix/end2end/HashJoinCacheIT.java | 32 +-
 .../apache/phoenix/cache/ServerCacheClient.java | 46 
 2 files changed, 68 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/53016519/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinCacheIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinCacheIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinCacheIT.java
index 76f45e2..cebb9ad 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinCacheIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinCacheIT.java
@@ -19,9 +19,13 @@ package org.apache.phoenix.end2end;
 
 import java.io.IOException;
 import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
+import java.util.Properties;
 import java.util.Random;
 
 import org.apache.hadoop.hbase.client.Scan;
@@ -31,9 +35,12 @@ import 
org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver;
 import org.apache.hadoop.hbase.regionserver.RegionScanner;
 import org.apache.phoenix.cache.GlobalCache;
 import org.apache.phoenix.cache.TenantCache;
+import org.apache.phoenix.coprocessor.HashJoinCacheNotFoundException;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 import org.apache.phoenix.join.HashJoinInfo;
+import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.util.ByteUtil;
+import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.TestUtil;
 import org.junit.Test;
@@ -43,6 +50,9 @@ import org.junit.runners.Parameterized.Parameters;
 
 import com.google.common.collect.Lists;
 
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.fail;
+
 @RunWith(Parameterized.class)
 public class HashJoinCacheIT extends HashJoinIT {
 
@@ -426,7 +436,27 @@ public class HashJoinCacheIT extends HashJoinIT {
public void testUpsertWithJoin() throws Exception {
// TODO: We will enable this test once PHOENIX-3163
}
-
+
+@Test
+public void testExpiredCache() throws Exception {
+Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+
props.setProperty(QueryServices.MAX_SERVER_CACHE_TIME_TO_LIVE_MS_ATTRIB, "1");
+Connection conn = DriverManager.getConnection(getUrl(), props);
+String tableName1 = getTableName(conn, JOIN_SUPPLIER_TABLE_FULL_NAME);
+String tableName2 = getTableName(conn, JOIN_ITEM_TABLE_FULL_NAME);
+String query = "SELECT item.\"item_id\", item.name, 
supp.\"supplier_id\", supp.name FROM " +
+tableName1 + " supp RIGHT JOIN " + tableName2 +
+" item ON item.\"supplier_id\" = supp.\"supplier_id\" ORDER BY 
\"item_id\"";
+try {
+PreparedStatement statement = conn.prepareStatement(query);
+ResultSet rs = statement.executeQuery();
+rs.next();
+fail("HashJoinCacheNotFoundException was not thrown or incorrectly 
handled");
+} catch (HashJoinCacheNotFoundException e) {
+//Expected exception
+}
+}
+
 public static class InvalidateHashCache extends SimpleRegionObserver {
 public static Random rand= new Random();
 public static List lastRemovedJoinIds=new 
ArrayList();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/53016519/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java 
b/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
index ce46a3e..28a42fa 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
@@ -28,6 +28,7 @@ import 

[1/4] phoenix git commit: PHOENIX-4225 Using Google cache may lead to lock up on RS side.

2017-09-27 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 f8df68997 -> ca664fe10
  refs/heads/4.x-HBase-1.1 fd2b064a0 -> 4e51fe772
  refs/heads/4.x-HBase-1.2 6f923a419 -> d714afcec
  refs/heads/master 84dc1d44a -> 764eb8f13


PHOENIX-4225 Using Google cache may lead to lock up on RS side.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/764eb8f1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/764eb8f1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/764eb8f1

Branch: refs/heads/master
Commit: 764eb8f13237a89961e259aca366e45b38b76ef1
Parents: 84dc1d4
Author: Sergey Soldatov 
Authored: Tue Sep 26 14:41:53 2017 -0700
Committer: Sergey Soldatov 
Committed: Wed Sep 27 10:42:59 2017 -0700

--
 .../apache/phoenix/cache/TenantCacheImpl.java   |  2 ++
 .../apache/phoenix/cache/TenantCacheTest.java   | 37 +++-
 2 files changed, 38 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/764eb8f1/phoenix-core/src/main/java/org/apache/phoenix/cache/TenantCacheImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/cache/TenantCacheImpl.java 
b/phoenix-core/src/main/java/org/apache/phoenix/cache/TenantCacheImpl.java
index 3d178f6..fdf0646 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/cache/TenantCacheImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/cache/TenantCacheImpl.java
@@ -100,11 +100,13 @@ public class TenantCacheImpl implements TenantCache {
 
 @Override
 public Closeable getServerCache(ImmutableBytesPtr cacheId) {
+getServerCaches().cleanUp();
 return getServerCaches().getIfPresent(cacheId);
 }
 
 @Override
 public Closeable addServerCache(ImmutableBytesPtr cacheId, 
ImmutableBytesWritable cachePtr, byte[] txState, ServerCacheFactory 
cacheFactory, boolean useProtoForIndexMaintainer) throws SQLException {
+getServerCaches().cleanUp();
 MemoryChunk chunk = 
this.getMemoryManager().allocate(cachePtr.getLength() + txState.length);
 boolean success = false;
 try {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/764eb8f1/phoenix-core/src/test/java/org/apache/phoenix/cache/TenantCacheTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/cache/TenantCacheTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/cache/TenantCacheTest.java
index 932149c..f4c83b2 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/cache/TenantCacheTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/cache/TenantCacheTest.java
@@ -18,6 +18,7 @@
 package org.apache.phoenix.cache;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
 
 import java.io.Closeable;
 import java.io.DataInput;
@@ -67,7 +68,41 @@ public class TenantCacheTest {
 cache.cleanUp();
 assertEquals(maxBytes, memoryManager.getAvailableMemory());
 }
-
+
+
+@Test
+public void testFreeMemoryOnAccess() throws Exception {
+int maxServerCacheTimeToLive = 10;
+long maxBytes = 1000;
+GlobalMemoryManager memoryManager = new GlobalMemoryManager(maxBytes);
+ManualTicker ticker = new ManualTicker();
+TenantCacheImpl cache = new TenantCacheImpl(memoryManager, 
maxServerCacheTimeToLive, ticker);
+ImmutableBytesPtr cacheId1 = new ImmutableBytesPtr(Bytes.toBytes("a"));
+ImmutableBytesWritable cachePtr = new 
ImmutableBytesWritable(Bytes.toBytes("a"));
+cache.addServerCache(cacheId1, cachePtr, ByteUtil.EMPTY_BYTE_ARRAY, 
cacheFactory, true);
+assertEquals(maxBytes-1, memoryManager.getAvailableMemory());
+ticker.time += (maxServerCacheTimeToLive + 1) * 100;
+assertNull(cache.getServerCache(cacheId1));
+assertEquals(maxBytes, memoryManager.getAvailableMemory());
+}
+
+@Test
+public void testExpiredCacheOnAddingNew() throws Exception {
+int maxServerCacheTimeToLive = 10;
+long maxBytes = 10;
+GlobalMemoryManager memoryManager = new GlobalMemoryManager(maxBytes);
+ManualTicker ticker = new ManualTicker();
+TenantCacheImpl cache = new TenantCacheImpl(memoryManager, 
maxServerCacheTimeToLive, ticker);
+ImmutableBytesPtr cacheId1 = new ImmutableBytesPtr(Bytes.toBytes("a"));
+ImmutableBytesPtr cacheId2 = new ImmutableBytesPtr(Bytes.toBytes("b"));
+ImmutableBytesWritable cachePtr = new 
ImmutableBytesWritable(Bytes.toBytes("12345678"));
+cache.addServerCache(cacheId1, cachePtr, ByteUtil.EMPTY_BYTE_ARRAY, 
cacheFactory, true);
+

[2/4] phoenix git commit: PHOENIX-4225 Using Google cache may lead to lock up on RS side.

2017-09-27 Thread ssa
PHOENIX-4225 Using Google cache may lead to lock up on RS side.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/4e51fe77
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/4e51fe77
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/4e51fe77

Branch: refs/heads/4.x-HBase-1.1
Commit: 4e51fe7726b896b5c05e43aee825cfb1aded480d
Parents: fd2b064
Author: Sergey Soldatov 
Authored: Tue Sep 26 14:41:53 2017 -0700
Committer: Sergey Soldatov 
Committed: Wed Sep 27 10:43:34 2017 -0700

--
 .../apache/phoenix/cache/TenantCacheImpl.java   |  2 ++
 .../apache/phoenix/cache/TenantCacheTest.java   | 37 +++-
 2 files changed, 38 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/4e51fe77/phoenix-core/src/main/java/org/apache/phoenix/cache/TenantCacheImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/cache/TenantCacheImpl.java 
b/phoenix-core/src/main/java/org/apache/phoenix/cache/TenantCacheImpl.java
index 3d178f6..fdf0646 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/cache/TenantCacheImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/cache/TenantCacheImpl.java
@@ -100,11 +100,13 @@ public class TenantCacheImpl implements TenantCache {
 
 @Override
 public Closeable getServerCache(ImmutableBytesPtr cacheId) {
+getServerCaches().cleanUp();
 return getServerCaches().getIfPresent(cacheId);
 }
 
 @Override
 public Closeable addServerCache(ImmutableBytesPtr cacheId, 
ImmutableBytesWritable cachePtr, byte[] txState, ServerCacheFactory 
cacheFactory, boolean useProtoForIndexMaintainer) throws SQLException {
+getServerCaches().cleanUp();
 MemoryChunk chunk = 
this.getMemoryManager().allocate(cachePtr.getLength() + txState.length);
 boolean success = false;
 try {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/4e51fe77/phoenix-core/src/test/java/org/apache/phoenix/cache/TenantCacheTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/cache/TenantCacheTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/cache/TenantCacheTest.java
index 932149c..f4c83b2 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/cache/TenantCacheTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/cache/TenantCacheTest.java
@@ -18,6 +18,7 @@
 package org.apache.phoenix.cache;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
 
 import java.io.Closeable;
 import java.io.DataInput;
@@ -67,7 +68,41 @@ public class TenantCacheTest {
 cache.cleanUp();
 assertEquals(maxBytes, memoryManager.getAvailableMemory());
 }
-
+
+
+@Test
+public void testFreeMemoryOnAccess() throws Exception {
+int maxServerCacheTimeToLive = 10;
+long maxBytes = 1000;
+GlobalMemoryManager memoryManager = new GlobalMemoryManager(maxBytes);
+ManualTicker ticker = new ManualTicker();
+TenantCacheImpl cache = new TenantCacheImpl(memoryManager, 
maxServerCacheTimeToLive, ticker);
+ImmutableBytesPtr cacheId1 = new ImmutableBytesPtr(Bytes.toBytes("a"));
+ImmutableBytesWritable cachePtr = new 
ImmutableBytesWritable(Bytes.toBytes("a"));
+cache.addServerCache(cacheId1, cachePtr, ByteUtil.EMPTY_BYTE_ARRAY, 
cacheFactory, true);
+assertEquals(maxBytes-1, memoryManager.getAvailableMemory());
+ticker.time += (maxServerCacheTimeToLive + 1) * 100;
+assertNull(cache.getServerCache(cacheId1));
+assertEquals(maxBytes, memoryManager.getAvailableMemory());
+}
+
+@Test
+public void testExpiredCacheOnAddingNew() throws Exception {
+int maxServerCacheTimeToLive = 10;
+long maxBytes = 10;
+GlobalMemoryManager memoryManager = new GlobalMemoryManager(maxBytes);
+ManualTicker ticker = new ManualTicker();
+TenantCacheImpl cache = new TenantCacheImpl(memoryManager, 
maxServerCacheTimeToLive, ticker);
+ImmutableBytesPtr cacheId1 = new ImmutableBytesPtr(Bytes.toBytes("a"));
+ImmutableBytesPtr cacheId2 = new ImmutableBytesPtr(Bytes.toBytes("b"));
+ImmutableBytesWritable cachePtr = new 
ImmutableBytesWritable(Bytes.toBytes("12345678"));
+cache.addServerCache(cacheId1, cachePtr, ByteUtil.EMPTY_BYTE_ARRAY, 
cacheFactory, true);
+assertEquals(2, memoryManager.getAvailableMemory());
+ticker.time += (maxServerCacheTimeToLive + 1) * 100;
+cache.addServerCache(cacheId1, cachePtr, ByteUtil.EMPTY_BYTE_ARRAY, 
cacheFactory, true);
+

[4/4] phoenix git commit: PHOENIX-4225 Using Google cache may lead to lock up on RS side.

2017-09-27 Thread ssa
PHOENIX-4225 Using Google cache may lead to lock up on RS side.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ca664fe1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ca664fe1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ca664fe1

Branch: refs/heads/4.x-HBase-0.98
Commit: ca664fe1020166d5b074276117a71bb2769edd1d
Parents: f8df689
Author: Sergey Soldatov 
Authored: Tue Sep 26 14:41:53 2017 -0700
Committer: Sergey Soldatov 
Committed: Wed Sep 27 10:44:37 2017 -0700

--
 .../apache/phoenix/cache/TenantCacheImpl.java   |  2 ++
 .../apache/phoenix/cache/TenantCacheTest.java   | 37 +++-
 2 files changed, 38 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ca664fe1/phoenix-core/src/main/java/org/apache/phoenix/cache/TenantCacheImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/cache/TenantCacheImpl.java 
b/phoenix-core/src/main/java/org/apache/phoenix/cache/TenantCacheImpl.java
index 3d178f6..fdf0646 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/cache/TenantCacheImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/cache/TenantCacheImpl.java
@@ -100,11 +100,13 @@ public class TenantCacheImpl implements TenantCache {
 
 @Override
 public Closeable getServerCache(ImmutableBytesPtr cacheId) {
+getServerCaches().cleanUp();
 return getServerCaches().getIfPresent(cacheId);
 }
 
 @Override
 public Closeable addServerCache(ImmutableBytesPtr cacheId, 
ImmutableBytesWritable cachePtr, byte[] txState, ServerCacheFactory 
cacheFactory, boolean useProtoForIndexMaintainer) throws SQLException {
+getServerCaches().cleanUp();
 MemoryChunk chunk = 
this.getMemoryManager().allocate(cachePtr.getLength() + txState.length);
 boolean success = false;
 try {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ca664fe1/phoenix-core/src/test/java/org/apache/phoenix/cache/TenantCacheTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/cache/TenantCacheTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/cache/TenantCacheTest.java
index 932149c..f4c83b2 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/cache/TenantCacheTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/cache/TenantCacheTest.java
@@ -18,6 +18,7 @@
 package org.apache.phoenix.cache;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
 
 import java.io.Closeable;
 import java.io.DataInput;
@@ -67,7 +68,41 @@ public class TenantCacheTest {
 cache.cleanUp();
 assertEquals(maxBytes, memoryManager.getAvailableMemory());
 }
-
+
+
+@Test
+public void testFreeMemoryOnAccess() throws Exception {
+int maxServerCacheTimeToLive = 10;
+long maxBytes = 1000;
+GlobalMemoryManager memoryManager = new GlobalMemoryManager(maxBytes);
+ManualTicker ticker = new ManualTicker();
+TenantCacheImpl cache = new TenantCacheImpl(memoryManager, 
maxServerCacheTimeToLive, ticker);
+ImmutableBytesPtr cacheId1 = new ImmutableBytesPtr(Bytes.toBytes("a"));
+ImmutableBytesWritable cachePtr = new 
ImmutableBytesWritable(Bytes.toBytes("a"));
+cache.addServerCache(cacheId1, cachePtr, ByteUtil.EMPTY_BYTE_ARRAY, 
cacheFactory, true);
+assertEquals(maxBytes-1, memoryManager.getAvailableMemory());
+ticker.time += (maxServerCacheTimeToLive + 1) * 100;
+assertNull(cache.getServerCache(cacheId1));
+assertEquals(maxBytes, memoryManager.getAvailableMemory());
+}
+
+@Test
+public void testExpiredCacheOnAddingNew() throws Exception {
+int maxServerCacheTimeToLive = 10;
+long maxBytes = 10;
+GlobalMemoryManager memoryManager = new GlobalMemoryManager(maxBytes);
+ManualTicker ticker = new ManualTicker();
+TenantCacheImpl cache = new TenantCacheImpl(memoryManager, 
maxServerCacheTimeToLive, ticker);
+ImmutableBytesPtr cacheId1 = new ImmutableBytesPtr(Bytes.toBytes("a"));
+ImmutableBytesPtr cacheId2 = new ImmutableBytesPtr(Bytes.toBytes("b"));
+ImmutableBytesWritable cachePtr = new 
ImmutableBytesWritable(Bytes.toBytes("12345678"));
+cache.addServerCache(cacheId1, cachePtr, ByteUtil.EMPTY_BYTE_ARRAY, 
cacheFactory, true);
+assertEquals(2, memoryManager.getAvailableMemory());
+ticker.time += (maxServerCacheTimeToLive + 1) * 100;
+cache.addServerCache(cacheId1, cachePtr, ByteUtil.EMPTY_BYTE_ARRAY, 
cacheFactory, true);
+

[3/4] phoenix git commit: PHOENIX-4225 Using Google cache may lead to lock up on RS side.

2017-09-27 Thread ssa
PHOENIX-4225 Using Google cache may lead to lock up on RS side.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d714afce
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d714afce
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d714afce

Branch: refs/heads/4.x-HBase-1.2
Commit: d714afcecfa44e9ce4b017e9c15ecd4cd0d4ba51
Parents: 6f923a4
Author: Sergey Soldatov 
Authored: Tue Sep 26 14:41:53 2017 -0700
Committer: Sergey Soldatov 
Committed: Wed Sep 27 10:43:41 2017 -0700

--
 .../apache/phoenix/cache/TenantCacheImpl.java   |  2 ++
 .../apache/phoenix/cache/TenantCacheTest.java   | 37 +++-
 2 files changed, 38 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/d714afce/phoenix-core/src/main/java/org/apache/phoenix/cache/TenantCacheImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/cache/TenantCacheImpl.java 
b/phoenix-core/src/main/java/org/apache/phoenix/cache/TenantCacheImpl.java
index 3d178f6..fdf0646 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/cache/TenantCacheImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/cache/TenantCacheImpl.java
@@ -100,11 +100,13 @@ public class TenantCacheImpl implements TenantCache {
 
 @Override
 public Closeable getServerCache(ImmutableBytesPtr cacheId) {
+getServerCaches().cleanUp();
 return getServerCaches().getIfPresent(cacheId);
 }
 
 @Override
 public Closeable addServerCache(ImmutableBytesPtr cacheId, 
ImmutableBytesWritable cachePtr, byte[] txState, ServerCacheFactory 
cacheFactory, boolean useProtoForIndexMaintainer) throws SQLException {
+getServerCaches().cleanUp();
 MemoryChunk chunk = 
this.getMemoryManager().allocate(cachePtr.getLength() + txState.length);
 boolean success = false;
 try {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/d714afce/phoenix-core/src/test/java/org/apache/phoenix/cache/TenantCacheTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/cache/TenantCacheTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/cache/TenantCacheTest.java
index 932149c..f4c83b2 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/cache/TenantCacheTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/cache/TenantCacheTest.java
@@ -18,6 +18,7 @@
 package org.apache.phoenix.cache;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
 
 import java.io.Closeable;
 import java.io.DataInput;
@@ -67,7 +68,41 @@ public class TenantCacheTest {
 cache.cleanUp();
 assertEquals(maxBytes, memoryManager.getAvailableMemory());
 }
-
+
+
+@Test
+public void testFreeMemoryOnAccess() throws Exception {
+int maxServerCacheTimeToLive = 10;
+long maxBytes = 1000;
+GlobalMemoryManager memoryManager = new GlobalMemoryManager(maxBytes);
+ManualTicker ticker = new ManualTicker();
+TenantCacheImpl cache = new TenantCacheImpl(memoryManager, 
maxServerCacheTimeToLive, ticker);
+ImmutableBytesPtr cacheId1 = new ImmutableBytesPtr(Bytes.toBytes("a"));
+ImmutableBytesWritable cachePtr = new 
ImmutableBytesWritable(Bytes.toBytes("a"));
+cache.addServerCache(cacheId1, cachePtr, ByteUtil.EMPTY_BYTE_ARRAY, 
cacheFactory, true);
+assertEquals(maxBytes-1, memoryManager.getAvailableMemory());
+ticker.time += (maxServerCacheTimeToLive + 1) * 100;
+assertNull(cache.getServerCache(cacheId1));
+assertEquals(maxBytes, memoryManager.getAvailableMemory());
+}
+
+@Test
+public void testExpiredCacheOnAddingNew() throws Exception {
+int maxServerCacheTimeToLive = 10;
+long maxBytes = 10;
+GlobalMemoryManager memoryManager = new GlobalMemoryManager(maxBytes);
+ManualTicker ticker = new ManualTicker();
+TenantCacheImpl cache = new TenantCacheImpl(memoryManager, 
maxServerCacheTimeToLive, ticker);
+ImmutableBytesPtr cacheId1 = new ImmutableBytesPtr(Bytes.toBytes("a"));
+ImmutableBytesPtr cacheId2 = new ImmutableBytesPtr(Bytes.toBytes("b"));
+ImmutableBytesWritable cachePtr = new 
ImmutableBytesWritable(Bytes.toBytes("12345678"));
+cache.addServerCache(cacheId1, cachePtr, ByteUtil.EMPTY_BYTE_ARRAY, 
cacheFactory, true);
+assertEquals(2, memoryManager.getAvailableMemory());
+ticker.time += (maxServerCacheTimeToLive + 1) * 100;
+cache.addServerCache(cacheId1, cachePtr, ByteUtil.EMPTY_BYTE_ARRAY, 
cacheFactory, true);
+

[4/4] phoenix git commit: PHOENIX-4068 Atomic Upsert salted table with error(java.lang.NullPointerException)

2017-09-05 Thread ssa
PHOENIX-4068 Atomic Upsert salted table with 
error(java.lang.NullPointerException)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f1d2b6f0
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f1d2b6f0
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f1d2b6f0

Branch: refs/heads/4.x-HBase-1.2
Commit: f1d2b6f03123eee2f49a02cd442f58a2ad0a3694
Parents: 41c0521
Author: Sergey Soldatov 
Authored: Thu Aug 10 22:06:49 2017 -0700
Committer: Sergey Soldatov 
Committed: Tue Sep 5 13:39:49 2017 -0700

--
 .../phoenix/end2end/OnDuplicateKeyIT.java   | 33 +++-
 .../apache/phoenix/compile/UpsertCompiler.java  |  9 +++---
 .../phoenix/index/PhoenixIndexBuilder.java  |  3 +-
 3 files changed, 39 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f1d2b6f0/phoenix-core/src/it/java/org/apache/phoenix/end2end/OnDuplicateKeyIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/OnDuplicateKeyIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/OnDuplicateKeyIT.java
index 2477f56..f1ee0e7 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/OnDuplicateKeyIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/OnDuplicateKeyIT.java
@@ -21,6 +21,7 @@ import static 
org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.sql.Connection;
 import java.sql.Date;
@@ -549,6 +550,36 @@ public class OnDuplicateKeyIT extends 
ParallelStatsDisabledIT {
 
 conn.close();
 }
-
+@Test
+public void testDuplicateUpdateWithSaltedTable() throws Exception {
+Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+final Connection conn = DriverManager.getConnection(getUrl(), props);
+String tableName = generateUniqueName();
+try {
+String ddl = "create table " + tableName + " (id varchar not 
null,id1 varchar not null, counter1 bigint, counter2 bigint CONSTRAINT pk 
PRIMARY KEY (id,id1)) SALT_BUCKETS=6";
+conn.createStatement().execute(ddl);
+createIndex(conn, tableName);
+String dml = "UPSERT INTO " + tableName + " (id,id1, counter1, 
counter2) VALUES ('abc','123', 0, 0) ON DUPLICATE KEY UPDATE counter1 = 
counter1 + 1, counter2 = counter2 + 1";
+conn.createStatement().execute(dml);
+conn.commit();
+ResultSet rs = conn.createStatement().executeQuery("SELECT * FROM 
" + tableName);
+assertTrue(rs.next());
+assertEquals("0",rs.getString(3));
+assertEquals("0",rs.getString(4));
+conn.createStatement().execute(dml);
+conn.commit();
+rs = conn.createStatement().executeQuery("SELECT * FROM " + 
tableName);
+assertTrue(rs.next());
+assertEquals("1",rs.getString(3));
+assertEquals("1",rs.getString(4));
+
+} catch (Exception e) {
+fail();
+} finally {
+conn.close();
+}
+}
+
+
 }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f1d2b6f0/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
index 0d09e9d..c384292 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
@@ -916,15 +916,16 @@ public class UpsertCompiler {
 }
 if (onDupKeyPairs.isEmpty()) { // ON DUPLICATE KEY IGNORE
 onDupKeyBytesToBe = 
PhoenixIndexBuilder.serializeOnDupKeyIgnore();
-} else {   // ON DUPLICATE KEY UPDATE
-int position = 1;
+} else {   // ON DUPLICATE KEY UPDATE;
+int position = table.getBucketNum() == null ? 0 : 1;
 UpdateColumnCompiler compiler = new 
UpdateColumnCompiler(context);
 int nColumns = onDupKeyPairs.size();
 List updateExpressions = 
Lists.newArrayListWithExpectedSize(nColumns);
 LinkedHashSet updateColumns = 
Sets.newLinkedHashSetWithExpectedSize(nColumns + 1);
 updateColumns.add(new PColumnImpl(
-table.getPKColumns().get(0).getName(), // Use 

[2/4] phoenix git commit: PHOENIX-4068 Atomic Upsert salted table with error(java.lang.NullPointerException)

2017-09-05 Thread ssa
PHOENIX-4068 Atomic Upsert salted table with 
error(java.lang.NullPointerException)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9f282eff
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9f282eff
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9f282eff

Branch: refs/heads/4.x-HBase-0.98
Commit: 9f282eff45931d5fab3a59a2c81f1c6ea3e8bb96
Parents: 7513d66
Author: Sergey Soldatov 
Authored: Thu Aug 10 22:06:49 2017 -0700
Committer: Sergey Soldatov 
Committed: Tue Sep 5 13:39:31 2017 -0700

--
 .../phoenix/end2end/OnDuplicateKeyIT.java   | 33 +++-
 .../apache/phoenix/compile/UpsertCompiler.java  |  9 +++---
 .../phoenix/index/PhoenixIndexBuilder.java  |  3 +-
 3 files changed, 39 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/9f282eff/phoenix-core/src/it/java/org/apache/phoenix/end2end/OnDuplicateKeyIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/OnDuplicateKeyIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/OnDuplicateKeyIT.java
index 2477f56..f1ee0e7 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/OnDuplicateKeyIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/OnDuplicateKeyIT.java
@@ -21,6 +21,7 @@ import static 
org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.sql.Connection;
 import java.sql.Date;
@@ -549,6 +550,36 @@ public class OnDuplicateKeyIT extends 
ParallelStatsDisabledIT {
 
 conn.close();
 }
-
+@Test
+public void testDuplicateUpdateWithSaltedTable() throws Exception {
+Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+final Connection conn = DriverManager.getConnection(getUrl(), props);
+String tableName = generateUniqueName();
+try {
+String ddl = "create table " + tableName + " (id varchar not 
null,id1 varchar not null, counter1 bigint, counter2 bigint CONSTRAINT pk 
PRIMARY KEY (id,id1)) SALT_BUCKETS=6";
+conn.createStatement().execute(ddl);
+createIndex(conn, tableName);
+String dml = "UPSERT INTO " + tableName + " (id,id1, counter1, 
counter2) VALUES ('abc','123', 0, 0) ON DUPLICATE KEY UPDATE counter1 = 
counter1 + 1, counter2 = counter2 + 1";
+conn.createStatement().execute(dml);
+conn.commit();
+ResultSet rs = conn.createStatement().executeQuery("SELECT * FROM 
" + tableName);
+assertTrue(rs.next());
+assertEquals("0",rs.getString(3));
+assertEquals("0",rs.getString(4));
+conn.createStatement().execute(dml);
+conn.commit();
+rs = conn.createStatement().executeQuery("SELECT * FROM " + 
tableName);
+assertTrue(rs.next());
+assertEquals("1",rs.getString(3));
+assertEquals("1",rs.getString(4));
+
+} catch (Exception e) {
+fail();
+} finally {
+conn.close();
+}
+}
+
+
 }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/9f282eff/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
index 1669ab9..763c81a 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
@@ -916,15 +916,16 @@ public class UpsertCompiler {
 }
 if (onDupKeyPairs.isEmpty()) { // ON DUPLICATE KEY IGNORE
 onDupKeyBytesToBe = 
PhoenixIndexBuilder.serializeOnDupKeyIgnore();
-} else {   // ON DUPLICATE KEY UPDATE
-int position = 1;
+} else {   // ON DUPLICATE KEY UPDATE;
+int position = table.getBucketNum() == null ? 0 : 1;
 UpdateColumnCompiler compiler = new 
UpdateColumnCompiler(context);
 int nColumns = onDupKeyPairs.size();
 List updateExpressions = 
Lists.newArrayListWithExpectedSize(nColumns);
 LinkedHashSet updateColumns = 
Sets.newLinkedHashSetWithExpectedSize(nColumns + 1);
 updateColumns.add(new PColumnImpl(
-table.getPKColumns().get(0).getName(), // Use 

[3/4] phoenix git commit: PHOENIX-4068 Atomic Upsert salted table with error(java.lang.NullPointerException)

2017-09-05 Thread ssa
PHOENIX-4068 Atomic Upsert salted table with 
error(java.lang.NullPointerException)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/63779600
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/63779600
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/63779600

Branch: refs/heads/4.x-HBase-1.1
Commit: 63779600dd0d2df3df5d443de631fd6f00dd0304
Parents: cb12016
Author: Sergey Soldatov 
Authored: Thu Aug 10 22:06:49 2017 -0700
Committer: Sergey Soldatov 
Committed: Tue Sep 5 13:39:42 2017 -0700

--
 .../phoenix/end2end/OnDuplicateKeyIT.java   | 33 +++-
 .../apache/phoenix/compile/UpsertCompiler.java  |  9 +++---
 .../phoenix/index/PhoenixIndexBuilder.java  |  3 +-
 3 files changed, 39 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/63779600/phoenix-core/src/it/java/org/apache/phoenix/end2end/OnDuplicateKeyIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/OnDuplicateKeyIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/OnDuplicateKeyIT.java
index 2477f56..f1ee0e7 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/OnDuplicateKeyIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/OnDuplicateKeyIT.java
@@ -21,6 +21,7 @@ import static 
org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.sql.Connection;
 import java.sql.Date;
@@ -549,6 +550,36 @@ public class OnDuplicateKeyIT extends 
ParallelStatsDisabledIT {
 
 conn.close();
 }
-
+@Test
+public void testDuplicateUpdateWithSaltedTable() throws Exception {
+Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+final Connection conn = DriverManager.getConnection(getUrl(), props);
+String tableName = generateUniqueName();
+try {
+String ddl = "create table " + tableName + " (id varchar not 
null,id1 varchar not null, counter1 bigint, counter2 bigint CONSTRAINT pk 
PRIMARY KEY (id,id1)) SALT_BUCKETS=6";
+conn.createStatement().execute(ddl);
+createIndex(conn, tableName);
+String dml = "UPSERT INTO " + tableName + " (id,id1, counter1, 
counter2) VALUES ('abc','123', 0, 0) ON DUPLICATE KEY UPDATE counter1 = 
counter1 + 1, counter2 = counter2 + 1";
+conn.createStatement().execute(dml);
+conn.commit();
+ResultSet rs = conn.createStatement().executeQuery("SELECT * FROM 
" + tableName);
+assertTrue(rs.next());
+assertEquals("0",rs.getString(3));
+assertEquals("0",rs.getString(4));
+conn.createStatement().execute(dml);
+conn.commit();
+rs = conn.createStatement().executeQuery("SELECT * FROM " + 
tableName);
+assertTrue(rs.next());
+assertEquals("1",rs.getString(3));
+assertEquals("1",rs.getString(4));
+
+} catch (Exception e) {
+fail();
+} finally {
+conn.close();
+}
+}
+
+
 }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/63779600/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
index 0d09e9d..c384292 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
@@ -916,15 +916,16 @@ public class UpsertCompiler {
 }
 if (onDupKeyPairs.isEmpty()) { // ON DUPLICATE KEY IGNORE
 onDupKeyBytesToBe = 
PhoenixIndexBuilder.serializeOnDupKeyIgnore();
-} else {   // ON DUPLICATE KEY UPDATE
-int position = 1;
+} else {   // ON DUPLICATE KEY UPDATE;
+int position = table.getBucketNum() == null ? 0 : 1;
 UpdateColumnCompiler compiler = new 
UpdateColumnCompiler(context);
 int nColumns = onDupKeyPairs.size();
 List updateExpressions = 
Lists.newArrayListWithExpectedSize(nColumns);
 LinkedHashSet updateColumns = 
Sets.newLinkedHashSetWithExpectedSize(nColumns + 1);
 updateColumns.add(new PColumnImpl(
-table.getPKColumns().get(0).getName(), // Use 

[1/4] phoenix git commit: PHOENIX-4068 Atomic Upsert salted table with error(java.lang.NullPointerException)

2017-09-05 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 7513d663f -> 9f282eff4
  refs/heads/4.x-HBase-1.1 cb1201620 -> 63779600d
  refs/heads/4.x-HBase-1.2 41c05215c -> f1d2b6f03
  refs/heads/master cec7e1cf8 -> 839be97e9


PHOENIX-4068 Atomic Upsert salted table with 
error(java.lang.NullPointerException)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/839be97e
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/839be97e
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/839be97e

Branch: refs/heads/master
Commit: 839be97e9ef2d23d3e9713313d4a93521bc74028
Parents: cec7e1c
Author: Sergey Soldatov 
Authored: Thu Aug 10 22:06:49 2017 -0700
Committer: Sergey Soldatov 
Committed: Tue Sep 5 13:31:30 2017 -0700

--
 .../phoenix/end2end/OnDuplicateKeyIT.java   | 33 +++-
 .../apache/phoenix/compile/UpsertCompiler.java  |  9 +++---
 .../phoenix/index/PhoenixIndexBuilder.java  |  3 +-
 3 files changed, 39 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/839be97e/phoenix-core/src/it/java/org/apache/phoenix/end2end/OnDuplicateKeyIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/OnDuplicateKeyIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/OnDuplicateKeyIT.java
index 2477f56..f1ee0e7 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/OnDuplicateKeyIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/OnDuplicateKeyIT.java
@@ -21,6 +21,7 @@ import static 
org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.sql.Connection;
 import java.sql.Date;
@@ -549,6 +550,36 @@ public class OnDuplicateKeyIT extends 
ParallelStatsDisabledIT {
 
 conn.close();
 }
-
+@Test
+public void testDuplicateUpdateWithSaltedTable() throws Exception {
+Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+final Connection conn = DriverManager.getConnection(getUrl(), props);
+String tableName = generateUniqueName();
+try {
+String ddl = "create table " + tableName + " (id varchar not 
null,id1 varchar not null, counter1 bigint, counter2 bigint CONSTRAINT pk 
PRIMARY KEY (id,id1)) SALT_BUCKETS=6";
+conn.createStatement().execute(ddl);
+createIndex(conn, tableName);
+String dml = "UPSERT INTO " + tableName + " (id,id1, counter1, 
counter2) VALUES ('abc','123', 0, 0) ON DUPLICATE KEY UPDATE counter1 = 
counter1 + 1, counter2 = counter2 + 1";
+conn.createStatement().execute(dml);
+conn.commit();
+ResultSet rs = conn.createStatement().executeQuery("SELECT * FROM 
" + tableName);
+assertTrue(rs.next());
+assertEquals("0",rs.getString(3));
+assertEquals("0",rs.getString(4));
+conn.createStatement().execute(dml);
+conn.commit();
+rs = conn.createStatement().executeQuery("SELECT * FROM " + 
tableName);
+assertTrue(rs.next());
+assertEquals("1",rs.getString(3));
+assertEquals("1",rs.getString(4));
+
+} catch (Exception e) {
+fail();
+} finally {
+conn.close();
+}
+}
+
+
 }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/839be97e/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
index 0d09e9d..c384292 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
@@ -916,15 +916,16 @@ public class UpsertCompiler {
 }
 if (onDupKeyPairs.isEmpty()) { // ON DUPLICATE KEY IGNORE
 onDupKeyBytesToBe = 
PhoenixIndexBuilder.serializeOnDupKeyIgnore();
-} else {   // ON DUPLICATE KEY UPDATE
-int position = 1;
+} else {   // ON DUPLICATE KEY UPDATE;
+int position = table.getBucketNum() == null ? 0 : 1;
 UpdateColumnCompiler compiler = new 
UpdateColumnCompiler(context);
 int nColumns = onDupKeyPairs.size();
 List updateExpressions = 

[4/4] phoenix git commit: PHOENIX-3406 CSV BulkLoad MR job incorrectly handle ROW_TIMESTAMP

2017-09-05 Thread ssa
PHOENIX-3406 CSV BulkLoad MR job incorrectly handle ROW_TIMESTAMP


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/cec7e1cf
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/cec7e1cf
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/cec7e1cf

Branch: refs/heads/master
Commit: cec7e1cf8794e7ec0ee5c8be9a32e33cd211ec3b
Parents: c8cbb5e
Author: Sergey Soldatov 
Authored: Tue Oct 25 14:09:54 2016 -0700
Committer: Sergey Soldatov 
Committed: Tue Sep 5 12:46:47 2017 -0700

--
 .../phoenix/end2end/CsvBulkLoadToolIT.java  | 38 
 .../mapreduce/FormatToBytesWritableMapper.java  |  1 +
 .../mapreduce/FormatToKeyValueReducer.java  |  7 ++--
 3 files changed, 43 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/cec7e1cf/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java
index 5a186a0..40fe900 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java
@@ -92,6 +92,44 @@ public class CsvBulkLoadToolIT extends BaseOwnClusterIT {
 rs.close();
 stmt.close();
 }
+@Test
+public void testImportWithRowTimestamp() throws Exception {
+
+Statement stmt = conn.createStatement();
+stmt.execute("CREATE TABLE S.TABLE9 (ID INTEGER NOT NULL , NAME 
VARCHAR, T DATE NOT NULL," +
+" " +
+"CONSTRAINT PK PRIMARY KEY (ID, T ROW_TIMESTAMP))");
+
+FileSystem fs = FileSystem.get(getUtility().getConfiguration());
+FSDataOutputStream outputStream = fs.create(new 
Path("/tmp/input1.csv"));
+PrintWriter printWriter = new PrintWriter(outputStream);
+printWriter.println("1,Name 1,1970/01/01");
+printWriter.println("2,Name 2,1971/01/01");
+printWriter.println("3,Name 2,1972/01/01");
+printWriter.close();
+
+CsvBulkLoadTool csvBulkLoadTool = new CsvBulkLoadTool();
+csvBulkLoadTool.setConf(new 
Configuration(getUtility().getConfiguration()));
+csvBulkLoadTool.getConf().set(DATE_FORMAT_ATTRIB,"/MM/dd");
+int exitCode = csvBulkLoadTool.run(new String[] {
+"--input", "/tmp/input1.csv",
+"--table", "table9",
+"--schema", "s",
+"--zookeeper", zkQuorum});
+assertEquals(0, exitCode);
+
+ResultSet rs = stmt.executeQuery("SELECT id, name, t FROM s.table9 
WHERE T < to_date" +
+"('1972-01-01') AND T > to_date('1970-01-01') ORDER BY id");
+assertTrue(rs.next());
+assertEquals(2, rs.getInt(1));
+assertEquals("Name 2", rs.getString(2));
+assertEquals(DateUtil.parseDate("1971-01-01"), rs.getDate(3));
+assertFalse(rs.next());
+
+rs.close();
+stmt.close();
+}
+
 
 @Test
 public void testImportWithTabs() throws Exception {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/cec7e1cf/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToBytesWritableMapper.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToBytesWritableMapper.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToBytesWritableMapper.java
index 1dae981..360859e 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToBytesWritableMapper.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToBytesWritableMapper.java
@@ -314,6 +314,7 @@ public abstract class FormatToBytesWritableMapper 
extends Mapper

http://git-wip-us.apache.org/repos/asf/phoenix/blob/cec7e1cf/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToKeyValueReducer.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToKeyValueReducer.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToKeyValueReducer.java
index 07cf285..72af1a7 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToKeyValueReducer.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToKeyValueReducer.java
@@ -144,6 +144,7 @@ public class FormatToKeyValueReducer
 DataInputStream input = new DataInputStream(new 
ByteArrayInputStream(aggregatedArray.get()));
 while (input.available() != 0) {
 

[3/4] phoenix git commit: PHOENIX-3406 CSV BulkLoad MR job incorrectly handle ROW_TIMESTAMP

2017-09-05 Thread ssa
PHOENIX-3406 CSV BulkLoad MR job incorrectly handle ROW_TIMESTAMP


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/41c05215
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/41c05215
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/41c05215

Branch: refs/heads/4.x-HBase-1.2
Commit: 41c05215ca6c4d06db352398065479d6a228b2d8
Parents: 3b8468e
Author: Sergey Soldatov 
Authored: Tue Oct 25 14:09:54 2016 -0700
Committer: Sergey Soldatov 
Committed: Tue Sep 5 12:45:59 2017 -0700

--
 .../phoenix/end2end/CsvBulkLoadToolIT.java  | 38 
 .../mapreduce/FormatToBytesWritableMapper.java  |  1 +
 .../mapreduce/FormatToKeyValueReducer.java  |  7 ++--
 3 files changed, 43 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/41c05215/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java
index 5a186a0..40fe900 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java
@@ -92,6 +92,44 @@ public class CsvBulkLoadToolIT extends BaseOwnClusterIT {
 rs.close();
 stmt.close();
 }
+@Test
+public void testImportWithRowTimestamp() throws Exception {
+
+Statement stmt = conn.createStatement();
+stmt.execute("CREATE TABLE S.TABLE9 (ID INTEGER NOT NULL , NAME 
VARCHAR, T DATE NOT NULL," +
+" " +
+"CONSTRAINT PK PRIMARY KEY (ID, T ROW_TIMESTAMP))");
+
+FileSystem fs = FileSystem.get(getUtility().getConfiguration());
+FSDataOutputStream outputStream = fs.create(new 
Path("/tmp/input1.csv"));
+PrintWriter printWriter = new PrintWriter(outputStream);
+printWriter.println("1,Name 1,1970/01/01");
+printWriter.println("2,Name 2,1971/01/01");
+printWriter.println("3,Name 2,1972/01/01");
+printWriter.close();
+
+CsvBulkLoadTool csvBulkLoadTool = new CsvBulkLoadTool();
+csvBulkLoadTool.setConf(new 
Configuration(getUtility().getConfiguration()));
+csvBulkLoadTool.getConf().set(DATE_FORMAT_ATTRIB,"/MM/dd");
+int exitCode = csvBulkLoadTool.run(new String[] {
+"--input", "/tmp/input1.csv",
+"--table", "table9",
+"--schema", "s",
+"--zookeeper", zkQuorum});
+assertEquals(0, exitCode);
+
+ResultSet rs = stmt.executeQuery("SELECT id, name, t FROM s.table9 
WHERE T < to_date" +
+"('1972-01-01') AND T > to_date('1970-01-01') ORDER BY id");
+assertTrue(rs.next());
+assertEquals(2, rs.getInt(1));
+assertEquals("Name 2", rs.getString(2));
+assertEquals(DateUtil.parseDate("1971-01-01"), rs.getDate(3));
+assertFalse(rs.next());
+
+rs.close();
+stmt.close();
+}
+
 
 @Test
 public void testImportWithTabs() throws Exception {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/41c05215/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToBytesWritableMapper.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToBytesWritableMapper.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToBytesWritableMapper.java
index 1dae981..360859e 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToBytesWritableMapper.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToBytesWritableMapper.java
@@ -314,6 +314,7 @@ public abstract class FormatToBytesWritableMapper 
extends Mapper

http://git-wip-us.apache.org/repos/asf/phoenix/blob/41c05215/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToKeyValueReducer.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToKeyValueReducer.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToKeyValueReducer.java
index 07cf285..72af1a7 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToKeyValueReducer.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToKeyValueReducer.java
@@ -144,6 +144,7 @@ public class FormatToKeyValueReducer
 DataInputStream input = new DataInputStream(new 
ByteArrayInputStream(aggregatedArray.get()));
 while (input.available() != 0) {
  

[1/4] phoenix git commit: PHOENIX-3406 CSV BulkLoad MR job incorrectly handle ROW_TIMESTAMP

2017-09-05 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 d81ad6a5f -> 7513d663f
  refs/heads/4.x-HBase-1.1 6fcf5bb2e -> cb1201620
  refs/heads/4.x-HBase-1.2 3b8468e27 -> 41c05215c
  refs/heads/master c8cbb5e5e -> cec7e1cf8


PHOENIX-3406 CSV BulkLoad MR job incorrectly handle ROW_TIMESTAMP


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/7513d663
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/7513d663
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/7513d663

Branch: refs/heads/4.x-HBase-0.98
Commit: 7513d663f74ae90c4e1f65066bfd2ffcb326e1e7
Parents: d81ad6a
Author: Sergey Soldatov 
Authored: Tue Oct 25 14:09:54 2016 -0700
Committer: Sergey Soldatov 
Committed: Tue Sep 5 12:45:23 2017 -0700

--
 .../phoenix/end2end/CsvBulkLoadToolIT.java  | 38 
 .../mapreduce/FormatToBytesWritableMapper.java  |  1 +
 .../mapreduce/FormatToKeyValueReducer.java  |  7 ++--
 3 files changed, 43 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/7513d663/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java
index 5a186a0..40fe900 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java
@@ -92,6 +92,44 @@ public class CsvBulkLoadToolIT extends BaseOwnClusterIT {
 rs.close();
 stmt.close();
 }
+@Test
+public void testImportWithRowTimestamp() throws Exception {
+
+Statement stmt = conn.createStatement();
+stmt.execute("CREATE TABLE S.TABLE9 (ID INTEGER NOT NULL , NAME 
VARCHAR, T DATE NOT NULL," +
+" " +
+"CONSTRAINT PK PRIMARY KEY (ID, T ROW_TIMESTAMP))");
+
+FileSystem fs = FileSystem.get(getUtility().getConfiguration());
+FSDataOutputStream outputStream = fs.create(new 
Path("/tmp/input1.csv"));
+PrintWriter printWriter = new PrintWriter(outputStream);
+printWriter.println("1,Name 1,1970/01/01");
+printWriter.println("2,Name 2,1971/01/01");
+printWriter.println("3,Name 2,1972/01/01");
+printWriter.close();
+
+CsvBulkLoadTool csvBulkLoadTool = new CsvBulkLoadTool();
+csvBulkLoadTool.setConf(new 
Configuration(getUtility().getConfiguration()));
+csvBulkLoadTool.getConf().set(DATE_FORMAT_ATTRIB,"/MM/dd");
+int exitCode = csvBulkLoadTool.run(new String[] {
+"--input", "/tmp/input1.csv",
+"--table", "table9",
+"--schema", "s",
+"--zookeeper", zkQuorum});
+assertEquals(0, exitCode);
+
+ResultSet rs = stmt.executeQuery("SELECT id, name, t FROM s.table9 
WHERE T < to_date" +
+"('1972-01-01') AND T > to_date('1970-01-01') ORDER BY id");
+assertTrue(rs.next());
+assertEquals(2, rs.getInt(1));
+assertEquals("Name 2", rs.getString(2));
+assertEquals(DateUtil.parseDate("1971-01-01"), rs.getDate(3));
+assertFalse(rs.next());
+
+rs.close();
+stmt.close();
+}
+
 
 @Test
 public void testImportWithTabs() throws Exception {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/7513d663/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToBytesWritableMapper.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToBytesWritableMapper.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToBytesWritableMapper.java
index 1dae981..360859e 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToBytesWritableMapper.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToBytesWritableMapper.java
@@ -314,6 +314,7 @@ public abstract class FormatToBytesWritableMapper 
extends Mapper

http://git-wip-us.apache.org/repos/asf/phoenix/blob/7513d663/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToKeyValueReducer.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToKeyValueReducer.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToKeyValueReducer.java
index 07cf285..72af1a7 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToKeyValueReducer.java
+++ 

[2/4] phoenix git commit: PHOENIX-3406 CSV BulkLoad MR job incorrectly handle ROW_TIMESTAMP

2017-09-05 Thread ssa
PHOENIX-3406 CSV BulkLoad MR job incorrectly handle ROW_TIMESTAMP


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/cb120162
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/cb120162
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/cb120162

Branch: refs/heads/4.x-HBase-1.1
Commit: cb12016206c0b589b9781d5cb06555ab276d7d9a
Parents: 6fcf5bb
Author: Sergey Soldatov 
Authored: Tue Oct 25 14:09:54 2016 -0700
Committer: Sergey Soldatov 
Committed: Tue Sep 5 12:45:46 2017 -0700

--
 .../phoenix/end2end/CsvBulkLoadToolIT.java  | 38 
 .../mapreduce/FormatToBytesWritableMapper.java  |  1 +
 .../mapreduce/FormatToKeyValueReducer.java  |  7 ++--
 3 files changed, 43 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/cb120162/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java
index 5a186a0..40fe900 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java
@@ -92,6 +92,44 @@ public class CsvBulkLoadToolIT extends BaseOwnClusterIT {
 rs.close();
 stmt.close();
 }
+@Test
+public void testImportWithRowTimestamp() throws Exception {
+
+Statement stmt = conn.createStatement();
+stmt.execute("CREATE TABLE S.TABLE9 (ID INTEGER NOT NULL , NAME 
VARCHAR, T DATE NOT NULL," +
+" " +
+"CONSTRAINT PK PRIMARY KEY (ID, T ROW_TIMESTAMP))");
+
+FileSystem fs = FileSystem.get(getUtility().getConfiguration());
+FSDataOutputStream outputStream = fs.create(new 
Path("/tmp/input1.csv"));
+PrintWriter printWriter = new PrintWriter(outputStream);
+printWriter.println("1,Name 1,1970/01/01");
+printWriter.println("2,Name 2,1971/01/01");
+printWriter.println("3,Name 2,1972/01/01");
+printWriter.close();
+
+CsvBulkLoadTool csvBulkLoadTool = new CsvBulkLoadTool();
+csvBulkLoadTool.setConf(new 
Configuration(getUtility().getConfiguration()));
+csvBulkLoadTool.getConf().set(DATE_FORMAT_ATTRIB,"/MM/dd");
+int exitCode = csvBulkLoadTool.run(new String[] {
+"--input", "/tmp/input1.csv",
+"--table", "table9",
+"--schema", "s",
+"--zookeeper", zkQuorum});
+assertEquals(0, exitCode);
+
+ResultSet rs = stmt.executeQuery("SELECT id, name, t FROM s.table9 
WHERE T < to_date" +
+"('1972-01-01') AND T > to_date('1970-01-01') ORDER BY id");
+assertTrue(rs.next());
+assertEquals(2, rs.getInt(1));
+assertEquals("Name 2", rs.getString(2));
+assertEquals(DateUtil.parseDate("1971-01-01"), rs.getDate(3));
+assertFalse(rs.next());
+
+rs.close();
+stmt.close();
+}
+
 
 @Test
 public void testImportWithTabs() throws Exception {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/cb120162/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToBytesWritableMapper.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToBytesWritableMapper.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToBytesWritableMapper.java
index 1dae981..360859e 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToBytesWritableMapper.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToBytesWritableMapper.java
@@ -314,6 +314,7 @@ public abstract class FormatToBytesWritableMapper 
extends Mapper

http://git-wip-us.apache.org/repos/asf/phoenix/blob/cb120162/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToKeyValueReducer.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToKeyValueReducer.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToKeyValueReducer.java
index 07cf285..72af1a7 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToKeyValueReducer.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/FormatToKeyValueReducer.java
@@ -144,6 +144,7 @@ public class FormatToKeyValueReducer
 DataInputStream input = new DataInputStream(new 
ByteArrayInputStream(aggregatedArray.get()));
 while (input.available() != 0) {
  

[1/4] phoenix git commit: PHOENIX-3960 PhoenixStorageHandler for Hive doesn't work from Spark jobs

2017-07-25 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 318dd2303 -> cab56f39d
  refs/heads/4.x-HBase-1.1 e9498bf47 -> 0a84cb8c9
  refs/heads/4.x-HBase-1.2 54c28d196 -> bf334b12e
  refs/heads/master a78811131 -> 9c458fa3d


PHOENIX-3960 PhoenixStorageHandler for Hive doesn't work from Spark jobs


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9c458fa3
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9c458fa3
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9c458fa3

Branch: refs/heads/master
Commit: 9c458fa3d3ecdeb17de5b717c26cfdea1608c358
Parents: a788111
Author: Sergey Soldatov 
Authored: Mon Jun 19 12:49:29 2017 -0700
Committer: Sergey Soldatov 
Committed: Tue Jul 25 14:17:16 2017 -0700

--
 .../apache/phoenix/hive/PhoenixStorageHandler.java| 14 +-
 1 file changed, 13 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/9c458fa3/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
index ae8f242..4e9f465 100644
--- 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
+++ 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
@@ -51,6 +51,7 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
+import java.util.UUID;
 
 /**
  * This class manages all the Phoenix/Hive table initial configurations and 
SerDe Election
@@ -110,6 +111,12 @@ public class PhoenixStorageHandler extends 
DefaultStorageHandler implements
 return PhoenixOutputFormat.class;
 }
 
+@SuppressWarnings("rawtypes")
+@Override
+public Class getInputFormatClass() {
+return PhoenixInputFormat.class;
+}
+
 @Override
 public void configureInputJobProperties(TableDesc tableDesc, Map
 jobProperties) {
@@ -182,7 +189,12 @@ public class PhoenixStorageHandler extends 
DefaultStorageHandler implements
 }
 SessionState sessionState = SessionState.get();
 
-String sessionId = sessionState.getSessionId();
+String sessionId;
+if(sessionState!= null) {
+sessionId = sessionState.getSessionId();
+}  else {
+sessionId = UUID.randomUUID().toString();
+}
 jobProperties.put(PhoenixConfigurationUtil.SESSION_ID, sessionId);
 jobProperties.put(PhoenixConfigurationUtil.INPUT_TABLE_NAME, 
tableName);
 jobProperties.put(PhoenixStorageHandlerConstants.ZOOKEEPER_QUORUM, 
tableProperties



[2/4] phoenix git commit: PHOENIX-3960 PhoenixStorageHandler for Hive doesn't work from Spark jobs

2017-07-25 Thread ssa
PHOENIX-3960 PhoenixStorageHandler for Hive doesn't work from Spark jobs


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/bf334b12
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/bf334b12
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/bf334b12

Branch: refs/heads/4.x-HBase-1.2
Commit: bf334b12e419d5ff3b32cbf32e9635dde0c90884
Parents: 54c28d1
Author: Sergey Soldatov 
Authored: Mon Jun 19 12:49:29 2017 -0700
Committer: Sergey Soldatov 
Committed: Tue Jul 25 14:17:49 2017 -0700

--
 .../apache/phoenix/hive/PhoenixStorageHandler.java| 14 +-
 1 file changed, 13 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/bf334b12/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
index ae8f242..4e9f465 100644
--- 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
+++ 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
@@ -51,6 +51,7 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
+import java.util.UUID;
 
 /**
  * This class manages all the Phoenix/Hive table initial configurations and 
SerDe Election
@@ -110,6 +111,12 @@ public class PhoenixStorageHandler extends 
DefaultStorageHandler implements
 return PhoenixOutputFormat.class;
 }
 
+@SuppressWarnings("rawtypes")
+@Override
+public Class getInputFormatClass() {
+return PhoenixInputFormat.class;
+}
+
 @Override
 public void configureInputJobProperties(TableDesc tableDesc, Map
 jobProperties) {
@@ -182,7 +189,12 @@ public class PhoenixStorageHandler extends 
DefaultStorageHandler implements
 }
 SessionState sessionState = SessionState.get();
 
-String sessionId = sessionState.getSessionId();
+String sessionId;
+if(sessionState!= null) {
+sessionId = sessionState.getSessionId();
+}  else {
+sessionId = UUID.randomUUID().toString();
+}
 jobProperties.put(PhoenixConfigurationUtil.SESSION_ID, sessionId);
 jobProperties.put(PhoenixConfigurationUtil.INPUT_TABLE_NAME, 
tableName);
 jobProperties.put(PhoenixStorageHandlerConstants.ZOOKEEPER_QUORUM, 
tableProperties



[4/4] phoenix git commit: PHOENIX-3960 PhoenixStorageHandler for Hive doesn't work from Spark jobs

2017-07-25 Thread ssa
PHOENIX-3960 PhoenixStorageHandler for Hive doesn't work from Spark jobs


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/cab56f39
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/cab56f39
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/cab56f39

Branch: refs/heads/4.x-HBase-0.98
Commit: cab56f39deb85168ac85d3e4100c313695be80be
Parents: 318dd23
Author: Sergey Soldatov 
Authored: Mon Jun 19 12:49:29 2017 -0700
Committer: Sergey Soldatov 
Committed: Tue Jul 25 14:48:29 2017 -0700

--
 .../apache/phoenix/hive/PhoenixStorageHandler.java| 14 +-
 1 file changed, 13 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/cab56f39/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
index ae8f242..4e9f465 100644
--- 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
+++ 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
@@ -51,6 +51,7 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
+import java.util.UUID;
 
 /**
  * This class manages all the Phoenix/Hive table initial configurations and 
SerDe Election
@@ -110,6 +111,12 @@ public class PhoenixStorageHandler extends 
DefaultStorageHandler implements
 return PhoenixOutputFormat.class;
 }
 
+@SuppressWarnings("rawtypes")
+@Override
+public Class getInputFormatClass() {
+return PhoenixInputFormat.class;
+}
+
 @Override
 public void configureInputJobProperties(TableDesc tableDesc, Map
 jobProperties) {
@@ -182,7 +189,12 @@ public class PhoenixStorageHandler extends 
DefaultStorageHandler implements
 }
 SessionState sessionState = SessionState.get();
 
-String sessionId = sessionState.getSessionId();
+String sessionId;
+if(sessionState!= null) {
+sessionId = sessionState.getSessionId();
+}  else {
+sessionId = UUID.randomUUID().toString();
+}
 jobProperties.put(PhoenixConfigurationUtil.SESSION_ID, sessionId);
 jobProperties.put(PhoenixConfigurationUtil.INPUT_TABLE_NAME, 
tableName);
 jobProperties.put(PhoenixStorageHandlerConstants.ZOOKEEPER_QUORUM, 
tableProperties



[3/4] phoenix git commit: PHOENIX-3960 PhoenixStorageHandler for Hive doesn't work from Spark jobs

2017-07-25 Thread ssa
PHOENIX-3960 PhoenixStorageHandler for Hive doesn't work from Spark jobs


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/0a84cb8c
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/0a84cb8c
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/0a84cb8c

Branch: refs/heads/4.x-HBase-1.1
Commit: 0a84cb8c920fa90602e2e1a8ccfbc524b99c
Parents: e9498bf
Author: Sergey Soldatov 
Authored: Mon Jun 19 12:49:29 2017 -0700
Committer: Sergey Soldatov 
Committed: Tue Jul 25 14:39:03 2017 -0700

--
 .../apache/phoenix/hive/PhoenixStorageHandler.java| 14 +-
 1 file changed, 13 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/0a84cb8c/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
index ae8f242..4e9f465 100644
--- 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
+++ 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
@@ -51,6 +51,7 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
+import java.util.UUID;
 
 /**
  * This class manages all the Phoenix/Hive table initial configurations and 
SerDe Election
@@ -110,6 +111,12 @@ public class PhoenixStorageHandler extends 
DefaultStorageHandler implements
 return PhoenixOutputFormat.class;
 }
 
+@SuppressWarnings("rawtypes")
+@Override
+public Class getInputFormatClass() {
+return PhoenixInputFormat.class;
+}
+
 @Override
 public void configureInputJobProperties(TableDesc tableDesc, Map
 jobProperties) {
@@ -182,7 +189,12 @@ public class PhoenixStorageHandler extends 
DefaultStorageHandler implements
 }
 SessionState sessionState = SessionState.get();
 
-String sessionId = sessionState.getSessionId();
+String sessionId;
+if(sessionState!= null) {
+sessionId = sessionState.getSessionId();
+}  else {
+sessionId = UUID.randomUUID().toString();
+}
 jobProperties.put(PhoenixConfigurationUtil.SESSION_ID, sessionId);
 jobProperties.put(PhoenixConfigurationUtil.INPUT_TABLE_NAME, 
tableName);
 jobProperties.put(PhoenixStorageHandlerConstants.ZOOKEEPER_QUORUM, 
tableProperties



[1/4] phoenix git commit: PHOENIX-3708 Tests introduced in PHOENIX-3346 doesn't work well with failsafe plugin

2017-06-10 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 ca550713f -> ca9c52dc6
  refs/heads/4.x-HBase-1.1 69db8b55b -> 7b9a87d7b
  refs/heads/4.x-HBase-1.2 4ab9d2208 -> b9f453703
  refs/heads/master 4c1181842 -> 616cd057d


PHOENIX-3708 Tests introduced in PHOENIX-3346 doesn't work well with failsafe 
plugin


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/616cd057
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/616cd057
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/616cd057

Branch: refs/heads/master
Commit: 616cd057d3c7d587aafe278948f8cff84efc9d29
Parents: 4c11818
Author: Sergey Soldatov 
Authored: Tue Mar 7 22:20:51 2017 -0800
Committer: Sergey Soldatov 
Committed: Sat Jun 10 10:09:25 2017 -0700

--
 phoenix-hive/pom.xml| 38 +---
 .../phoenix/hive/BaseHivePhoenixStoreIT.java|  2 +-
 .../apache/phoenix/hive/HiveMapReduceIT.java|  1 -
 .../org/apache/phoenix/hive/HiveTestUtil.java   |  9 +++--
 .../java/org/apache/phoenix/hive/HiveTezIT.java |  1 -
 .../phoenix/hive/query/PhoenixQueryBuilder.java |  4 +--
 6 files changed, 33 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/616cd057/phoenix-hive/pom.xml
--
diff --git a/phoenix-hive/pom.xml b/phoenix-hive/pom.xml
index 181e988..5e1f285 100644
--- a/phoenix-hive/pom.xml
+++ b/phoenix-hive/pom.xml
@@ -110,19 +110,31 @@
   hadoop-minicluster
   test
 
-  
-org.apache.tez
-tez-tests
-test
-0.8.4
-test-jar
-  
-  
-   org.apache.tez
-tez-dag
-test
-0.8.4
-  
+
+  org.apache.tez
+  tez-tests
+  test
+  0.8.4
+  test-jar
+  
+
+  org.apache.hadoop
+  hadoop-yarn-api
+
+  
+
+
+  org.apache.tez
+  tez-dag
+  test
+  0.8.4
+  
+
+  org.apache.hadoop
+  hadoop-yarn-api
+
+  
+
 
   org.mockito
   mockito-all

http://git-wip-us.apache.org/repos/asf/phoenix/blob/616cd057/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
--
diff --git 
a/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
index ac0a7fc..afb06ae 100644
--- 
a/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
+++ 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
@@ -86,7 +86,7 @@ public class BaseHivePhoenixStoreIT {
 }
 
 //Start HBase cluster
-hbaseCluster = hbaseTestUtil.startMiniCluster(3);
+hbaseCluster = hbaseTestUtil.startMiniCluster(1);
 MiniDFSCluster x = hbaseTestUtil.getDFSCluster();
 Class.forName(PhoenixDriver.class.getName());
 zkQuorum = "localhost:" + hbaseTestUtil.getZkCluster().getClientPort();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/616cd057/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
--
diff --git 
a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
index deaea6f..644ff24 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
@@ -23,7 +23,6 @@ import org.junit.BeforeClass;
 import org.junit.experimental.categories.Category;
 import org.junit.Ignore;
 
-@Ignore("Temporary disabled to run in regular build due strange 
incompatibility with failsafe plugion")
 @Category(NeedsOwnMiniClusterTest.class)
 public class HiveMapReduceIT extends HivePhoenixStoreIT {
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/616cd057/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
--
diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
index f5823ea..b4c4e46 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
@@ -299,16 +299,19 @@ public class HiveTestUtil {
 ());
 
 HadoopShims shims = ShimLoader.getHadoopShims();
-int numberOfDataNodes = 4;
+int numberOfDataNodes = 1;
 
 if 

[2/4] phoenix git commit: PHOENIX-3708 Tests introduced in PHOENIX-3346 doesn't work well with failsafe plugin

2017-06-10 Thread ssa
PHOENIX-3708 Tests introduced in PHOENIX-3346 doesn't work well with failsafe 
plugin


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/7b9a87d7
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/7b9a87d7
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/7b9a87d7

Branch: refs/heads/4.x-HBase-1.1
Commit: 7b9a87d7b77675636af80d2884bcb73cd5e7cc5c
Parents: 69db8b5
Author: Sergey Soldatov 
Authored: Tue Mar 7 22:20:51 2017 -0800
Committer: Sergey Soldatov 
Committed: Sat Jun 10 10:10:01 2017 -0700

--
 phoenix-hive/pom.xml| 38 +---
 .../phoenix/hive/BaseHivePhoenixStoreIT.java|  2 +-
 .../apache/phoenix/hive/HiveMapReduceIT.java|  1 -
 .../org/apache/phoenix/hive/HiveTestUtil.java   |  9 +++--
 .../java/org/apache/phoenix/hive/HiveTezIT.java |  1 -
 .../phoenix/hive/query/PhoenixQueryBuilder.java |  4 +--
 6 files changed, 33 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/7b9a87d7/phoenix-hive/pom.xml
--
diff --git a/phoenix-hive/pom.xml b/phoenix-hive/pom.xml
index 8950f5f..282de02 100644
--- a/phoenix-hive/pom.xml
+++ b/phoenix-hive/pom.xml
@@ -110,19 +110,31 @@
   hadoop-minicluster
   test
 
-  
-org.apache.tez
-tez-tests
-test
-0.8.4
-test-jar
-  
-  
-   org.apache.tez
-tez-dag
-test
-0.8.4
-  
+
+  org.apache.tez
+  tez-tests
+  test
+  0.8.4
+  test-jar
+  
+
+  org.apache.hadoop
+  hadoop-yarn-api
+
+  
+
+
+  org.apache.tez
+  tez-dag
+  test
+  0.8.4
+  
+
+  org.apache.hadoop
+  hadoop-yarn-api
+
+  
+
 
   org.mockito
   mockito-all

http://git-wip-us.apache.org/repos/asf/phoenix/blob/7b9a87d7/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
--
diff --git 
a/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
index ac0a7fc..afb06ae 100644
--- 
a/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
+++ 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
@@ -86,7 +86,7 @@ public class BaseHivePhoenixStoreIT {
 }
 
 //Start HBase cluster
-hbaseCluster = hbaseTestUtil.startMiniCluster(3);
+hbaseCluster = hbaseTestUtil.startMiniCluster(1);
 MiniDFSCluster x = hbaseTestUtil.getDFSCluster();
 Class.forName(PhoenixDriver.class.getName());
 zkQuorum = "localhost:" + hbaseTestUtil.getZkCluster().getClientPort();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/7b9a87d7/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
--
diff --git 
a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
index deaea6f..644ff24 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
@@ -23,7 +23,6 @@ import org.junit.BeforeClass;
 import org.junit.experimental.categories.Category;
 import org.junit.Ignore;
 
-@Ignore("Temporary disabled to run in regular build due strange 
incompatibility with failsafe plugion")
 @Category(NeedsOwnMiniClusterTest.class)
 public class HiveMapReduceIT extends HivePhoenixStoreIT {
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/7b9a87d7/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
--
diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
index f5823ea..b4c4e46 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
@@ -299,16 +299,19 @@ public class HiveTestUtil {
 ());
 
 HadoopShims shims = ShimLoader.getHadoopShims();
-int numberOfDataNodes = 4;
+int numberOfDataNodes = 1;
 
 if (clusterType != MiniClusterType.none) {
 dfs = shims.getMiniDfs(conf, numberOfDataNodes, true, null);
 FileSystem fs = dfs.getFileSystem();
 String uriString = 

[3/4] phoenix git commit: PHOENIX-3708 Tests introduced in PHOENIX-3346 doesn't work well with failsafe plugin

2017-06-10 Thread ssa
PHOENIX-3708 Tests introduced in PHOENIX-3346 doesn't work well with failsafe 
plugin


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ca9c52dc
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ca9c52dc
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ca9c52dc

Branch: refs/heads/4.x-HBase-0.98
Commit: ca9c52dc6d99fa4fc30664524d172617f43b972b
Parents: ca55071
Author: Sergey Soldatov 
Authored: Tue Mar 7 22:20:51 2017 -0800
Committer: Sergey Soldatov 
Committed: Sat Jun 10 10:10:19 2017 -0700

--
 phoenix-hive/pom.xml| 38 +---
 .../phoenix/hive/BaseHivePhoenixStoreIT.java|  2 +-
 .../apache/phoenix/hive/HiveMapReduceIT.java|  1 -
 .../org/apache/phoenix/hive/HiveTestUtil.java   |  9 +++--
 .../java/org/apache/phoenix/hive/HiveTezIT.java |  1 -
 .../phoenix/hive/query/PhoenixQueryBuilder.java |  4 +--
 6 files changed, 33 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ca9c52dc/phoenix-hive/pom.xml
--
diff --git a/phoenix-hive/pom.xml b/phoenix-hive/pom.xml
index c2dfd4c..d668910 100644
--- a/phoenix-hive/pom.xml
+++ b/phoenix-hive/pom.xml
@@ -110,19 +110,31 @@
   hadoop-minicluster
   test
 
-  
-org.apache.tez
-tez-tests
-test
-0.8.4
-test-jar
-  
-  
-   org.apache.tez
-tez-dag
-test
-0.8.4
-  
+
+  org.apache.tez
+  tez-tests
+  test
+  0.8.4
+  test-jar
+  
+
+  org.apache.hadoop
+  hadoop-yarn-api
+
+  
+
+
+  org.apache.tez
+  tez-dag
+  test
+  0.8.4
+  
+
+  org.apache.hadoop
+  hadoop-yarn-api
+
+  
+
 
   org.mockito
   mockito-all

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ca9c52dc/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
--
diff --git 
a/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
index ac0a7fc..afb06ae 100644
--- 
a/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
+++ 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
@@ -86,7 +86,7 @@ public class BaseHivePhoenixStoreIT {
 }
 
 //Start HBase cluster
-hbaseCluster = hbaseTestUtil.startMiniCluster(3);
+hbaseCluster = hbaseTestUtil.startMiniCluster(1);
 MiniDFSCluster x = hbaseTestUtil.getDFSCluster();
 Class.forName(PhoenixDriver.class.getName());
 zkQuorum = "localhost:" + hbaseTestUtil.getZkCluster().getClientPort();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ca9c52dc/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
--
diff --git 
a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
index deaea6f..644ff24 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
@@ -23,7 +23,6 @@ import org.junit.BeforeClass;
 import org.junit.experimental.categories.Category;
 import org.junit.Ignore;
 
-@Ignore("Temporary disabled to run in regular build due strange 
incompatibility with failsafe plugion")
 @Category(NeedsOwnMiniClusterTest.class)
 public class HiveMapReduceIT extends HivePhoenixStoreIT {
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ca9c52dc/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
--
diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
index f5823ea..b4c4e46 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
@@ -299,16 +299,19 @@ public class HiveTestUtil {
 ());
 
 HadoopShims shims = ShimLoader.getHadoopShims();
-int numberOfDataNodes = 4;
+int numberOfDataNodes = 1;
 
 if (clusterType != MiniClusterType.none) {
 dfs = shims.getMiniDfs(conf, numberOfDataNodes, true, null);
 FileSystem fs = dfs.getFileSystem();
 String uriString = 

[4/4] phoenix git commit: PHOENIX-3843 Improve logging for UNION ALL errors

2017-05-22 Thread ssa
PHOENIX-3843 Improve logging for UNION ALL errors


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/e746c225
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/e746c225
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/e746c225

Branch: refs/heads/4.x-HBase-1.2
Commit: e746c225eddd59936d1f21ba2f5814c2d683a5d1
Parents: 1089db9
Author: Sergey Soldatov 
Authored: Wed May 10 00:44:17 2017 -0700
Committer: Sergey Soldatov 
Committed: Mon May 22 11:00:06 2017 -0700

--
 .../apache/phoenix/compile/UnionCompiler.java   |  7 ++-
 .../phoenix/compile/QueryCompilerTest.java  | 47 
 2 files changed, 52 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/e746c225/phoenix-core/src/main/java/org/apache/phoenix/compile/UnionCompiler.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/UnionCompiler.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/UnionCompiler.java
index e5e18e3..c7f798c 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/UnionCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/UnionCompiler.java
@@ -58,7 +58,8 @@ public class UnionCompiler {
 for (QueryPlan plan : selectPlans) {
 if (columnCount !=plan.getProjector().getColumnCount()) {
 throw new SQLExceptionInfo.Builder(SQLExceptionCode
-.SELECT_COLUMN_NUM_IN_UNIONALL_DIFFS).setMessage(".")
+.SELECT_COLUMN_NUM_IN_UNIONALL_DIFFS).setMessage("1st 
query has " + columnCount + " columns whereas 2nd " +
+"query has " + 
plan.getProjector().getColumnCount())
 .build().buildException();
 }
 ColumnProjector colproj = 
plan.getProjector().getColumnProjector(i);
@@ -116,7 +117,9 @@ public class UnionCompiler {
 targetTypes.get(i).setType(type);
 } else {
 throw new SQLExceptionInfo.Builder(SQLExceptionCode
-.SELECT_COLUMN_TYPE_IN_UNIONALL_DIFFS).setMessage(".")
+.SELECT_COLUMN_TYPE_IN_UNIONALL_DIFFS).setMessage("Column # " 
+ i + " is "
++ targetTypes.get(i).getType().getSqlTypeName() + " in 1st 
query where as it is "
++ type.getSqlTypeName() + " in 2nd query")
 .build().buildException();
 }
 Integer len = expression.getMaxLength();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/e746c225/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
index 4bc7d2b..9d0e3d2 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
@@ -4134,4 +4134,51 @@ public class QueryCompilerTest extends 
BaseConnectionlessQueryTest {
 }
 }
 }
+
+@Test
+public void testUnionDifferentColumnNumber() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+Statement statement = conn.createStatement();
+try {
+String create = "CREATE TABLE s.t1 (k integer not null primary 
key, f1.v1 varchar, f1.v2 varchar, " +
+"f2.v3 varchar, v4 varchar)";
+statement.execute(create);
+create = "CREATE TABLE s.t2 (k integer not null primary key, f1.v1 
varchar, f1.v2 varchar, f2.v3 varchar)";
+statement.execute(create);
+String query = "SELECT *  FROM s.t1 UNION ALL select * FROM s.t2";
+statement.executeQuery(query);
+fail("Should fail with different column numbers ");
+} catch (SQLException e) {
+assertEquals(e.getMessage(), "ERROR 525 (42902): SELECT column 
number differs in a Union All query " +
+"is not allowed. 1st query has 5 columns whereas 2nd query 
has 4");
+} finally {
+statement.execute("DROP TABLE IF EXISTS s.t1");
+statement.execute("DROP TABLE IF EXISTS s.t2");
+conn.close();
+}
+}
+
+@Test
+public void testUnionDifferentColumnType() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+Statement statement = conn.createStatement();
+try {
+String create = "CREATE TABLE s.t1 (k integer not null primary 
key, 

[1/4] phoenix git commit: PHOENIX-3843 Improve logging for UNION ALL errors

2017-05-22 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 655e2d33d -> f5a2f6d76
  refs/heads/4.x-HBase-1.1 3eddc4a41 -> da5d33b08
  refs/heads/4.x-HBase-1.2 1089db92b -> e746c225e
  refs/heads/master bd2751266 -> e27fae931


PHOENIX-3843 Improve logging for UNION ALL errors


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/e27fae93
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/e27fae93
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/e27fae93

Branch: refs/heads/master
Commit: e27fae93191d9540574765065900a23bda90905f
Parents: bd27512
Author: Sergey Soldatov 
Authored: Wed May 10 00:44:17 2017 -0700
Committer: Sergey Soldatov 
Committed: Mon May 22 10:57:56 2017 -0700

--
 .../apache/phoenix/compile/UnionCompiler.java   |  7 ++-
 .../phoenix/compile/QueryCompilerTest.java  | 47 
 2 files changed, 52 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/e27fae93/phoenix-core/src/main/java/org/apache/phoenix/compile/UnionCompiler.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/UnionCompiler.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/UnionCompiler.java
index e5e18e3..c7f798c 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/UnionCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/UnionCompiler.java
@@ -58,7 +58,8 @@ public class UnionCompiler {
 for (QueryPlan plan : selectPlans) {
 if (columnCount !=plan.getProjector().getColumnCount()) {
 throw new SQLExceptionInfo.Builder(SQLExceptionCode
-.SELECT_COLUMN_NUM_IN_UNIONALL_DIFFS).setMessage(".")
+.SELECT_COLUMN_NUM_IN_UNIONALL_DIFFS).setMessage("1st 
query has " + columnCount + " columns whereas 2nd " +
+"query has " + 
plan.getProjector().getColumnCount())
 .build().buildException();
 }
 ColumnProjector colproj = 
plan.getProjector().getColumnProjector(i);
@@ -116,7 +117,9 @@ public class UnionCompiler {
 targetTypes.get(i).setType(type);
 } else {
 throw new SQLExceptionInfo.Builder(SQLExceptionCode
-.SELECT_COLUMN_TYPE_IN_UNIONALL_DIFFS).setMessage(".")
+.SELECT_COLUMN_TYPE_IN_UNIONALL_DIFFS).setMessage("Column # " 
+ i + " is "
++ targetTypes.get(i).getType().getSqlTypeName() + " in 1st 
query where as it is "
++ type.getSqlTypeName() + " in 2nd query")
 .build().buildException();
 }
 Integer len = expression.getMaxLength();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/e27fae93/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
index 4bc7d2b..9d0e3d2 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
@@ -4134,4 +4134,51 @@ public class QueryCompilerTest extends 
BaseConnectionlessQueryTest {
 }
 }
 }
+
+@Test
+public void testUnionDifferentColumnNumber() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+Statement statement = conn.createStatement();
+try {
+String create = "CREATE TABLE s.t1 (k integer not null primary 
key, f1.v1 varchar, f1.v2 varchar, " +
+"f2.v3 varchar, v4 varchar)";
+statement.execute(create);
+create = "CREATE TABLE s.t2 (k integer not null primary key, f1.v1 
varchar, f1.v2 varchar, f2.v3 varchar)";
+statement.execute(create);
+String query = "SELECT *  FROM s.t1 UNION ALL select * FROM s.t2";
+statement.executeQuery(query);
+fail("Should fail with different column numbers ");
+} catch (SQLException e) {
+assertEquals(e.getMessage(), "ERROR 525 (42902): SELECT column 
number differs in a Union All query " +
+"is not allowed. 1st query has 5 columns whereas 2nd query 
has 4");
+} finally {
+statement.execute("DROP TABLE IF EXISTS s.t1");
+statement.execute("DROP TABLE IF EXISTS s.t2");
+conn.close();
+}
+}
+
+@Test
+public void testUnionDifferentColumnType() throws 

[3/4] phoenix git commit: PHOENIX-3843 Improve logging for UNION ALL errors

2017-05-22 Thread ssa
PHOENIX-3843 Improve logging for UNION ALL errors


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/da5d33b0
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/da5d33b0
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/da5d33b0

Branch: refs/heads/4.x-HBase-1.1
Commit: da5d33b0822fa7b29251709c92e754e3f8a0cf7c
Parents: 3eddc4a
Author: Sergey Soldatov 
Authored: Wed May 10 00:44:17 2017 -0700
Committer: Sergey Soldatov 
Committed: Mon May 22 10:59:30 2017 -0700

--
 .../apache/phoenix/compile/UnionCompiler.java   |  7 ++-
 .../phoenix/compile/QueryCompilerTest.java  | 47 
 2 files changed, 52 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/da5d33b0/phoenix-core/src/main/java/org/apache/phoenix/compile/UnionCompiler.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/UnionCompiler.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/UnionCompiler.java
index e5e18e3..c7f798c 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/UnionCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/UnionCompiler.java
@@ -58,7 +58,8 @@ public class UnionCompiler {
 for (QueryPlan plan : selectPlans) {
 if (columnCount !=plan.getProjector().getColumnCount()) {
 throw new SQLExceptionInfo.Builder(SQLExceptionCode
-.SELECT_COLUMN_NUM_IN_UNIONALL_DIFFS).setMessage(".")
+.SELECT_COLUMN_NUM_IN_UNIONALL_DIFFS).setMessage("1st 
query has " + columnCount + " columns whereas 2nd " +
+"query has " + 
plan.getProjector().getColumnCount())
 .build().buildException();
 }
 ColumnProjector colproj = 
plan.getProjector().getColumnProjector(i);
@@ -116,7 +117,9 @@ public class UnionCompiler {
 targetTypes.get(i).setType(type);
 } else {
 throw new SQLExceptionInfo.Builder(SQLExceptionCode
-.SELECT_COLUMN_TYPE_IN_UNIONALL_DIFFS).setMessage(".")
+.SELECT_COLUMN_TYPE_IN_UNIONALL_DIFFS).setMessage("Column # " 
+ i + " is "
++ targetTypes.get(i).getType().getSqlTypeName() + " in 1st 
query where as it is "
++ type.getSqlTypeName() + " in 2nd query")
 .build().buildException();
 }
 Integer len = expression.getMaxLength();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/da5d33b0/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java 
b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
index 4bc7d2b..9d0e3d2 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
@@ -4134,4 +4134,51 @@ public class QueryCompilerTest extends 
BaseConnectionlessQueryTest {
 }
 }
 }
+
+@Test
+public void testUnionDifferentColumnNumber() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+Statement statement = conn.createStatement();
+try {
+String create = "CREATE TABLE s.t1 (k integer not null primary 
key, f1.v1 varchar, f1.v2 varchar, " +
+"f2.v3 varchar, v4 varchar)";
+statement.execute(create);
+create = "CREATE TABLE s.t2 (k integer not null primary key, f1.v1 
varchar, f1.v2 varchar, f2.v3 varchar)";
+statement.execute(create);
+String query = "SELECT *  FROM s.t1 UNION ALL select * FROM s.t2";
+statement.executeQuery(query);
+fail("Should fail with different column numbers ");
+} catch (SQLException e) {
+assertEquals(e.getMessage(), "ERROR 525 (42902): SELECT column 
number differs in a Union All query " +
+"is not allowed. 1st query has 5 columns whereas 2nd query 
has 4");
+} finally {
+statement.execute("DROP TABLE IF EXISTS s.t1");
+statement.execute("DROP TABLE IF EXISTS s.t2");
+conn.close();
+}
+}
+
+@Test
+public void testUnionDifferentColumnType() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+Statement statement = conn.createStatement();
+try {
+String create = "CREATE TABLE s.t1 (k integer not null primary 
key, 

[2/4] phoenix git commit: PHOENIX-3708 temporary disable tests introduced in PHOENIX-3346

2017-03-02 Thread ssa
PHOENIX-3708 temporary disable tests introduced in PHOENIX-3346


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/7ff760fd
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/7ff760fd
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/7ff760fd

Branch: refs/heads/4.x-HBase-0.98
Commit: 7ff760fd5df9cd9b99ad72c68b339bc80f29adcf
Parents: 596f02e
Author: Sergey Soldatov 
Authored: Thu Mar 2 16:13:50 2017 -0800
Committer: Sergey Soldatov 
Committed: Thu Mar 2 16:17:05 2017 -0800

--
 .../src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java   | 2 ++
 phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java| 2 ++
 2 files changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/7ff760fd/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
--
diff --git 
a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
index 7203597..deaea6f 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
@@ -21,7 +21,9 @@ package org.apache.phoenix.hive;
 import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
 import org.junit.BeforeClass;
 import org.junit.experimental.categories.Category;
+import org.junit.Ignore;
 
+@Ignore("Temporary disabled to run in regular build due strange 
incompatibility with failsafe plugion")
 @Category(NeedsOwnMiniClusterTest.class)
 public class HiveMapReduceIT extends HivePhoenixStoreIT {
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/7ff760fd/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
--
diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
index a675a0e..8d2848d 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
@@ -21,7 +21,9 @@ package org.apache.phoenix.hive;
 import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
 import org.junit.BeforeClass;
 import org.junit.experimental.categories.Category;
+import org.junit.Ignore;
 
+@Ignore("Temporary disabled to run in regular build due strange 
incompatibility with failsafe plugion")
 @Category(NeedsOwnMiniClusterTest.class)
 public class HiveTezIT extends HivePhoenixStoreIT {
 



[4/4] phoenix git commit: PHOENIX-3708 temporary disable tests introduced in PHOENIX-3346

2017-03-02 Thread ssa
PHOENIX-3708 temporary disable tests introduced in PHOENIX-3346


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/fad9c55e
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/fad9c55e
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/fad9c55e

Branch: refs/heads/4.x-HBase-1.3
Commit: fad9c55e7cf1a667561c999090511b1421e3f4a1
Parents: 966b13b
Author: Sergey Soldatov 
Authored: Thu Mar 2 16:13:50 2017 -0800
Committer: Sergey Soldatov 
Committed: Thu Mar 2 16:17:33 2017 -0800

--
 .../src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java   | 2 ++
 phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java| 2 ++
 2 files changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/fad9c55e/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
--
diff --git 
a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
index 7203597..deaea6f 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
@@ -21,7 +21,9 @@ package org.apache.phoenix.hive;
 import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
 import org.junit.BeforeClass;
 import org.junit.experimental.categories.Category;
+import org.junit.Ignore;
 
+@Ignore("Temporary disabled to run in regular build due strange 
incompatibility with failsafe plugion")
 @Category(NeedsOwnMiniClusterTest.class)
 public class HiveMapReduceIT extends HivePhoenixStoreIT {
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/fad9c55e/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
--
diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
index a675a0e..8d2848d 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
@@ -21,7 +21,9 @@ package org.apache.phoenix.hive;
 import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
 import org.junit.BeforeClass;
 import org.junit.experimental.categories.Category;
+import org.junit.Ignore;
 
+@Ignore("Temporary disabled to run in regular build due strange 
incompatibility with failsafe plugion")
 @Category(NeedsOwnMiniClusterTest.class)
 public class HiveTezIT extends HivePhoenixStoreIT {
 



[3/4] phoenix git commit: PHOENIX-3708 temporary disable tests introduced in PHOENIX-3346

2017-03-02 Thread ssa
PHOENIX-3708 temporary disable tests introduced in PHOENIX-3346


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b60bcd89
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b60bcd89
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b60bcd89

Branch: refs/heads/4.x-HBase-1.1
Commit: b60bcd89b462e11dcbf1ce1f6d194de616e2244c
Parents: 93a3412
Author: Sergey Soldatov 
Authored: Thu Mar 2 16:13:50 2017 -0800
Committer: Sergey Soldatov 
Committed: Thu Mar 2 16:17:27 2017 -0800

--
 .../src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java   | 2 ++
 phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java| 2 ++
 2 files changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b60bcd89/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
--
diff --git 
a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
index 7203597..deaea6f 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
@@ -21,7 +21,9 @@ package org.apache.phoenix.hive;
 import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
 import org.junit.BeforeClass;
 import org.junit.experimental.categories.Category;
+import org.junit.Ignore;
 
+@Ignore("Temporary disabled to run in regular build due strange 
incompatibility with failsafe plugion")
 @Category(NeedsOwnMiniClusterTest.class)
 public class HiveMapReduceIT extends HivePhoenixStoreIT {
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b60bcd89/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
--
diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
index a675a0e..8d2848d 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
@@ -21,7 +21,9 @@ package org.apache.phoenix.hive;
 import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
 import org.junit.BeforeClass;
 import org.junit.experimental.categories.Category;
+import org.junit.Ignore;
 
+@Ignore("Temporary disabled to run in regular build due strange 
incompatibility with failsafe plugion")
 @Category(NeedsOwnMiniClusterTest.class)
 public class HiveTezIT extends HivePhoenixStoreIT {
 



[1/4] phoenix git commit: PHOENIX-3708 temporary disable tests introduced in PHOENIX-3346

2017-03-02 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 596f02e2e -> 7ff760fd5
  refs/heads/4.x-HBase-1.1 93a3412d8 -> b60bcd89b
  refs/heads/4.x-HBase-1.3 966b13bd0 -> fad9c55e7
  refs/heads/master 450766e79 -> cf65fb27e


PHOENIX-3708 temporary disable tests introduced in PHOENIX-3346


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/cf65fb27
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/cf65fb27
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/cf65fb27

Branch: refs/heads/master
Commit: cf65fb27edf6291500e3f7e7549c4b83240f
Parents: 450766e
Author: Sergey Soldatov 
Authored: Thu Mar 2 16:13:50 2017 -0800
Committer: Sergey Soldatov 
Committed: Thu Mar 2 16:16:43 2017 -0800

--
 .../src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java   | 2 ++
 phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java| 2 ++
 2 files changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/cf65fb27/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
--
diff --git 
a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
index 7203597..deaea6f 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
@@ -21,7 +21,9 @@ package org.apache.phoenix.hive;
 import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
 import org.junit.BeforeClass;
 import org.junit.experimental.categories.Category;
+import org.junit.Ignore;
 
+@Ignore("Temporary disabled to run in regular build due strange 
incompatibility with failsafe plugion")
 @Category(NeedsOwnMiniClusterTest.class)
 public class HiveMapReduceIT extends HivePhoenixStoreIT {
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/cf65fb27/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
--
diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
index a675a0e..8d2848d 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
@@ -21,7 +21,9 @@ package org.apache.phoenix.hive;
 import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
 import org.junit.BeforeClass;
 import org.junit.experimental.categories.Category;
+import org.junit.Ignore;
 
+@Ignore("Temporary disabled to run in regular build due strange 
incompatibility with failsafe plugion")
 @Category(NeedsOwnMiniClusterTest.class)
 public class HiveTezIT extends HivePhoenixStoreIT {
 



[1/3] phoenix git commit: Fixed UT for PHOENIX-3346 Hive PhoenixStorageHandler doesn't work well with column

2017-03-02 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 321b62edd -> 596f02e2e
  refs/heads/4.x-HBase-1.1 91a436dd3 -> f6f4930a7
  refs/heads/4.x-HBase-1.3 4b5291d29 -> 966b13bd0


Fixed UT for PHOENIX-3346 Hive PhoenixStorageHandler doesn't work well with 
column


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f6f4930a
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f6f4930a
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f6f4930a

Branch: refs/heads/4.x-HBase-1.1
Commit: f6f4930a71c12db081d2511ee24eeb3d81314643
Parents: 91a436d
Author: Sergey Soldatov 
Authored: Wed Mar 1 15:41:49 2017 -0800
Committer: Sergey Soldatov 
Committed: Thu Mar 2 12:53:26 2017 -0800

--
 .../hive/query/PhoenixQueryBuilderTest.java   | 18 +-
 1 file changed, 9 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f6f4930a/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
--
diff --git 
a/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
 
b/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
index e4f872e..bc2cbe3 100644
--- 
a/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
+++ 
b/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
@@ -102,7 +102,7 @@ public class PhoenixQueryBuilderTest {
 new Object[]{"CHAR1", "CHAR2", "CHAR3"}, COLUMN_CHAR, 
"char(10)", true)
 );
 
-assertEquals(expectedQueryPrefix + "Column_Char not in ('CHAR1', 
'CHAR2', 'CHAR3')",
+assertEquals(expectedQueryPrefix + "\"Column_Char\" not in ('CHAR1', 
'CHAR2', 'CHAR3')",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 
 searchConditions = Lists.newArrayList(
@@ -118,7 +118,7 @@ public class PhoenixQueryBuilderTest {
 new Object[]{"CHAR1", "CHAR2"}, COLUMN_CHAR, 
"char(10)", true)
 );
 
-assertEquals(expectedQueryPrefix + "Column_Char not between 'CHAR1' 
and 'CHAR2'",
+assertEquals(expectedQueryPrefix + "\"Column_Char\" not between 
'CHAR1' and 'CHAR2'",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 }
 
@@ -126,8 +126,8 @@ public class PhoenixQueryBuilderTest {
 public void testBuildBetweenQueryWithDateColumns() throws IOException {
 final String COLUMN_DATE = "Column_Date";
 final String tableName = "TEST_TABLE";
-final String expectedQueryPrefix = "select /*+ NO_CACHE  */ " + 
COLUMN_DATE +
-" from " + tableName + " where ";
+final String expectedQueryPrefix = "select /*+ NO_CACHE  */ \"" + 
COLUMN_DATE +
+"\" from " + tableName + " where ";
 
 JobConf jobConf = new JobConf();
 List readColumnList = Lists.newArrayList(COLUMN_DATE);
@@ -138,7 +138,7 @@ public class PhoenixQueryBuilderTest {
 );
 
 assertEquals(expectedQueryPrefix +
-COLUMN_DATE + " between to_date('1992-01-02') and 
to_date('1992-02-02')",
+"\"" + COLUMN_DATE + "\" between to_date('1992-01-02') 
and to_date('1992-02-02')",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 
 searchConditions = Lists.newArrayList(
@@ -147,7 +147,7 @@ public class PhoenixQueryBuilderTest {
 );
 
 assertEquals(expectedQueryPrefix +
-COLUMN_DATE + " not between to_date('1992-01-02') and 
to_date('1992-02-02')",
+"\"" + COLUMN_DATE + "\" not between 
to_date('1992-01-02') and to_date('1992-02-02')",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 }
 
@@ -155,8 +155,8 @@ public class PhoenixQueryBuilderTest {
 public void testBuildQueryWithNotNull() throws IOException {
 final String COLUMN_DATE = "Column_Date";
 final String tableName = "TEST_TABLE";
-final String expectedQueryPrefix = "select /*+ NO_CACHE  */ " + 
COLUMN_DATE +
-" from " + tableName + " where ";
+final String expectedQueryPrefix = "select /*+ NO_CACHE  */ \"" + 
COLUMN_DATE +
+"\" from " + tableName + " where ";
 
 JobConf jobConf = new JobConf();
 List readColumnList = Lists.newArrayList(COLUMN_DATE);
@@ -167,7 +167,7 @@ public class PhoenixQueryBuilderTest {
 );
 
 assertEquals(expectedQueryPrefix +
-COLUMN_DATE + " is not null ",
+ 

[3/3] phoenix git commit: Fixed UT for PHOENIX-3346 Hive PhoenixStorageHandler doesn't work well with column

2017-03-02 Thread ssa
Fixed UT for PHOENIX-3346 Hive PhoenixStorageHandler doesn't work well with 
column


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/966b13bd
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/966b13bd
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/966b13bd

Branch: refs/heads/4.x-HBase-1.3
Commit: 966b13bd0ae4eb670360ce0c8ae4501f2d61fa4a
Parents: 4b5291d
Author: Sergey Soldatov 
Authored: Wed Mar 1 15:41:49 2017 -0800
Committer: Sergey Soldatov 
Committed: Thu Mar 2 12:54:08 2017 -0800

--
 .../hive/query/PhoenixQueryBuilderTest.java   | 18 +-
 1 file changed, 9 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/966b13bd/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
--
diff --git 
a/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
 
b/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
index e4f872e..bc2cbe3 100644
--- 
a/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
+++ 
b/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
@@ -102,7 +102,7 @@ public class PhoenixQueryBuilderTest {
 new Object[]{"CHAR1", "CHAR2", "CHAR3"}, COLUMN_CHAR, 
"char(10)", true)
 );
 
-assertEquals(expectedQueryPrefix + "Column_Char not in ('CHAR1', 
'CHAR2', 'CHAR3')",
+assertEquals(expectedQueryPrefix + "\"Column_Char\" not in ('CHAR1', 
'CHAR2', 'CHAR3')",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 
 searchConditions = Lists.newArrayList(
@@ -118,7 +118,7 @@ public class PhoenixQueryBuilderTest {
 new Object[]{"CHAR1", "CHAR2"}, COLUMN_CHAR, 
"char(10)", true)
 );
 
-assertEquals(expectedQueryPrefix + "Column_Char not between 'CHAR1' 
and 'CHAR2'",
+assertEquals(expectedQueryPrefix + "\"Column_Char\" not between 
'CHAR1' and 'CHAR2'",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 }
 
@@ -126,8 +126,8 @@ public class PhoenixQueryBuilderTest {
 public void testBuildBetweenQueryWithDateColumns() throws IOException {
 final String COLUMN_DATE = "Column_Date";
 final String tableName = "TEST_TABLE";
-final String expectedQueryPrefix = "select /*+ NO_CACHE  */ " + 
COLUMN_DATE +
-" from " + tableName + " where ";
+final String expectedQueryPrefix = "select /*+ NO_CACHE  */ \"" + 
COLUMN_DATE +
+"\" from " + tableName + " where ";
 
 JobConf jobConf = new JobConf();
 List readColumnList = Lists.newArrayList(COLUMN_DATE);
@@ -138,7 +138,7 @@ public class PhoenixQueryBuilderTest {
 );
 
 assertEquals(expectedQueryPrefix +
-COLUMN_DATE + " between to_date('1992-01-02') and 
to_date('1992-02-02')",
+"\"" + COLUMN_DATE + "\" between to_date('1992-01-02') 
and to_date('1992-02-02')",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 
 searchConditions = Lists.newArrayList(
@@ -147,7 +147,7 @@ public class PhoenixQueryBuilderTest {
 );
 
 assertEquals(expectedQueryPrefix +
-COLUMN_DATE + " not between to_date('1992-01-02') and 
to_date('1992-02-02')",
+"\"" + COLUMN_DATE + "\" not between 
to_date('1992-01-02') and to_date('1992-02-02')",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 }
 
@@ -155,8 +155,8 @@ public class PhoenixQueryBuilderTest {
 public void testBuildQueryWithNotNull() throws IOException {
 final String COLUMN_DATE = "Column_Date";
 final String tableName = "TEST_TABLE";
-final String expectedQueryPrefix = "select /*+ NO_CACHE  */ " + 
COLUMN_DATE +
-" from " + tableName + " where ";
+final String expectedQueryPrefix = "select /*+ NO_CACHE  */ \"" + 
COLUMN_DATE +
+"\" from " + tableName + " where ";
 
 JobConf jobConf = new JobConf();
 List readColumnList = Lists.newArrayList(COLUMN_DATE);
@@ -167,7 +167,7 @@ public class PhoenixQueryBuilderTest {
 );
 
 assertEquals(expectedQueryPrefix +
-COLUMN_DATE + " is not null ",
+"\"" + COLUMN_DATE + "\" is not null ",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 }
 }



[phoenix] Git Push Summary

2017-03-02 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-Hbase-1.3 [deleted] 2bdaf125f


[phoenix] Git Push Summary

2017-03-02 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-Hbase-0.98 [deleted] bf5495e49


[phoenix] Git Push Summary

2017-03-02 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-Hbase-1.1 [deleted] e46cd90dd


[1/4] phoenix git commit: Fixed UT for PHOENIX-3346 Hive PhoenixStorageHandler doesn't work well with column

2017-03-01 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-Hbase-0.98 [created] bf5495e49
  refs/heads/4.x-Hbase-1.1 [created] e46cd90dd
  refs/heads/4.x-Hbase-1.3 [created] 2bdaf125f
  refs/heads/master 1e2a9675c -> 9ba564bbd


Fixed UT for PHOENIX-3346 Hive PhoenixStorageHandler doesn't work well with 
column


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9ba564bb
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9ba564bb
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9ba564bb

Branch: refs/heads/master
Commit: 9ba564bbd972284554ceec18e34016ac458997b2
Parents: 1e2a967
Author: Sergey Soldatov 
Authored: Wed Mar 1 15:41:49 2017 -0800
Committer: Sergey Soldatov 
Committed: Wed Mar 1 16:02:38 2017 -0800

--
 .../hive/query/PhoenixQueryBuilderTest.java   | 18 +-
 1 file changed, 9 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/9ba564bb/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
--
diff --git 
a/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
 
b/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
index e4f872e..bc2cbe3 100644
--- 
a/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
+++ 
b/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
@@ -102,7 +102,7 @@ public class PhoenixQueryBuilderTest {
 new Object[]{"CHAR1", "CHAR2", "CHAR3"}, COLUMN_CHAR, 
"char(10)", true)
 );
 
-assertEquals(expectedQueryPrefix + "Column_Char not in ('CHAR1', 
'CHAR2', 'CHAR3')",
+assertEquals(expectedQueryPrefix + "\"Column_Char\" not in ('CHAR1', 
'CHAR2', 'CHAR3')",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 
 searchConditions = Lists.newArrayList(
@@ -118,7 +118,7 @@ public class PhoenixQueryBuilderTest {
 new Object[]{"CHAR1", "CHAR2"}, COLUMN_CHAR, 
"char(10)", true)
 );
 
-assertEquals(expectedQueryPrefix + "Column_Char not between 'CHAR1' 
and 'CHAR2'",
+assertEquals(expectedQueryPrefix + "\"Column_Char\" not between 
'CHAR1' and 'CHAR2'",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 }
 
@@ -126,8 +126,8 @@ public class PhoenixQueryBuilderTest {
 public void testBuildBetweenQueryWithDateColumns() throws IOException {
 final String COLUMN_DATE = "Column_Date";
 final String tableName = "TEST_TABLE";
-final String expectedQueryPrefix = "select /*+ NO_CACHE  */ " + 
COLUMN_DATE +
-" from " + tableName + " where ";
+final String expectedQueryPrefix = "select /*+ NO_CACHE  */ \"" + 
COLUMN_DATE +
+"\" from " + tableName + " where ";
 
 JobConf jobConf = new JobConf();
 List readColumnList = Lists.newArrayList(COLUMN_DATE);
@@ -138,7 +138,7 @@ public class PhoenixQueryBuilderTest {
 );
 
 assertEquals(expectedQueryPrefix +
-COLUMN_DATE + " between to_date('1992-01-02') and 
to_date('1992-02-02')",
+"\"" + COLUMN_DATE + "\" between to_date('1992-01-02') 
and to_date('1992-02-02')",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 
 searchConditions = Lists.newArrayList(
@@ -147,7 +147,7 @@ public class PhoenixQueryBuilderTest {
 );
 
 assertEquals(expectedQueryPrefix +
-COLUMN_DATE + " not between to_date('1992-01-02') and 
to_date('1992-02-02')",
+"\"" + COLUMN_DATE + "\" not between 
to_date('1992-01-02') and to_date('1992-02-02')",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 }
 
@@ -155,8 +155,8 @@ public class PhoenixQueryBuilderTest {
 public void testBuildQueryWithNotNull() throws IOException {
 final String COLUMN_DATE = "Column_Date";
 final String tableName = "TEST_TABLE";
-final String expectedQueryPrefix = "select /*+ NO_CACHE  */ " + 
COLUMN_DATE +
-" from " + tableName + " where ";
+final String expectedQueryPrefix = "select /*+ NO_CACHE  */ \"" + 
COLUMN_DATE +
+"\" from " + tableName + " where ";
 
 JobConf jobConf = new JobConf();
 List readColumnList = Lists.newArrayList(COLUMN_DATE);
@@ -167,7 +167,7 @@ public class PhoenixQueryBuilderTest {
 );
 
 assertEquals(expectedQueryPrefix +
-COLUMN_DATE + " 

[3/4] phoenix git commit: Fixed UT for PHOENIX-3346 Hive PhoenixStorageHandler doesn't work well with column

2017-03-01 Thread ssa
Fixed UT for PHOENIX-3346 Hive PhoenixStorageHandler doesn't work well with 
column


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/e46cd90d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/e46cd90d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/e46cd90d

Branch: refs/heads/4.x-Hbase-1.1
Commit: e46cd90dd79d3c23872270d1043ba43186ae84ac
Parents: 916c4c4
Author: Sergey Soldatov 
Authored: Wed Mar 1 15:41:49 2017 -0800
Committer: Sergey Soldatov 
Committed: Wed Mar 1 16:03:38 2017 -0800

--
 .../hive/query/PhoenixQueryBuilderTest.java   | 18 +-
 1 file changed, 9 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/e46cd90d/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
--
diff --git 
a/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
 
b/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
index e4f872e..bc2cbe3 100644
--- 
a/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
+++ 
b/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
@@ -102,7 +102,7 @@ public class PhoenixQueryBuilderTest {
 new Object[]{"CHAR1", "CHAR2", "CHAR3"}, COLUMN_CHAR, 
"char(10)", true)
 );
 
-assertEquals(expectedQueryPrefix + "Column_Char not in ('CHAR1', 
'CHAR2', 'CHAR3')",
+assertEquals(expectedQueryPrefix + "\"Column_Char\" not in ('CHAR1', 
'CHAR2', 'CHAR3')",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 
 searchConditions = Lists.newArrayList(
@@ -118,7 +118,7 @@ public class PhoenixQueryBuilderTest {
 new Object[]{"CHAR1", "CHAR2"}, COLUMN_CHAR, 
"char(10)", true)
 );
 
-assertEquals(expectedQueryPrefix + "Column_Char not between 'CHAR1' 
and 'CHAR2'",
+assertEquals(expectedQueryPrefix + "\"Column_Char\" not between 
'CHAR1' and 'CHAR2'",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 }
 
@@ -126,8 +126,8 @@ public class PhoenixQueryBuilderTest {
 public void testBuildBetweenQueryWithDateColumns() throws IOException {
 final String COLUMN_DATE = "Column_Date";
 final String tableName = "TEST_TABLE";
-final String expectedQueryPrefix = "select /*+ NO_CACHE  */ " + 
COLUMN_DATE +
-" from " + tableName + " where ";
+final String expectedQueryPrefix = "select /*+ NO_CACHE  */ \"" + 
COLUMN_DATE +
+"\" from " + tableName + " where ";
 
 JobConf jobConf = new JobConf();
 List readColumnList = Lists.newArrayList(COLUMN_DATE);
@@ -138,7 +138,7 @@ public class PhoenixQueryBuilderTest {
 );
 
 assertEquals(expectedQueryPrefix +
-COLUMN_DATE + " between to_date('1992-01-02') and 
to_date('1992-02-02')",
+"\"" + COLUMN_DATE + "\" between to_date('1992-01-02') 
and to_date('1992-02-02')",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 
 searchConditions = Lists.newArrayList(
@@ -147,7 +147,7 @@ public class PhoenixQueryBuilderTest {
 );
 
 assertEquals(expectedQueryPrefix +
-COLUMN_DATE + " not between to_date('1992-01-02') and 
to_date('1992-02-02')",
+"\"" + COLUMN_DATE + "\" not between 
to_date('1992-01-02') and to_date('1992-02-02')",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 }
 
@@ -155,8 +155,8 @@ public class PhoenixQueryBuilderTest {
 public void testBuildQueryWithNotNull() throws IOException {
 final String COLUMN_DATE = "Column_Date";
 final String tableName = "TEST_TABLE";
-final String expectedQueryPrefix = "select /*+ NO_CACHE  */ " + 
COLUMN_DATE +
-" from " + tableName + " where ";
+final String expectedQueryPrefix = "select /*+ NO_CACHE  */ \"" + 
COLUMN_DATE +
+"\" from " + tableName + " where ";
 
 JobConf jobConf = new JobConf();
 List readColumnList = Lists.newArrayList(COLUMN_DATE);
@@ -167,7 +167,7 @@ public class PhoenixQueryBuilderTest {
 );
 
 assertEquals(expectedQueryPrefix +
-COLUMN_DATE + " is not null ",
+"\"" + COLUMN_DATE + "\" is not null ",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 }
 }



[2/4] phoenix git commit: Fixed UT for PHOENIX-3346 Hive PhoenixStorageHandler doesn't work well with column

2017-03-01 Thread ssa
Fixed UT for PHOENIX-3346 Hive PhoenixStorageHandler doesn't work well with 
column


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/bf5495e4
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/bf5495e4
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/bf5495e4

Branch: refs/heads/4.x-Hbase-0.98
Commit: bf5495e498606d44969b49ef0ccd12c5a76dc075
Parents: 32cbbd0
Author: Sergey Soldatov 
Authored: Wed Mar 1 15:41:49 2017 -0800
Committer: Sergey Soldatov 
Committed: Wed Mar 1 16:03:32 2017 -0800

--
 .../hive/query/PhoenixQueryBuilderTest.java   | 18 +-
 1 file changed, 9 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/bf5495e4/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
--
diff --git 
a/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
 
b/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
index e4f872e..bc2cbe3 100644
--- 
a/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
+++ 
b/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
@@ -102,7 +102,7 @@ public class PhoenixQueryBuilderTest {
 new Object[]{"CHAR1", "CHAR2", "CHAR3"}, COLUMN_CHAR, 
"char(10)", true)
 );
 
-assertEquals(expectedQueryPrefix + "Column_Char not in ('CHAR1', 
'CHAR2', 'CHAR3')",
+assertEquals(expectedQueryPrefix + "\"Column_Char\" not in ('CHAR1', 
'CHAR2', 'CHAR3')",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 
 searchConditions = Lists.newArrayList(
@@ -118,7 +118,7 @@ public class PhoenixQueryBuilderTest {
 new Object[]{"CHAR1", "CHAR2"}, COLUMN_CHAR, 
"char(10)", true)
 );
 
-assertEquals(expectedQueryPrefix + "Column_Char not between 'CHAR1' 
and 'CHAR2'",
+assertEquals(expectedQueryPrefix + "\"Column_Char\" not between 
'CHAR1' and 'CHAR2'",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 }
 
@@ -126,8 +126,8 @@ public class PhoenixQueryBuilderTest {
 public void testBuildBetweenQueryWithDateColumns() throws IOException {
 final String COLUMN_DATE = "Column_Date";
 final String tableName = "TEST_TABLE";
-final String expectedQueryPrefix = "select /*+ NO_CACHE  */ " + 
COLUMN_DATE +
-" from " + tableName + " where ";
+final String expectedQueryPrefix = "select /*+ NO_CACHE  */ \"" + 
COLUMN_DATE +
+"\" from " + tableName + " where ";
 
 JobConf jobConf = new JobConf();
 List readColumnList = Lists.newArrayList(COLUMN_DATE);
@@ -138,7 +138,7 @@ public class PhoenixQueryBuilderTest {
 );
 
 assertEquals(expectedQueryPrefix +
-COLUMN_DATE + " between to_date('1992-01-02') and 
to_date('1992-02-02')",
+"\"" + COLUMN_DATE + "\" between to_date('1992-01-02') 
and to_date('1992-02-02')",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 
 searchConditions = Lists.newArrayList(
@@ -147,7 +147,7 @@ public class PhoenixQueryBuilderTest {
 );
 
 assertEquals(expectedQueryPrefix +
-COLUMN_DATE + " not between to_date('1992-01-02') and 
to_date('1992-02-02')",
+"\"" + COLUMN_DATE + "\" not between 
to_date('1992-01-02') and to_date('1992-02-02')",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 }
 
@@ -155,8 +155,8 @@ public class PhoenixQueryBuilderTest {
 public void testBuildQueryWithNotNull() throws IOException {
 final String COLUMN_DATE = "Column_Date";
 final String tableName = "TEST_TABLE";
-final String expectedQueryPrefix = "select /*+ NO_CACHE  */ " + 
COLUMN_DATE +
-" from " + tableName + " where ";
+final String expectedQueryPrefix = "select /*+ NO_CACHE  */ \"" + 
COLUMN_DATE +
+"\" from " + tableName + " where ";
 
 JobConf jobConf = new JobConf();
 List readColumnList = Lists.newArrayList(COLUMN_DATE);
@@ -167,7 +167,7 @@ public class PhoenixQueryBuilderTest {
 );
 
 assertEquals(expectedQueryPrefix +
-COLUMN_DATE + " is not null ",
+"\"" + COLUMN_DATE + "\" is not null ",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 }
 }



[4/4] phoenix git commit: Fixed UT for PHOENIX-3346 Hive PhoenixStorageHandler doesn't work well with column

2017-03-01 Thread ssa
Fixed UT for PHOENIX-3346 Hive PhoenixStorageHandler doesn't work well with 
column


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/2bdaf125
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/2bdaf125
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/2bdaf125

Branch: refs/heads/4.x-Hbase-1.3
Commit: 2bdaf125ffd6ebe8a23ddda04328f3ce58a030d1
Parents: 4b5291d
Author: Sergey Soldatov 
Authored: Wed Mar 1 15:41:49 2017 -0800
Committer: Sergey Soldatov 
Committed: Wed Mar 1 16:03:42 2017 -0800

--
 .../hive/query/PhoenixQueryBuilderTest.java   | 18 +-
 1 file changed, 9 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/2bdaf125/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
--
diff --git 
a/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
 
b/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
index e4f872e..bc2cbe3 100644
--- 
a/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
+++ 
b/phoenix-hive/src/test/java/org/apache/phoenix/hive/query/PhoenixQueryBuilderTest.java
@@ -102,7 +102,7 @@ public class PhoenixQueryBuilderTest {
 new Object[]{"CHAR1", "CHAR2", "CHAR3"}, COLUMN_CHAR, 
"char(10)", true)
 );
 
-assertEquals(expectedQueryPrefix + "Column_Char not in ('CHAR1', 
'CHAR2', 'CHAR3')",
+assertEquals(expectedQueryPrefix + "\"Column_Char\" not in ('CHAR1', 
'CHAR2', 'CHAR3')",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 
 searchConditions = Lists.newArrayList(
@@ -118,7 +118,7 @@ public class PhoenixQueryBuilderTest {
 new Object[]{"CHAR1", "CHAR2"}, COLUMN_CHAR, 
"char(10)", true)
 );
 
-assertEquals(expectedQueryPrefix + "Column_Char not between 'CHAR1' 
and 'CHAR2'",
+assertEquals(expectedQueryPrefix + "\"Column_Char\" not between 
'CHAR1' and 'CHAR2'",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 }
 
@@ -126,8 +126,8 @@ public class PhoenixQueryBuilderTest {
 public void testBuildBetweenQueryWithDateColumns() throws IOException {
 final String COLUMN_DATE = "Column_Date";
 final String tableName = "TEST_TABLE";
-final String expectedQueryPrefix = "select /*+ NO_CACHE  */ " + 
COLUMN_DATE +
-" from " + tableName + " where ";
+final String expectedQueryPrefix = "select /*+ NO_CACHE  */ \"" + 
COLUMN_DATE +
+"\" from " + tableName + " where ";
 
 JobConf jobConf = new JobConf();
 List readColumnList = Lists.newArrayList(COLUMN_DATE);
@@ -138,7 +138,7 @@ public class PhoenixQueryBuilderTest {
 );
 
 assertEquals(expectedQueryPrefix +
-COLUMN_DATE + " between to_date('1992-01-02') and 
to_date('1992-02-02')",
+"\"" + COLUMN_DATE + "\" between to_date('1992-01-02') 
and to_date('1992-02-02')",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 
 searchConditions = Lists.newArrayList(
@@ -147,7 +147,7 @@ public class PhoenixQueryBuilderTest {
 );
 
 assertEquals(expectedQueryPrefix +
-COLUMN_DATE + " not between to_date('1992-01-02') and 
to_date('1992-02-02')",
+"\"" + COLUMN_DATE + "\" not between 
to_date('1992-01-02') and to_date('1992-02-02')",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 }
 
@@ -155,8 +155,8 @@ public class PhoenixQueryBuilderTest {
 public void testBuildQueryWithNotNull() throws IOException {
 final String COLUMN_DATE = "Column_Date";
 final String tableName = "TEST_TABLE";
-final String expectedQueryPrefix = "select /*+ NO_CACHE  */ " + 
COLUMN_DATE +
-" from " + tableName + " where ";
+final String expectedQueryPrefix = "select /*+ NO_CACHE  */ \"" + 
COLUMN_DATE +
+"\" from " + tableName + " where ";
 
 JobConf jobConf = new JobConf();
 List readColumnList = Lists.newArrayList(COLUMN_DATE);
@@ -167,7 +167,7 @@ public class PhoenixQueryBuilderTest {
 );
 
 assertEquals(expectedQueryPrefix +
-COLUMN_DATE + " is not null ",
+"\"" + COLUMN_DATE + "\" is not null ",
 BUILDER.buildQuery(jobConf, TABLE_NAME, readColumnList, 
searchConditions));
 }
 }



[2/4] phoenix git commit: PHOENIX-3346 Hive PhoenixStorageHandler doesn't work well with column

2017-03-01 Thread ssa
PHOENIX-3346 Hive PhoenixStorageHandler doesn't work well with column


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9fd13ed2
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9fd13ed2
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9fd13ed2

Branch: refs/heads/4.x-HBase-1.1
Commit: 9fd13ed27fe9b7bfa5840e57512ed55f0cc6feac
Parents: b1faa68
Author: Sergey Soldatov 
Authored: Wed Mar 1 11:51:46 2017 -0800
Committer: Sergey Soldatov 
Committed: Wed Mar 1 11:57:28 2017 -0800

--
 phoenix-hive/pom.xml|  13 +
 .../phoenix/hive/BaseHivePhoenixStoreIT.java| 165 ++
 .../apache/phoenix/hive/HiveMapReduceIT.java|  32 ++
 .../apache/phoenix/hive/HivePhoenixStoreIT.java | 330 ++-
 .../org/apache/phoenix/hive/HiveTestUtil.java   |  22 +-
 .../java/org/apache/phoenix/hive/HiveTezIT.java |  32 ++
 .../apache/phoenix/hive/PhoenixMetaHook.java|  37 +--
 .../org/apache/phoenix/hive/PhoenixSerDe.java   |   9 +-
 .../apache/phoenix/hive/PhoenixSerializer.java  |   4 +
 .../phoenix/hive/PhoenixStorageHandler.java |   5 +
 .../hive/mapreduce/PhoenixInputFormat.java  |   3 +-
 .../hive/mapreduce/PhoenixRecordReader.java |   1 +
 .../hive/mapreduce/PhoenixResultWritable.java   |  12 +-
 .../phoenix/hive/query/PhoenixQueryBuilder.java |  76 -
 .../phoenix/hive/util/ColumnMappingUtils.java   |  76 +
 .../hive/util/PhoenixConnectionUtil.java|  19 ++
 .../hive/query/PhoenixQueryBuilderTest.java |  10 +-
 17 files changed, 604 insertions(+), 242 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/9fd13ed2/phoenix-hive/pom.xml
--
diff --git a/phoenix-hive/pom.xml b/phoenix-hive/pom.xml
index ce9f8a4..cb1c372 100644
--- a/phoenix-hive/pom.xml
+++ b/phoenix-hive/pom.xml
@@ -110,6 +110,19 @@
   hadoop-minicluster
   test
 
+  
+org.apache.tez
+tez-tests
+test
+0.8.4
+test-jar
+  
+  
+   org.apache.tez
+tez-dag
+test
+0.8.4
+  
 
   org.mockito
   mockito-all

http://git-wip-us.apache.org/repos/asf/phoenix/blob/9fd13ed2/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
--
diff --git 
a/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
new file mode 100644
index 000..ac0a7fc
--- /dev/null
+++ 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
@@ -0,0 +1,165 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.hive;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.phoenix.jdbc.PhoenixDriver;
+import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.TestUtil;
+import org.junit.AfterClass;
+
+import java.io.File;
+import java.io.IOException;
+import java.sql.*;
+import java.util.Properties;
+
+import static org.apache.phoenix.query.BaseTest.setUpConfigForMiniCluster;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+/**
+ * Base class for all Hive Phoenix integration tests that may be run with Tez 
or MR mini cluster
+ */
+public class BaseHivePhoenixStoreIT {
+
+private static final Log LOG = 
LogFactory.getLog(BaseHivePhoenixStoreIT.class);
+protected static 

[4/4] phoenix git commit: PHOENIX-3346 Hive PhoenixStorageHandler doesn't work well with column

2017-03-01 Thread ssa
PHOENIX-3346 Hive PhoenixStorageHandler doesn't work well with column


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9da774aa
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9da774aa
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9da774aa

Branch: refs/heads/4.x-HBase-0.98
Commit: 9da774aa577aca9ce780c0ff0dd8661ee47ee4e5
Parents: e5ca33f
Author: Sergey Soldatov 
Authored: Wed Mar 1 11:51:46 2017 -0800
Committer: Sergey Soldatov 
Committed: Wed Mar 1 12:10:05 2017 -0800

--
 phoenix-hive/pom.xml|  13 +
 .../phoenix/hive/BaseHivePhoenixStoreIT.java| 165 ++
 .../apache/phoenix/hive/HiveMapReduceIT.java|  32 ++
 .../apache/phoenix/hive/HivePhoenixStoreIT.java | 330 ++-
 .../org/apache/phoenix/hive/HiveTestUtil.java   |  22 +-
 .../java/org/apache/phoenix/hive/HiveTezIT.java |  32 ++
 .../apache/phoenix/hive/PhoenixMetaHook.java|  37 +--
 .../org/apache/phoenix/hive/PhoenixSerDe.java   |   9 +-
 .../apache/phoenix/hive/PhoenixSerializer.java  |   4 +
 .../phoenix/hive/PhoenixStorageHandler.java |   5 +
 .../hive/mapreduce/PhoenixInputFormat.java  |   4 +-
 .../hive/mapreduce/PhoenixRecordReader.java |   1 +
 .../hive/mapreduce/PhoenixResultWritable.java   |  12 +-
 .../phoenix/hive/query/PhoenixQueryBuilder.java |  76 -
 .../phoenix/hive/util/ColumnMappingUtils.java   |  76 +
 .../hive/util/PhoenixConnectionUtil.java|  19 ++
 .../hive/query/PhoenixQueryBuilderTest.java |  10 +-
 17 files changed, 605 insertions(+), 242 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/9da774aa/phoenix-hive/pom.xml
--
diff --git a/phoenix-hive/pom.xml b/phoenix-hive/pom.xml
index c0cc6fd..585fd34 100644
--- a/phoenix-hive/pom.xml
+++ b/phoenix-hive/pom.xml
@@ -110,6 +110,19 @@
   hadoop-minicluster
   test
 
+  
+org.apache.tez
+tez-tests
+test
+0.8.4
+test-jar
+  
+  
+   org.apache.tez
+tez-dag
+test
+0.8.4
+  
 
   org.mockito
   mockito-all

http://git-wip-us.apache.org/repos/asf/phoenix/blob/9da774aa/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
--
diff --git 
a/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
new file mode 100644
index 000..ac0a7fc
--- /dev/null
+++ 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
@@ -0,0 +1,165 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.hive;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.phoenix.jdbc.PhoenixDriver;
+import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.TestUtil;
+import org.junit.AfterClass;
+
+import java.io.File;
+import java.io.IOException;
+import java.sql.*;
+import java.util.Properties;
+
+import static org.apache.phoenix.query.BaseTest.setUpConfigForMiniCluster;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+/**
+ * Base class for all Hive Phoenix integration tests that may be run with Tez 
or MR mini cluster
+ */
+public class BaseHivePhoenixStoreIT {
+
+private static final Log LOG = 
LogFactory.getLog(BaseHivePhoenixStoreIT.class);
+protected static 

[1/4] phoenix git commit: PHOENIX-3346 Hive PhoenixStorageHandler doesn't work well with column

2017-03-01 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 e5ca33f41 -> 9da774aa5
  refs/heads/4.x-HBase-1.1 b1faa686c -> 9fd13ed27
  refs/heads/4.x-HBase-1.3 69d6aa040 -> 1ae7177d8
  refs/heads/master 5f5662b24 -> 7201dd5e1


PHOENIX-3346 Hive PhoenixStorageHandler doesn't work well with column


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/7201dd5e
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/7201dd5e
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/7201dd5e

Branch: refs/heads/master
Commit: 7201dd5e17096209d26ca3620054fc72665cf4fe
Parents: 5f5662b
Author: Sergey Soldatov 
Authored: Wed Mar 1 11:51:46 2017 -0800
Committer: Sergey Soldatov 
Committed: Wed Mar 1 11:55:06 2017 -0800

--
 phoenix-hive/pom.xml|  13 +
 .../phoenix/hive/BaseHivePhoenixStoreIT.java| 165 ++
 .../apache/phoenix/hive/HiveMapReduceIT.java|  32 ++
 .../apache/phoenix/hive/HivePhoenixStoreIT.java | 330 ++-
 .../org/apache/phoenix/hive/HiveTestUtil.java   |  22 +-
 .../java/org/apache/phoenix/hive/HiveTezIT.java |  32 ++
 .../apache/phoenix/hive/PhoenixMetaHook.java|  37 +--
 .../org/apache/phoenix/hive/PhoenixSerDe.java   |   9 +-
 .../apache/phoenix/hive/PhoenixSerializer.java  |   4 +
 .../phoenix/hive/PhoenixStorageHandler.java |   5 +
 .../hive/mapreduce/PhoenixInputFormat.java  |   3 +-
 .../hive/mapreduce/PhoenixRecordReader.java |   1 +
 .../hive/mapreduce/PhoenixResultWritable.java   |  12 +-
 .../phoenix/hive/query/PhoenixQueryBuilder.java |  76 -
 .../phoenix/hive/util/ColumnMappingUtils.java   |  76 +
 .../hive/util/PhoenixConnectionUtil.java|  19 ++
 .../hive/query/PhoenixQueryBuilderTest.java |  10 +-
 17 files changed, 604 insertions(+), 242 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/7201dd5e/phoenix-hive/pom.xml
--
diff --git a/phoenix-hive/pom.xml b/phoenix-hive/pom.xml
index e6d3f86..c6f5d40 100644
--- a/phoenix-hive/pom.xml
+++ b/phoenix-hive/pom.xml
@@ -110,6 +110,19 @@
   hadoop-minicluster
   test
 
+  
+org.apache.tez
+tez-tests
+test
+0.8.4
+test-jar
+  
+  
+   org.apache.tez
+tez-dag
+test
+0.8.4
+  
 
   org.mockito
   mockito-all

http://git-wip-us.apache.org/repos/asf/phoenix/blob/7201dd5e/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
--
diff --git 
a/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
new file mode 100644
index 000..ac0a7fc
--- /dev/null
+++ 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
@@ -0,0 +1,165 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.hive;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.phoenix.jdbc.PhoenixDriver;
+import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.TestUtil;
+import org.junit.AfterClass;
+
+import java.io.File;
+import java.io.IOException;
+import java.sql.*;
+import java.util.Properties;
+
+import static org.apache.phoenix.query.BaseTest.setUpConfigForMiniCluster;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+/**
+ * Base class for all Hive Phoenix 

[3/4] phoenix git commit: PHOENIX-3346 Hive PhoenixStorageHandler doesn't work well with column

2017-03-01 Thread ssa
PHOENIX-3346 Hive PhoenixStorageHandler doesn't work well with column


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/1ae7177d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/1ae7177d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/1ae7177d

Branch: refs/heads/4.x-HBase-1.3
Commit: 1ae7177d8f7cf3928d71cf127e79f1880e10f0f7
Parents: 69d6aa0
Author: Sergey Soldatov 
Authored: Wed Mar 1 11:51:46 2017 -0800
Committer: Sergey Soldatov 
Committed: Wed Mar 1 11:57:35 2017 -0800

--
 phoenix-hive/pom.xml|  13 +
 .../phoenix/hive/BaseHivePhoenixStoreIT.java| 165 ++
 .../apache/phoenix/hive/HiveMapReduceIT.java|  32 ++
 .../apache/phoenix/hive/HivePhoenixStoreIT.java | 330 ++-
 .../org/apache/phoenix/hive/HiveTestUtil.java   |  22 +-
 .../java/org/apache/phoenix/hive/HiveTezIT.java |  32 ++
 .../apache/phoenix/hive/PhoenixMetaHook.java|  37 +--
 .../org/apache/phoenix/hive/PhoenixSerDe.java   |   9 +-
 .../apache/phoenix/hive/PhoenixSerializer.java  |   4 +
 .../phoenix/hive/PhoenixStorageHandler.java |   5 +
 .../hive/mapreduce/PhoenixInputFormat.java  |   3 +-
 .../hive/mapreduce/PhoenixRecordReader.java |   1 +
 .../hive/mapreduce/PhoenixResultWritable.java   |  12 +-
 .../phoenix/hive/query/PhoenixQueryBuilder.java |  76 -
 .../phoenix/hive/util/ColumnMappingUtils.java   |  76 +
 .../hive/util/PhoenixConnectionUtil.java|  19 ++
 .../hive/query/PhoenixQueryBuilderTest.java |  10 +-
 17 files changed, 604 insertions(+), 242 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/1ae7177d/phoenix-hive/pom.xml
--
diff --git a/phoenix-hive/pom.xml b/phoenix-hive/pom.xml
index 2d0ef2b..d6ccdfe 100644
--- a/phoenix-hive/pom.xml
+++ b/phoenix-hive/pom.xml
@@ -110,6 +110,19 @@
   hadoop-minicluster
   test
 
+  
+org.apache.tez
+tez-tests
+test
+0.8.4
+test-jar
+  
+  
+   org.apache.tez
+tez-dag
+test
+0.8.4
+  
 
   org.mockito
   mockito-all

http://git-wip-us.apache.org/repos/asf/phoenix/blob/1ae7177d/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
--
diff --git 
a/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
new file mode 100644
index 000..ac0a7fc
--- /dev/null
+++ 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
@@ -0,0 +1,165 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.hive;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.phoenix.jdbc.PhoenixDriver;
+import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.TestUtil;
+import org.junit.AfterClass;
+
+import java.io.File;
+import java.io.IOException;
+import java.sql.*;
+import java.util.Properties;
+
+import static org.apache.phoenix.query.BaseTest.setUpConfigForMiniCluster;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+/**
+ * Base class for all Hive Phoenix integration tests that may be run with Tez 
or MR mini cluster
+ */
+public class BaseHivePhoenixStoreIT {
+
+private static final Log LOG = 
LogFactory.getLog(BaseHivePhoenixStoreIT.class);
+protected static 

  1   2   >