Jenkins build is back to normal : Phoenix | Master #1336

2016-07-22 Thread Apache Jenkins Server
See 



Apache-Phoenix | 4.x-HBase-1.0 | Build Successful

2016-07-22 Thread Apache Jenkins Server
4.x-HBase-1.0 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf?p=phoenix.git;a=shortlog;h=refs/heads/4.x-HBase-1.0

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.x-HBase-1.0/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.x-HBase-1.0/lastCompletedBuild/testReport/

Changes
[samarth] PHOENIX-3109 Improve and fix the way we are caching column family names



Build times for last couple of runsLatest build time is the right most | Legend blue: normal, red: test failure, gray: timeout


Apache-Phoenix | 4.x-HBase-0.98 | Build Successful

2016-07-22 Thread Apache Jenkins Server
4.x-HBase-0.98 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf?p=phoenix.git;a=shortlog;h=refs/heads/4.x-HBase-0.98

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.x-HBase-0.98/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.x-HBase-0.98/lastCompletedBuild/testReport/

Changes
[samarth] PHOENIX-3109 Improve and fix the way we are caching column family names



Build times for last couple of runsLatest build time is the right most | Legend blue: normal, red: test failure, gray: timeout


Build failed in Jenkins: Phoenix | Master #1335

2016-07-22 Thread Apache Jenkins Server
See 

Changes:

[samarth] PHOENIX-3109 Improve and fix the way we are caching column family 
names

--
[...truncated 1532 lines...]
at 
org.apache.hadoop.hbase.regionserver.RSRpcServices.getRegion(RSRpcServices.java:1053)
at 
org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1969)
at 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:33652)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2170)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:109)
at 
org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:133)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:108)
at java.lang.Thread.run(Thread.java:745)

Fri Jul 22 21:37:35 UTC 2016, RpcRetryingCaller{globalStartTime=1469223085223, 
pause=100, retries=35}, org.apache.hadoop.hbase.NotServingRegionException: 
org.apache.hadoop.hbase.NotServingRegionException: Region 
SYSTEM.CATALOG,,1469223079407.eb36e496a34cd5599b2521712ad07836. is not online 
on pomona.apache.org,44635,1469223072111
at 
org.apache.hadoop.hbase.regionserver.HRegionServer.getRegionByEncodedName(HRegionServer.java:2911)
at 
org.apache.hadoop.hbase.regionserver.RSRpcServices.getRegion(RSRpcServices.java:1053)
at 
org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1969)
at 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:33652)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2170)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:109)
at 
org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:133)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:108)
at java.lang.Thread.run(Thread.java:745)

Fri Jul 22 21:37:55 UTC 2016, RpcRetryingCaller{globalStartTime=1469223085223, 
pause=100, retries=35}, org.apache.hadoop.hbase.NotServingRegionException: 
org.apache.hadoop.hbase.NotServingRegionException: Region 
SYSTEM.CATALOG,,1469223079407.eb36e496a34cd5599b2521712ad07836. is not online 
on pomona.apache.org,44635,1469223072111
at 
org.apache.hadoop.hbase.regionserver.HRegionServer.getRegionByEncodedName(HRegionServer.java:2911)
at 
org.apache.hadoop.hbase.regionserver.RSRpcServices.getRegion(RSRpcServices.java:1053)
at 
org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1969)
at 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:33652)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2170)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:109)
at 
org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:133)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:108)
at java.lang.Thread.run(Thread.java:745)

Fri Jul 22 21:38:15 UTC 2016, RpcRetryingCaller{globalStartTime=1469223085223, 
pause=100, retries=35}, org.apache.hadoop.hbase.NotServingRegionException: 
org.apache.hadoop.hbase.NotServingRegionException: Region 
SYSTEM.CATALOG,,1469223079407.eb36e496a34cd5599b2521712ad07836. is not online 
on pomona.apache.org,44635,1469223072111
at 
org.apache.hadoop.hbase.regionserver.HRegionServer.getRegionByEncodedName(HRegionServer.java:2911)
at 
org.apache.hadoop.hbase.regionserver.RSRpcServices.getRegion(RSRpcServices.java:1053)
at 
org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1969)
at 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:33652)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2170)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:109)
at 
org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:133)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:108)
at java.lang.Thread.run(Thread.java:745)

Fri Jul 22 21:38:35 UTC 2016, RpcRetryingCaller{globalStartTime=1469223085223, 
pause=100, retries=35}, org.apache.hadoop.hbase.NotServingRegionException: 
org.apache.hadoop.hbase.NotServingRegionException: Region 
SYSTEM.CATALOG,,1469223079407.eb36e496a34cd5599b2521712ad07836. is not online 
on pomona.apache.org,44635,1469223072111
at 
org.apache.hadoop.hbase.regionserver.HRegionServer.getRegionByEncodedName(HRegionServer.java:2911)
at 
org.apache.hadoop.hbase.regionserver.RSRpcServices.getRegion(RSRpcServices.java:1053)
at 
org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1969)
at 

Build failed in Jenkins: Phoenix-4.x-HBase-1.1 #130

2016-07-22 Thread Apache Jenkins Server
See 

Changes:

[samarth] PHOENIX-3109 Improve and fix the way we are caching column family 
names

--
Started by an SCM change
[EnvInject] - Loading node environment variables.
Building remotely on H10 (docker Ubuntu ubuntu yahoo-not-h2) in workspace 

 > git rev-parse --is-inside-work-tree # timeout=10
Fetching changes from the remote Git repository
 > git config remote.origin.url 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git # timeout=10
Fetching upstream changes from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
 > git --version # timeout=10
 > git -c core.askpass=true fetch --tags --progress 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git rev-parse origin/4.x-HBase-1.1^{commit} # timeout=10
Checking out Revision 1078209bfcd1c73f1c60c28f56fd7d43e158e764 
(origin/4.x-HBase-1.1)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 1078209bfcd1c73f1c60c28f56fd7d43e158e764
 > git rev-list b328169718c1ddc0e9a118ee131d2c030ec843d2 # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
MAVEN_OPTS=-Xmx3G

[EnvInject] - Variables injected successfully.
[Phoenix-4.x-HBase-1.1] $ /bin/bash -xe /tmp/hudson839160944493053472.sh
+ echo 'DELETING ~/.m2/repository/org/apache/htrace. See 
https://issues.apache.org/jira/browse/PHOENIX-1802'
DELETING ~/.m2/repository/org/apache/htrace. See 
https://issues.apache.org/jira/browse/PHOENIX-1802
+ echo 'CURRENT CONTENT:'
CURRENT CONTENT:
+ ls /home/jenkins/.m2/repository/org/apache/htrace
htrace
htrace-core
htrace-core4
FATAL: Couldn't find any executable in /home/jenkins/tools/maven/latest3/
Build step 'Invoke top-level Maven targets' marked build as failure
Archiving artifacts
Compressed 523.89 MB of artifacts by 11.6% relative to #123
Recording test results
ERROR: Step 'Publish JUnit test result report' failed: Test reports were found 
but none of them are new. Did tests run? 
For example, 

 is 9 days 11 hr old




phoenix git commit: PHOENIX-3109 Improve and fix the way we are caching column family names for local indexes in IndexMaintainer

2016-07-22 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.1 b32816971 -> 1078209bf


PHOENIX-3109 Improve and fix the way we are caching column family names for 
local indexes in IndexMaintainer


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/1078209b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/1078209b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/1078209b

Branch: refs/heads/4.x-HBase-1.1
Commit: 1078209bfcd1c73f1c60c28f56fd7d43e158e764
Parents: b328169
Author: Samarth 
Authored: Fri Jul 22 14:04:17 2016 -0700
Committer: Samarth 
Committed: Fri Jul 22 14:04:17 2016 -0700

--
 .../coprocessor/MetaDataEndpointImpl.java   |  2 +-
 .../index/covered/update/ColumnReference.java   |  9 ++-
 .../index/util/ReadOnlyImmutableBytesPtr.java   | 59 --
 .../apache/phoenix/index/IndexMaintainer.java   | 64 +---
 .../apache/phoenix/schema/MetaDataClient.java   |  2 +-
 .../phoenix/index/IndexMaintainerTest.java  |  2 +-
 6 files changed, 36 insertions(+), 102 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/1078209b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index 8bea46b..7d3468d 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -3116,7 +3116,7 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 invalidateList.add(new ImmutableBytesPtr(indexKey));
 }
 // If the dropped column is a covered index column, invalidate the 
index
-else if (indexMaintainer.getCoverededColumns().contains(
+else if (indexMaintainer.getCoveredColumns().contains(
 new ColumnReference(columnToDelete.getFamilyName().getBytes(), 
columnToDelete
 .getName().getBytes( {
 invalidateList.add(new ImmutableBytesPtr(indexKey));

http://git-wip-us.apache.org/repos/asf/phoenix/blob/1078209b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/update/ColumnReference.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/update/ColumnReference.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/update/ColumnReference.java
index 8bd35f8..00348b3 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/update/ColumnReference.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/update/ColumnReference.java
@@ -22,7 +22,6 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
-import org.apache.phoenix.hbase.index.util.ReadOnlyImmutableBytesPtr;
 
 /**
  * 
@@ -46,15 +45,15 @@ public class ColumnReference implements 
Comparable {
 private final ImmutableBytesPtr qualifierPtr;
 
 public ColumnReference(byte[] family, byte[] qualifier) {
-this.familyPtr = new ReadOnlyImmutableBytesPtr(family);
-this.qualifierPtr = new ReadOnlyImmutableBytesPtr(qualifier);
+this.familyPtr = new ImmutableBytesPtr(family);
+this.qualifierPtr = new ImmutableBytesPtr(qualifier);
 this.hashCode = calcHashCode(this.familyPtr, this.qualifierPtr);
 }
 
 public ColumnReference(byte[] family, int familyOffset, int familyLength, 
byte[] qualifier,
 int qualifierOffset, int qualifierLength) {
-this.familyPtr = new ReadOnlyImmutableBytesPtr(family, familyOffset, 
familyLength);
-this.qualifierPtr = new ReadOnlyImmutableBytesPtr(qualifier, 
qualifierOffset, qualifierLength);
+this.familyPtr = new ImmutableBytesPtr(family, familyOffset, 
familyLength);
+this.qualifierPtr = new ImmutableBytesPtr(qualifier, qualifierOffset, 
qualifierLength);
 this.hashCode = calcHashCode(this.familyPtr, this.qualifierPtr);
 }
   

http://git-wip-us.apache.org/repos/asf/phoenix/blob/1078209b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/util/ReadOnlyImmutableBytesPtr.java
--
diff --git 

phoenix git commit: PHOENIX-3109 Improve and fix the way we are caching column family names for local indexes in IndexMaintainer

2016-07-22 Thread samarth
Repository: phoenix
Updated Branches:
  refs/heads/master 3878f3cbf -> 16d495a68


PHOENIX-3109 Improve and fix the way we are caching column family names for 
local indexes in IndexMaintainer


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/16d495a6
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/16d495a6
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/16d495a6

Branch: refs/heads/master
Commit: 16d495a689bab8a0c3bb2b0f06d29e9a4736f4d1
Parents: 3878f3c
Author: Samarth 
Authored: Fri Jul 22 13:45:29 2016 -0700
Committer: Samarth 
Committed: Fri Jul 22 13:45:29 2016 -0700

--
 .../coprocessor/MetaDataEndpointImpl.java   |  2 +-
 .../index/covered/update/ColumnReference.java   |  9 ++-
 .../index/util/ReadOnlyImmutableBytesPtr.java   | 59 --
 .../apache/phoenix/index/IndexMaintainer.java   | 64 +---
 .../apache/phoenix/schema/MetaDataClient.java   |  2 +-
 .../phoenix/index/IndexMaintainerTest.java  |  2 +-
 6 files changed, 36 insertions(+), 102 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/16d495a6/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index 8bea46b..7d3468d 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -3116,7 +3116,7 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 invalidateList.add(new ImmutableBytesPtr(indexKey));
 }
 // If the dropped column is a covered index column, invalidate the 
index
-else if (indexMaintainer.getCoverededColumns().contains(
+else if (indexMaintainer.getCoveredColumns().contains(
 new ColumnReference(columnToDelete.getFamilyName().getBytes(), 
columnToDelete
 .getName().getBytes( {
 invalidateList.add(new ImmutableBytesPtr(indexKey));

http://git-wip-us.apache.org/repos/asf/phoenix/blob/16d495a6/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/update/ColumnReference.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/update/ColumnReference.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/update/ColumnReference.java
index 8bd35f8..00348b3 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/update/ColumnReference.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/update/ColumnReference.java
@@ -22,7 +22,6 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
-import org.apache.phoenix.hbase.index.util.ReadOnlyImmutableBytesPtr;
 
 /**
  * 
@@ -46,15 +45,15 @@ public class ColumnReference implements 
Comparable {
 private final ImmutableBytesPtr qualifierPtr;
 
 public ColumnReference(byte[] family, byte[] qualifier) {
-this.familyPtr = new ReadOnlyImmutableBytesPtr(family);
-this.qualifierPtr = new ReadOnlyImmutableBytesPtr(qualifier);
+this.familyPtr = new ImmutableBytesPtr(family);
+this.qualifierPtr = new ImmutableBytesPtr(qualifier);
 this.hashCode = calcHashCode(this.familyPtr, this.qualifierPtr);
 }
 
 public ColumnReference(byte[] family, int familyOffset, int familyLength, 
byte[] qualifier,
 int qualifierOffset, int qualifierLength) {
-this.familyPtr = new ReadOnlyImmutableBytesPtr(family, familyOffset, 
familyLength);
-this.qualifierPtr = new ReadOnlyImmutableBytesPtr(qualifier, 
qualifierOffset, qualifierLength);
+this.familyPtr = new ImmutableBytesPtr(family, familyOffset, 
familyLength);
+this.qualifierPtr = new ImmutableBytesPtr(qualifier, qualifierOffset, 
qualifierLength);
 this.hashCode = calcHashCode(this.familyPtr, this.qualifierPtr);
 }
   

http://git-wip-us.apache.org/repos/asf/phoenix/blob/16d495a6/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/util/ReadOnlyImmutableBytesPtr.java
--
diff --git 

Build failed in Jenkins: Phoenix-4.x-HBase-1.1 #129

2016-07-22 Thread Apache Jenkins Server
See 

Changes:

[ssa] PHOENIX-3078. Hive storage handler does not work if phoenix.table.name

--
[...truncated 2129 lines...]

Running org.apache.phoenix.end2end.index.MutableIndexReplicationIT
Tests run: 1, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 0.007 sec <<< 
FAILURE! - in org.apache.phoenix.end2end.index.MutableIndexReplicationIT
org.apache.phoenix.end2end.index.MutableIndexReplicationIT  Time elapsed: 0.004 
sec  <<< ERROR!
java.io.IOException: Shutting down
at 
org.apache.phoenix.end2end.index.MutableIndexReplicationIT.setupConfigsAndStartCluster(MutableIndexReplicationIT.java:170)
at 
org.apache.phoenix.end2end.index.MutableIndexReplicationIT.setUpBeforeClass(MutableIndexReplicationIT.java:108)
Caused by: java.lang.RuntimeException: Master not initialized after 20ms 
seconds
at 
org.apache.phoenix.end2end.index.MutableIndexReplicationIT.setupConfigsAndStartCluster(MutableIndexReplicationIT.java:170)
at 
org.apache.phoenix.end2end.index.MutableIndexReplicationIT.setUpBeforeClass(MutableIndexReplicationIT.java:108)

Running org.apache.phoenix.end2end.index.ReadOnlyIndexFailureIT
Running org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
Tests run: 1, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 0.005 sec <<< 
FAILURE! - in org.apache.phoenix.end2end.index.ReadOnlyIndexFailureIT
org.apache.phoenix.end2end.index.ReadOnlyIndexFailureIT  Time elapsed: 0.004 
sec  <<< ERROR!
java.lang.RuntimeException: java.io.IOException: Shutting down
at 
org.apache.phoenix.end2end.index.ReadOnlyIndexFailureIT.doSetup(ReadOnlyIndexFailureIT.java:119)
Caused by: java.io.IOException: Shutting down
at 
org.apache.phoenix.end2end.index.ReadOnlyIndexFailureIT.doSetup(ReadOnlyIndexFailureIT.java:119)
Caused by: java.lang.RuntimeException: Master not initialized after 20ms 
seconds
at 
org.apache.phoenix.end2end.index.ReadOnlyIndexFailureIT.doSetup(ReadOnlyIndexFailureIT.java:119)

Running org.apache.phoenix.end2end.index.txn.TxWriteFailureIT
Tests run: 1, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 0.005 sec <<< 
FAILURE! - in org.apache.phoenix.end2end.index.txn.TxWriteFailureIT
org.apache.phoenix.end2end.index.txn.TxWriteFailureIT  Time elapsed: 0.005 sec  
<<< ERROR!
java.lang.RuntimeException: java.io.IOException: Shutting down
at 
org.apache.phoenix.end2end.index.txn.TxWriteFailureIT.doSetup(TxWriteFailureIT.java:86)
Caused by: java.io.IOException: Shutting down
at 
org.apache.phoenix.end2end.index.txn.TxWriteFailureIT.doSetup(TxWriteFailureIT.java:86)
Caused by: java.lang.RuntimeException: Master not initialized after 20ms 
seconds
at 
org.apache.phoenix.end2end.index.txn.TxWriteFailureIT.doSetup(TxWriteFailureIT.java:86)

Running org.apache.phoenix.execute.PartialCommitIT
Tests run: 1, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 0.005 sec <<< 
FAILURE! - in org.apache.phoenix.execute.PartialCommitIT
org.apache.phoenix.execute.PartialCommitIT  Time elapsed: 0.004 sec  <<< ERROR!
java.lang.RuntimeException: java.io.IOException: Shutting down
at 
org.apache.phoenix.execute.PartialCommitIT.doSetup(PartialCommitIT.java:92)
Caused by: java.io.IOException: Shutting down
at 
org.apache.phoenix.execute.PartialCommitIT.doSetup(PartialCommitIT.java:92)
Caused by: java.lang.RuntimeException: Master not initialized after 20ms 
seconds
at 
org.apache.phoenix.execute.PartialCommitIT.doSetup(PartialCommitIT.java:92)

Tests run: 2, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 209.891 sec <<< 
FAILURE! - in org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
testDoesNotStartRegionServerForUnsupportedCompressionAndVersion(org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT)
  Time elapsed: 209.338 sec  <<< ERROR!
java.io.IOException: Shutting down
at 
org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT.testDoesNotStartRegionServerForUnsupportedCompressionAndVersion(FailForUnsupportedHBaseVersionsIT.java:121)
Caused by: java.lang.RuntimeException: Master not initialized after 20ms 
seconds
at 
org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT.testDoesNotStartRegionServerForUnsupportedCompressionAndVersion(FailForUnsupportedHBaseVersionsIT.java:121)

Tests run: 3, Failures: 0, Errors: 3, Skipped: 0, Time elapsed: 614.951 sec <<< 
FAILURE! - in org.apache.phoenix.end2end.index.AsyncIndexIT
testAsyncIndexBuilderNonDistributed(org.apache.phoenix.end2end.index.AsyncIndexIT)
  Time elapsed: 208.345 sec  <<< ERROR!
java.lang.RuntimeException: java.io.IOException: Shutting down
at 
org.apache.phoenix.end2end.index.AsyncIndexIT.testAsyncIndexBuilderNonDistributed(AsyncIndexIT.java:116)
Caused by: java.io.IOException: Shutting down
at 

Apache-Phoenix | 4.x-HBase-0.98 | Build Successful

2016-07-22 Thread Apache Jenkins Server
4.x-HBase-0.98 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf?p=phoenix.git;a=shortlog;h=refs/heads/4.x-HBase-0.98

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.x-HBase-0.98/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.x-HBase-0.98/lastCompletedBuild/testReport/

Changes
[ssa] PHOENIX-3078. Hive storage handler does not work if phoenix.table.name



Build times for last couple of runsLatest build time is the right most | Legend blue: normal, red: test failure, gray: timeout


Build failed in Jenkins: Phoenix | Master #1333

2016-07-22 Thread Apache Jenkins Server
See 

Changes:

[ssa] PHOENIX-3078. Hive storage handler does not work if phoenix.table.name

--
[...truncated 735 lines...]

Results :

Tests run: 1198, Failures: 0, Errors: 0, Skipped: 5

[INFO] 
[INFO] --- maven-failsafe-plugin:2.19.1:integration-test 
(HBaseManagedTimeTableReuseTest) @ phoenix-core ---

---
 T E S T S
---
Running org.apache.phoenix.end2end.ArithmeticQueryIT
Running org.apache.phoenix.end2end.AlterSessionIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.069 sec - in 
org.apache.phoenix.end2end.AlterSessionIT
Running org.apache.phoenix.end2end.ArraysWithNullsIT
Running org.apache.phoenix.end2end.AbsFunctionEnd2EndIT
Running org.apache.phoenix.end2end.ArrayToStringFunctionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.71 sec - in 
org.apache.phoenix.end2end.AbsFunctionEnd2EndIT
Running org.apache.phoenix.end2end.AutoCommitIT
Running org.apache.phoenix.end2end.ArrayFillFunctionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.425 sec - in 
org.apache.phoenix.end2end.AutoCommitIT
Running org.apache.phoenix.end2end.CbrtFunctionEnd2EndIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.407 sec - in 
org.apache.phoenix.end2end.CbrtFunctionEnd2EndIT
Running org.apache.phoenix.end2end.ConvertTimezoneFunctionIT
Tests run: 26, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.115 sec - 
in org.apache.phoenix.end2end.ArrayFillFunctionIT
Running org.apache.phoenix.end2end.DecodeFunctionIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 16.634 sec - in 
org.apache.phoenix.end2end.ConvertTimezoneFunctionIT
Tests run: 36, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 28.126 sec - 
in org.apache.phoenix.end2end.ArrayToStringFunctionIT
Running org.apache.phoenix.end2end.DynamicFamilyIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.895 sec - in 
org.apache.phoenix.end2end.DynamicFamilyIT
Running org.apache.phoenix.end2end.DynamicUpsertIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.098 sec - in 
org.apache.phoenix.end2end.DynamicUpsertIT
Running org.apache.phoenix.end2end.FirstValueFunctionIT
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 18.479 sec - in 
org.apache.phoenix.end2end.DecodeFunctionIT
Running org.apache.phoenix.end2end.GetSetByteBitFunctionEnd2EndIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.095 sec - in 
org.apache.phoenix.end2end.GetSetByteBitFunctionEnd2EndIT
Running org.apache.phoenix.end2end.LikeExpressionIT
Tests run: 16, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 50.218 sec - 
in org.apache.phoenix.end2end.ArraysWithNullsIT
Running org.apache.phoenix.end2end.MD5FunctionIT
Running org.apache.phoenix.end2end.DistinctPrefixFilterIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.768 sec - in 
org.apache.phoenix.end2end.LikeExpressionIT
Running org.apache.phoenix.end2end.MinMaxAggregateFunctionIT
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 16.789 sec - in 
org.apache.phoenix.end2end.FirstValueFunctionIT
Running org.apache.phoenix.end2end.NthValueFunctionIT
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.308 sec - in 
org.apache.phoenix.end2end.MD5FunctionIT
Running org.apache.phoenix.end2end.OctetLengthFunctionEnd2EndIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.458 sec - in 
org.apache.phoenix.end2end.MinMaxAggregateFunctionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.388 sec - in 
org.apache.phoenix.end2end.OctetLengthFunctionEnd2EndIT
Tests run: 26, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 66.327 sec - 
in org.apache.phoenix.end2end.ArithmeticQueryIT
Running org.apache.phoenix.end2end.QueryMoreIT
Running org.apache.phoenix.end2end.PrimitiveTypeIT
Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.148 sec - in 
org.apache.phoenix.end2end.PrimitiveTypeIT
Running org.apache.phoenix.end2end.PowerFunctionEnd2EndIT
Running org.apache.phoenix.end2end.RTrimFunctionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.371 sec - in 
org.apache.phoenix.end2end.PowerFunctionEnd2EndIT
Running org.apache.phoenix.end2end.ReadOnlyIT
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 16.813 sec - in 
org.apache.phoenix.end2end.DistinctPrefixFilterIT
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.734 sec - in 
org.apache.phoenix.end2end.RTrimFunctionIT
Running org.apache.phoenix.end2end.ReverseFunctionIT
Running org.apache.phoenix.end2end.RegexpSplitFunctionIT
Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.224 sec - in 
org.apache.phoenix.end2end.RegexpSplitFunctionIT
Running 

Build failed in Jenkins: Phoenix-4.x-HBase-1.1 #128

2016-07-22 Thread Apache Jenkins Server
See 

--
[...truncated 2123 lines...]

Running org.apache.phoenix.end2end.index.MutableIndexReplicationIT
Running org.apache.phoenix.end2end.index.ReadOnlyIndexFailureIT
Tests run: 1, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 0.005 sec <<< 
FAILURE! - in org.apache.phoenix.end2end.index.MutableIndexReplicationIT
org.apache.phoenix.end2end.index.MutableIndexReplicationIT  Time elapsed: 0.004 
sec  <<< ERROR!
java.io.IOException: Shutting down
at 
org.apache.phoenix.end2end.index.MutableIndexReplicationIT.setupConfigsAndStartCluster(MutableIndexReplicationIT.java:170)
at 
org.apache.phoenix.end2end.index.MutableIndexReplicationIT.setUpBeforeClass(MutableIndexReplicationIT.java:108)
Caused by: java.lang.RuntimeException: Master not initialized after 20ms 
seconds
at 
org.apache.phoenix.end2end.index.MutableIndexReplicationIT.setupConfigsAndStartCluster(MutableIndexReplicationIT.java:170)
at 
org.apache.phoenix.end2end.index.MutableIndexReplicationIT.setUpBeforeClass(MutableIndexReplicationIT.java:108)

Running org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
Tests run: 1, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 0.005 sec <<< 
FAILURE! - in org.apache.phoenix.end2end.index.ReadOnlyIndexFailureIT
org.apache.phoenix.end2end.index.ReadOnlyIndexFailureIT  Time elapsed: 0.004 
sec  <<< ERROR!
java.lang.RuntimeException: java.io.IOException: Shutting down
at 
org.apache.phoenix.end2end.index.ReadOnlyIndexFailureIT.doSetup(ReadOnlyIndexFailureIT.java:119)
Caused by: java.io.IOException: Shutting down
at 
org.apache.phoenix.end2end.index.ReadOnlyIndexFailureIT.doSetup(ReadOnlyIndexFailureIT.java:119)
Caused by: java.lang.RuntimeException: Master not initialized after 20ms 
seconds
at 
org.apache.phoenix.end2end.index.ReadOnlyIndexFailureIT.doSetup(ReadOnlyIndexFailureIT.java:119)

Running org.apache.phoenix.end2end.index.txn.TxWriteFailureIT
Tests run: 1, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 0.009 sec <<< 
FAILURE! - in org.apache.phoenix.end2end.index.txn.TxWriteFailureIT
org.apache.phoenix.end2end.index.txn.TxWriteFailureIT  Time elapsed: 0.009 sec  
<<< ERROR!
java.lang.RuntimeException: java.io.IOException: Shutting down
at 
org.apache.phoenix.end2end.index.txn.TxWriteFailureIT.doSetup(TxWriteFailureIT.java:86)
Caused by: java.io.IOException: Shutting down
at 
org.apache.phoenix.end2end.index.txn.TxWriteFailureIT.doSetup(TxWriteFailureIT.java:86)
Caused by: java.lang.RuntimeException: Master not initialized after 20ms 
seconds
at 
org.apache.phoenix.end2end.index.txn.TxWriteFailureIT.doSetup(TxWriteFailureIT.java:86)

Running org.apache.phoenix.execute.PartialCommitIT
Tests run: 1, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 0.003 sec <<< 
FAILURE! - in org.apache.phoenix.execute.PartialCommitIT
org.apache.phoenix.execute.PartialCommitIT  Time elapsed: 0.003 sec  <<< ERROR!
java.lang.RuntimeException: java.io.IOException: Shutting down
at 
org.apache.phoenix.execute.PartialCommitIT.doSetup(PartialCommitIT.java:92)
Caused by: java.io.IOException: Shutting down
at 
org.apache.phoenix.execute.PartialCommitIT.doSetup(PartialCommitIT.java:92)
Caused by: java.lang.RuntimeException: Master not initialized after 20ms 
seconds
at 
org.apache.phoenix.execute.PartialCommitIT.doSetup(PartialCommitIT.java:92)

Tests run: 2, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 208.852 sec <<< 
FAILURE! - in org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT
testDoesNotStartRegionServerForUnsupportedCompressionAndVersion(org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT)
  Time elapsed: 208.238 sec  <<< ERROR!
java.io.IOException: Shutting down
at 
org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT.testDoesNotStartRegionServerForUnsupportedCompressionAndVersion(FailForUnsupportedHBaseVersionsIT.java:121)
Caused by: java.lang.RuntimeException: Master not initialized after 20ms 
seconds
at 
org.apache.phoenix.hbase.index.FailForUnsupportedHBaseVersionsIT.testDoesNotStartRegionServerForUnsupportedCompressionAndVersion(FailForUnsupportedHBaseVersionsIT.java:121)

Tests run: 3, Failures: 0, Errors: 3, Skipped: 0, Time elapsed: 614.575 sec <<< 
FAILURE! - in org.apache.phoenix.end2end.index.AsyncIndexIT
testAsyncIndexBuilderNonDistributed(org.apache.phoenix.end2end.index.AsyncIndexIT)
  Time elapsed: 207.052 sec  <<< ERROR!
java.lang.RuntimeException: java.io.IOException: Shutting down
at 
org.apache.phoenix.end2end.index.AsyncIndexIT.testAsyncIndexBuilderNonDistributed(AsyncIndexIT.java:116)
Caused by: java.io.IOException: Shutting down
at 
org.apache.phoenix.end2end.index.AsyncIndexIT.testAsyncIndexBuilderNonDistributed(AsyncIndexIT.java:116)
Caused by: java.lang.RuntimeException: Master not 

[4/4] phoenix git commit: PHOENIX-3078. Hive storage handler does not work if phoenix.table.name contains the schema name

2016-07-22 Thread ssa
PHOENIX-3078. Hive storage handler does not work if phoenix.table.name contains 
the schema name

Signed-off-by: Sergey Soldatov 


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b3281697
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b3281697
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b3281697

Branch: refs/heads/4.x-HBase-1.1
Commit: b328169718c1ddc0e9a118ee131d2c030ec843d2
Parents: bfb19c1
Author: Youngwoo Kim 
Authored: Sun Jul 17 17:45:15 2016 +0900
Committer: Sergey Soldatov 
Committed: Fri Jul 22 00:10:06 2016 -0700

--
 .../src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java | 2 +-
 .../main/java/org/apache/phoenix/hive/util/PhoenixUtil.java| 6 --
 2 files changed, 5 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b3281697/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
index d920517..ae3675f 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
@@ -139,7 +139,7 @@ public class PhoenixMetaHook implements HiveMetaHook {
 ddl.append("  ").append(columnName).append(" 
").append(columnType).append(",\n");
 }
 }
-ddl.append("  ").append("constraint pk_").append(tableName).append(" 
primary key(")
+ddl.append("  ").append("constraint 
pk_").append(PhoenixUtil.getTableSchema(tableName.toUpperCase())[1]).append(" 
primary key(")
 .append(realRowKeys.deleteCharAt(realRowKeys.length() - 
1)).append(")\n)\n");
 
 String tableOptions = 
tableParameterMap.get(PhoenixStorageHandlerConstants

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b3281697/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
index 5f9b0ba..9dcb3ef 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
@@ -71,7 +71,8 @@ public class PhoenixUtil {
 boolean exist = false;
 DatabaseMetaData dbMeta = conn.getMetaData();
 
-try (ResultSet rs = dbMeta.getTables(null, null, 
tableName.toUpperCase(), null)) {
+String[] schemaInfo = getTableSchema(tableName.toUpperCase());
+try (ResultSet rs = dbMeta.getTables(null, schemaInfo[0], 
schemaInfo[1], null)) {
 exist = rs.next();
 
 if (LOG.isDebugEnabled()) {
@@ -91,7 +92,8 @@ public class PhoenixUtil {
 Map<Short, String> primaryKeyColumnInfoMap = Maps.newHashMap();
 DatabaseMetaData dbMeta = conn.getMetaData();
 
-try (ResultSet rs = dbMeta.getPrimaryKeys(null, null, 
tableName.toUpperCase())) {
+String[] schemaInfo = getTableSchema(tableName.toUpperCase());
+try (ResultSet rs = dbMeta.getPrimaryKeys(null, schemaInfo[0], 
schemaInfo[1])) {
 while (rs.next()) {
 primaryKeyColumnInfoMap.put(rs.getShort("KEY_SEQ"), 
rs.getString("COLUMN_NAME"));
 }



[1/4] phoenix git commit: PHOENIX-3078. Hive storage handler does not work if phoenix.table.name contains the schema name

2016-07-22 Thread ssa
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 680990cb5 -> d744886a3
  refs/heads/4.x-HBase-1.0 314a011fe -> c70fa534d
  refs/heads/4.x-HBase-1.1 bfb19c17a -> b32816971
  refs/heads/master e864c5d2f -> 3878f3cbf


PHOENIX-3078. Hive storage handler does not work if phoenix.table.name contains 
the schema name

Signed-off-by: Sergey Soldatov 


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/3878f3cb
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/3878f3cb
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/3878f3cb

Branch: refs/heads/master
Commit: 3878f3cbfb31e2058adc32d92593a8743911569e
Parents: e864c5d
Author: Youngwoo Kim 
Authored: Sun Jul 17 17:45:15 2016 +0900
Committer: Sergey Soldatov 
Committed: Fri Jul 22 00:05:59 2016 -0700

--
 .../src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java | 2 +-
 .../main/java/org/apache/phoenix/hive/util/PhoenixUtil.java| 6 --
 2 files changed, 5 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/3878f3cb/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
index d920517..ae3675f 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
@@ -139,7 +139,7 @@ public class PhoenixMetaHook implements HiveMetaHook {
 ddl.append("  ").append(columnName).append(" 
").append(columnType).append(",\n");
 }
 }
-ddl.append("  ").append("constraint pk_").append(tableName).append(" 
primary key(")
+ddl.append("  ").append("constraint 
pk_").append(PhoenixUtil.getTableSchema(tableName.toUpperCase())[1]).append(" 
primary key(")
 .append(realRowKeys.deleteCharAt(realRowKeys.length() - 
1)).append(")\n)\n");
 
 String tableOptions = 
tableParameterMap.get(PhoenixStorageHandlerConstants

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3878f3cb/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
index 5f9b0ba..9dcb3ef 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
@@ -71,7 +71,8 @@ public class PhoenixUtil {
 boolean exist = false;
 DatabaseMetaData dbMeta = conn.getMetaData();
 
-try (ResultSet rs = dbMeta.getTables(null, null, 
tableName.toUpperCase(), null)) {
+String[] schemaInfo = getTableSchema(tableName.toUpperCase());
+try (ResultSet rs = dbMeta.getTables(null, schemaInfo[0], 
schemaInfo[1], null)) {
 exist = rs.next();
 
 if (LOG.isDebugEnabled()) {
@@ -91,7 +92,8 @@ public class PhoenixUtil {
 Map<Short, String> primaryKeyColumnInfoMap = Maps.newHashMap();
 DatabaseMetaData dbMeta = conn.getMetaData();
 
-try (ResultSet rs = dbMeta.getPrimaryKeys(null, null, 
tableName.toUpperCase())) {
+String[] schemaInfo = getTableSchema(tableName.toUpperCase());
+try (ResultSet rs = dbMeta.getPrimaryKeys(null, schemaInfo[0], 
schemaInfo[1])) {
 while (rs.next()) {
 primaryKeyColumnInfoMap.put(rs.getShort("KEY_SEQ"), 
rs.getString("COLUMN_NAME"));
 }



[2/4] phoenix git commit: PHOENIX-3078. Hive storage handler does not work if phoenix.table.name contains the schema name

2016-07-22 Thread ssa
PHOENIX-3078. Hive storage handler does not work if phoenix.table.name contains 
the schema name

Signed-off-by: Sergey Soldatov 


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d744886a
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d744886a
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d744886a

Branch: refs/heads/4.x-HBase-0.98
Commit: d744886a33ab15a2e326cfdb839e0664aac63d1b
Parents: 680990c
Author: Youngwoo Kim 
Authored: Sun Jul 17 17:45:15 2016 +0900
Committer: Sergey Soldatov 
Committed: Fri Jul 22 00:06:24 2016 -0700

--
 .../src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java | 2 +-
 .../main/java/org/apache/phoenix/hive/util/PhoenixUtil.java| 6 --
 2 files changed, 5 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/d744886a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
index d920517..ae3675f 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
@@ -139,7 +139,7 @@ public class PhoenixMetaHook implements HiveMetaHook {
 ddl.append("  ").append(columnName).append(" 
").append(columnType).append(",\n");
 }
 }
-ddl.append("  ").append("constraint pk_").append(tableName).append(" 
primary key(")
+ddl.append("  ").append("constraint 
pk_").append(PhoenixUtil.getTableSchema(tableName.toUpperCase())[1]).append(" 
primary key(")
 .append(realRowKeys.deleteCharAt(realRowKeys.length() - 
1)).append(")\n)\n");
 
 String tableOptions = 
tableParameterMap.get(PhoenixStorageHandlerConstants

http://git-wip-us.apache.org/repos/asf/phoenix/blob/d744886a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
index e889b63..6afe23b 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
@@ -71,7 +71,8 @@ public class PhoenixUtil {
 boolean exist = false;
 DatabaseMetaData dbMeta = conn.getMetaData();
 
-try (ResultSet rs = dbMeta.getTables(null, null, 
tableName.toUpperCase(), null)) {
+String[] schemaInfo = getTableSchema(tableName.toUpperCase());
+try (ResultSet rs = dbMeta.getTables(null, schemaInfo[0], 
schemaInfo[1], null)) {
 exist = rs.next();
 
 if (LOG.isDebugEnabled()) {
@@ -91,7 +92,8 @@ public class PhoenixUtil {
 Map<Short, String> primaryKeyColumnInfoMap = Maps.newHashMap();
 DatabaseMetaData dbMeta = conn.getMetaData();
 
-try (ResultSet rs = dbMeta.getPrimaryKeys(null, null, 
tableName.toUpperCase())) {
+String[] schemaInfo = getTableSchema(tableName.toUpperCase());
+try (ResultSet rs = dbMeta.getPrimaryKeys(null, schemaInfo[0], 
schemaInfo[1])) {
 while (rs.next()) {
 primaryKeyColumnInfoMap.put(rs.getShort("KEY_SEQ"), 
rs.getString("COLUMN_NAME"));
 }



[3/4] phoenix git commit: PHOENIX-3078. Hive storage handler does not work if phoenix.table.name contains the schema name

2016-07-22 Thread ssa
PHOENIX-3078. Hive storage handler does not work if phoenix.table.name contains 
the schema name

Signed-off-by: Sergey Soldatov 


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c70fa534
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c70fa534
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c70fa534

Branch: refs/heads/4.x-HBase-1.0
Commit: c70fa534d0b688e55291c3bf4a800ea9ddbb0b65
Parents: 314a011
Author: Youngwoo Kim 
Authored: Sun Jul 17 17:45:15 2016 +0900
Committer: Sergey Soldatov 
Committed: Fri Jul 22 00:09:41 2016 -0700

--
 .../src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java | 2 +-
 .../main/java/org/apache/phoenix/hive/util/PhoenixUtil.java| 6 --
 2 files changed, 5 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c70fa534/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
index d920517..ae3675f 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixMetaHook.java
@@ -139,7 +139,7 @@ public class PhoenixMetaHook implements HiveMetaHook {
 ddl.append("  ").append(columnName).append(" 
").append(columnType).append(",\n");
 }
 }
-ddl.append("  ").append("constraint pk_").append(tableName).append(" 
primary key(")
+ddl.append("  ").append("constraint 
pk_").append(PhoenixUtil.getTableSchema(tableName.toUpperCase())[1]).append(" 
primary key(")
 .append(realRowKeys.deleteCharAt(realRowKeys.length() - 
1)).append(")\n)\n");
 
 String tableOptions = 
tableParameterMap.get(PhoenixStorageHandlerConstants

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c70fa534/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
index 5f9b0ba..9dcb3ef 100644
--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
+++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
@@ -71,7 +71,8 @@ public class PhoenixUtil {
 boolean exist = false;
 DatabaseMetaData dbMeta = conn.getMetaData();
 
-try (ResultSet rs = dbMeta.getTables(null, null, 
tableName.toUpperCase(), null)) {
+String[] schemaInfo = getTableSchema(tableName.toUpperCase());
+try (ResultSet rs = dbMeta.getTables(null, schemaInfo[0], 
schemaInfo[1], null)) {
 exist = rs.next();
 
 if (LOG.isDebugEnabled()) {
@@ -91,7 +92,8 @@ public class PhoenixUtil {
 Map<Short, String> primaryKeyColumnInfoMap = Maps.newHashMap();
 DatabaseMetaData dbMeta = conn.getMetaData();
 
-try (ResultSet rs = dbMeta.getPrimaryKeys(null, null, 
tableName.toUpperCase())) {
+String[] schemaInfo = getTableSchema(tableName.toUpperCase());
+try (ResultSet rs = dbMeta.getPrimaryKeys(null, schemaInfo[0], 
schemaInfo[1])) {
 while (rs.next()) {
 primaryKeyColumnInfoMap.put(rs.getShort("KEY_SEQ"), 
rs.getString("COLUMN_NAME"));
 }