Build failed in Jenkins: Phoenix Compile Compatibility with HBase #707

2018-07-25 Thread Apache Jenkins Server
See 


--
Started by timer
[EnvInject] - Loading node environment variables.
Building remotely on H25 (ubuntu xenial) in workspace 

[Phoenix_Compile_Compat_wHBase] $ /bin/bash /tmp/jenkins2580419952628567133.sh
core file size  (blocks, -c) 0
data seg size   (kbytes, -d) unlimited
scheduling priority (-e) 0
file size   (blocks, -f) unlimited
pending signals (-i) 386413
max locked memory   (kbytes, -l) 64
max memory size (kbytes, -m) unlimited
open files  (-n) 6
pipe size(512 bytes, -p) 8
POSIX message queues (bytes, -q) 819200
real-time priority  (-r) 0
stack size  (kbytes, -s) 8192
cpu time   (seconds, -t) unlimited
max user processes  (-u) 10240
virtual memory  (kbytes, -v) unlimited
file locks  (-x) unlimited
core id : 0
core id : 1
core id : 2
core id : 3
core id : 4
core id : 5
physical id : 0
physical id : 1
MemTotal:   98957736 kB
MemFree:59984736 kB
Filesystem  Size  Used Avail Use% Mounted on
udev 48G 0   48G   0% /dev
tmpfs   9.5G   66M  9.4G   1% /run
/dev/sda1   364G  257G   90G  75% /
tmpfs48G 0   48G   0% /dev/shm
tmpfs   5.0M 0  5.0M   0% /run/lock
tmpfs48G 0   48G   0% /sys/fs/cgroup
tmpfs   9.5G 0  9.5G   0% /run/user/910
apache-maven-2.2.1
apache-maven-3.0.4
apache-maven-3.0.5
apache-maven-3.2.1
apache-maven-3.2.5
apache-maven-3.3.3
apache-maven-3.3.9
apache-maven-3.5.0
apache-maven-3.5.2
apache-maven-3.5.4
latest
latest2
latest3


===
Verifying compile level compatibility with HBase 0.98 with Phoenix 
4.x-HBase-0.98
===

Cloning into 'hbase'...
Switched to a new branch '0.98'
Branch 0.98 set up to track remote branch 0.98 from origin.
[ERROR] [ERROR] Some problems were encountered while processing the POMs:
[ERROR] Unresolveable build extension: Plugin 
org.apache.felix:maven-bundle-plugin:2.5.3 or one of its dependencies could not 
be resolved: Failed to read artifact descriptor for 
org.apache.felix:maven-bundle-plugin:jar:2.5.3 @ 
 @ 
[ERROR] The build could not read 1 project -> [Help 1]
[ERROR]   
[ERROR]   The project org.apache.hbase:hbase:0.98.25-SNAPSHOT 
(
 has 1 error
[ERROR] Unresolveable build extension: Plugin 
org.apache.felix:maven-bundle-plugin:2.5.3 or one of its dependencies could not 
be resolved: Failed to read artifact descriptor for 
org.apache.felix:maven-bundle-plugin:jar:2.5.3: Could not transfer artifact 
org.apache.felix:maven-bundle-plugin:pom:2.5.3 from/to central 
(https://repo.maven.apache.org/maven2): Received fatal alert: protocol_version 
-> [Help 2]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e 
switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please 
read the following articles:
[ERROR] [Help 1] 
http://cwiki.apache.org/confluence/display/MAVEN/ProjectBuildingException
[ERROR] [Help 2] 
http://cwiki.apache.org/confluence/display/MAVEN/PluginManagerException
Build step 'Execute shell' marked build as failure


Jenkins build is back to normal : Phoenix | Master #2067

2018-07-25 Thread Apache Jenkins Server
See 




Jenkins build is back to normal : Phoenix-4.x-HBase-1.3 #170

2018-07-25 Thread Apache Jenkins Server
See 




phoenix git commit: PHOENIX-4797 file not found or file exist exception when create global index use -snapshot option

2018-07-25 Thread karanmehta93
Repository: phoenix
Updated Branches:
  refs/heads/5.x-HBase-2.0 360fb805d -> f2781d438


PHOENIX-4797 file not found or file exist exception when create global index 
use -snapshot option


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f2781d43
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f2781d43
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f2781d43

Branch: refs/heads/5.x-HBase-2.0
Commit: f2781d43852557fa3a7b571047d13ab6f1530a7f
Parents: 360fb80
Author: 492066199 <492066...@qq.com>
Authored: Fri Jul 6 10:45:38 2018 +0800
Committer: Karan Mehta 
Committed: Wed Jul 25 10:17:16 2018 -0700

--
 .../org/apache/phoenix/iterate/TableSnapshotResultIterator.java  | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f2781d43/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableSnapshotResultIterator.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableSnapshotResultIterator.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableSnapshotResultIterator.java
index 984cb84..31746ce 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableSnapshotResultIterator.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableSnapshotResultIterator.java
@@ -24,6 +24,7 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
+import java.util.UUID;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -65,7 +66,8 @@ public class TableSnapshotResultIterator implements 
ResultIterator {
 this.scan = scan;
 this.scanMetricsHolder = scanMetricsHolder;
 this.scanIterator = UNINITIALIZED_SCANNER;
-this.restoreDir = new 
Path(configuration.get(PhoenixConfigurationUtil.RESTORE_DIR_KEY));
+this.restoreDir = new 
Path(configuration.get(PhoenixConfigurationUtil.RESTORE_DIR_KEY),
+UUID.randomUUID().toString());
 this.snapshotName = configuration.get(
 PhoenixConfigurationUtil.SNAPSHOT_NAME_KEY);
 this.rootDir = FSUtils.getRootDir(configuration);



[1/4] phoenix git commit: PHOENIX-4817 Fixed Phoenix Tracing Web Application (fixed check null, trace table name, webapp path, column names) and traceserver.py

2018-07-25 Thread elserj
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.2 8a1925e39 -> faf4fb264
  refs/heads/4.x-HBase-1.3 13014204d -> 1f7e3206b
  refs/heads/5.x-HBase-2.0 f2781d438 -> 8a874cc95
  refs/heads/master f7927153a -> 6b363b3a2


PHOENIX-4817 Fixed Phoenix Tracing Web Application (fixed check null, trace 
table name, webapp path, column names) and traceserver.py

Closes #311

Signed-off-by: Josh Elser 


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6b363b3a
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6b363b3a
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6b363b3a

Branch: refs/heads/master
Commit: 6b363b3a25b024539052b12169402eff78e34719
Parents: f792715
Author: Vitaly Monastyrev 
Authored: Tue Jul 24 12:14:12 2018 -0400
Committer: Josh Elser 
Committed: Wed Jul 25 10:27:54 2018 -0400

--
 bin/traceserver.py  |  6 +-
 .../apache/phoenix/tracingwebapp/http/Main.java | 13 +--
 .../tracingwebapp/http/TraceServlet.java| 87 +++-
 3 files changed, 62 insertions(+), 44 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6b363b3a/bin/traceserver.py
--
diff --git a/bin/traceserver.py b/bin/traceserver.py
index 665099e..62e168c 100755
--- a/bin/traceserver.py
+++ b/bin/traceserver.py
@@ -116,8 +116,10 @@ else:
 
 #" -Xdebug -Xrunjdwp:transport=dt_socket,address=5005,server=y,suspend=n " 
+ \
 #" -XX:+UnlockCommercialFeatures -XX:+FlightRecorder 
-XX:FlightRecorderOptions=defaultrecording=true,dumponexit=true" + \
-java_cmd = '%(java)s $PHOENIX_OPTS ' + \
-'-cp ' + hbase_config_path + os.pathsep + 
phoenix_utils.phoenix_traceserver_jar + os.pathsep + 
phoenix_utils.phoenix_client_jar + \
+java_cmd = '%(java)s  ' + \
+'-cp ' + hbase_config_path + os.pathsep + 
phoenix_utils.phoenix_traceserver_jar + os.pathsep + \
+phoenix_utils.phoenix_client_jar + os.pathsep + 
phoenix_utils.phoenix_queryserver_jar + \
+os.pathsep + phoenix_utils.hadoop_classpath + \
 " -Dproc_phoenixtraceserver" + \
 " -Dlog4j.configuration=file:" + os.path.join(phoenix_utils.current_dir, 
"log4j.properties") + \
 " -Dpsql.root.logger=%(root_logger)s" + \

http://git-wip-us.apache.org/repos/asf/phoenix/blob/6b363b3a/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
--
diff --git 
a/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
 
b/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
index 5875fc1..249f8e6 100755
--- 
a/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
+++ 
b/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
@@ -47,6 +47,7 @@ public final class Main extends Configured implements Tool {
 public static final String TRACE_SERVER_HTTP_JETTY_HOME_KEY =
 "phoenix.traceserver.http.home";
 public static final String DEFAULT_HTTP_HOME = "/";
+public static final String DEFAULT_WEBAPP_DIR_LOCATION = "src/main/webapp";
 
 public static void main(String[] args) throws Exception {
 int ret = ToolRunner.run(HBaseConfiguration.create(), new Main(), 
args);
@@ -62,15 +63,17 @@ public final class Main extends Configured implements Tool {
 final String home = getConf().get(TRACE_SERVER_HTTP_JETTY_HOME_KEY,
 DEFAULT_HTTP_HOME);
 //setting up the embedded server
-ProtectionDomain domain = Main.class.getProtectionDomain();
-URL location = domain.getCodeSource().getLocation();
-String webappDirLocation = location.toString().split("target")[0] 
+"src/main/webapp";
 Server server = new Server(port);
 WebAppContext root = new WebAppContext();
 
+URL webAppDir = 
Thread.currentThread().getContextClassLoader().getResource(DEFAULT_WEBAPP_DIR_LOCATION);
+if (webAppDir == null) {
+throw new RuntimeException(String.format("No %s directory was 
found into the JAR file", DEFAULT_WEBAPP_DIR_LOCATION));
+}
+
 root.setContextPath(home);
-root.setDescriptor(webappDirLocation + "/WEB-INF/web.xml");
-root.setResourceBase(webappDirLocation);
+root.setDescriptor(DEFAULT_WEBAPP_DIR_LOCATION + "/WEB-INF/web.xml");
+root.setResourceBase(webAppDir.toURI().toString());
 root.setParentLoaderPriority(true);
 server.setHandler(root);
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/6b363b3a/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/TraceServlet.java
-

[3/4] phoenix git commit: PHOENIX-4817 Fixed Phoenix Tracing Web Application (fixed check null, trace table name, webapp path, column names) and traceserver.py

2018-07-25 Thread elserj
PHOENIX-4817 Fixed Phoenix Tracing Web Application (fixed check null, trace 
table name, webapp path, column names) and traceserver.py

Closes #311

Signed-off-by: Josh Elser 


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8a874cc9
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8a874cc9
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8a874cc9

Branch: refs/heads/5.x-HBase-2.0
Commit: 8a874cc95c3365a566453fb9ebbe8e31b6d51b38
Parents: f2781d4
Author: Vitaly Monastyrev 
Authored: Tue Jul 24 12:14:12 2018 -0400
Committer: Josh Elser 
Committed: Wed Jul 25 14:07:01 2018 -0400

--
 bin/traceserver.py  |  6 +-
 .../apache/phoenix/tracingwebapp/http/Main.java | 13 +--
 .../tracingwebapp/http/TraceServlet.java| 87 +++-
 3 files changed, 62 insertions(+), 44 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/8a874cc9/bin/traceserver.py
--
diff --git a/bin/traceserver.py b/bin/traceserver.py
index 665099e..62e168c 100755
--- a/bin/traceserver.py
+++ b/bin/traceserver.py
@@ -116,8 +116,10 @@ else:
 
 #" -Xdebug -Xrunjdwp:transport=dt_socket,address=5005,server=y,suspend=n " 
+ \
 #" -XX:+UnlockCommercialFeatures -XX:+FlightRecorder 
-XX:FlightRecorderOptions=defaultrecording=true,dumponexit=true" + \
-java_cmd = '%(java)s $PHOENIX_OPTS ' + \
-'-cp ' + hbase_config_path + os.pathsep + 
phoenix_utils.phoenix_traceserver_jar + os.pathsep + 
phoenix_utils.phoenix_client_jar + \
+java_cmd = '%(java)s  ' + \
+'-cp ' + hbase_config_path + os.pathsep + 
phoenix_utils.phoenix_traceserver_jar + os.pathsep + \
+phoenix_utils.phoenix_client_jar + os.pathsep + 
phoenix_utils.phoenix_queryserver_jar + \
+os.pathsep + phoenix_utils.hadoop_classpath + \
 " -Dproc_phoenixtraceserver" + \
 " -Dlog4j.configuration=file:" + os.path.join(phoenix_utils.current_dir, 
"log4j.properties") + \
 " -Dpsql.root.logger=%(root_logger)s" + \

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8a874cc9/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
--
diff --git 
a/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
 
b/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
index 5875fc1..249f8e6 100755
--- 
a/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
+++ 
b/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
@@ -47,6 +47,7 @@ public final class Main extends Configured implements Tool {
 public static final String TRACE_SERVER_HTTP_JETTY_HOME_KEY =
 "phoenix.traceserver.http.home";
 public static final String DEFAULT_HTTP_HOME = "/";
+public static final String DEFAULT_WEBAPP_DIR_LOCATION = "src/main/webapp";
 
 public static void main(String[] args) throws Exception {
 int ret = ToolRunner.run(HBaseConfiguration.create(), new Main(), 
args);
@@ -62,15 +63,17 @@ public final class Main extends Configured implements Tool {
 final String home = getConf().get(TRACE_SERVER_HTTP_JETTY_HOME_KEY,
 DEFAULT_HTTP_HOME);
 //setting up the embedded server
-ProtectionDomain domain = Main.class.getProtectionDomain();
-URL location = domain.getCodeSource().getLocation();
-String webappDirLocation = location.toString().split("target")[0] 
+"src/main/webapp";
 Server server = new Server(port);
 WebAppContext root = new WebAppContext();
 
+URL webAppDir = 
Thread.currentThread().getContextClassLoader().getResource(DEFAULT_WEBAPP_DIR_LOCATION);
+if (webAppDir == null) {
+throw new RuntimeException(String.format("No %s directory was 
found into the JAR file", DEFAULT_WEBAPP_DIR_LOCATION));
+}
+
 root.setContextPath(home);
-root.setDescriptor(webappDirLocation + "/WEB-INF/web.xml");
-root.setResourceBase(webappDirLocation);
+root.setDescriptor(DEFAULT_WEBAPP_DIR_LOCATION + "/WEB-INF/web.xml");
+root.setResourceBase(webAppDir.toURI().toString());
 root.setParentLoaderPriority(true);
 server.setHandler(root);
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8a874cc9/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/TraceServlet.java
--
diff --git 
a/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/TraceServlet.java
 
b/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/TraceServlet.java

[2/4] phoenix git commit: PHOENIX-4817 Fixed Phoenix Tracing Web Application (fixed check null, trace table name, webapp path, column names) and traceserver.py

2018-07-25 Thread elserj
PHOENIX-4817 Fixed Phoenix Tracing Web Application (fixed check null, trace 
table name, webapp path, column names) and traceserver.py

Closes #311

Signed-off-by: Josh Elser 


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/faf4fb26
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/faf4fb26
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/faf4fb26

Branch: refs/heads/4.x-HBase-1.2
Commit: faf4fb264aa89da0197a34e402cb3f6bde5ab153
Parents: 8a1925e
Author: Vitaly Monastyrev 
Authored: Tue Jul 24 12:14:12 2018 -0400
Committer: Josh Elser 
Committed: Wed Jul 25 13:55:10 2018 -0400

--
 bin/traceserver.py  |  6 +-
 .../apache/phoenix/tracingwebapp/http/Main.java | 13 +--
 .../tracingwebapp/http/TraceServlet.java| 87 +++-
 3 files changed, 62 insertions(+), 44 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/faf4fb26/bin/traceserver.py
--
diff --git a/bin/traceserver.py b/bin/traceserver.py
index 665099e..62e168c 100755
--- a/bin/traceserver.py
+++ b/bin/traceserver.py
@@ -116,8 +116,10 @@ else:
 
 #" -Xdebug -Xrunjdwp:transport=dt_socket,address=5005,server=y,suspend=n " 
+ \
 #" -XX:+UnlockCommercialFeatures -XX:+FlightRecorder 
-XX:FlightRecorderOptions=defaultrecording=true,dumponexit=true" + \
-java_cmd = '%(java)s $PHOENIX_OPTS ' + \
-'-cp ' + hbase_config_path + os.pathsep + 
phoenix_utils.phoenix_traceserver_jar + os.pathsep + 
phoenix_utils.phoenix_client_jar + \
+java_cmd = '%(java)s  ' + \
+'-cp ' + hbase_config_path + os.pathsep + 
phoenix_utils.phoenix_traceserver_jar + os.pathsep + \
+phoenix_utils.phoenix_client_jar + os.pathsep + 
phoenix_utils.phoenix_queryserver_jar + \
+os.pathsep + phoenix_utils.hadoop_classpath + \
 " -Dproc_phoenixtraceserver" + \
 " -Dlog4j.configuration=file:" + os.path.join(phoenix_utils.current_dir, 
"log4j.properties") + \
 " -Dpsql.root.logger=%(root_logger)s" + \

http://git-wip-us.apache.org/repos/asf/phoenix/blob/faf4fb26/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
--
diff --git 
a/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
 
b/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
index 5875fc1..249f8e6 100755
--- 
a/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
+++ 
b/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
@@ -47,6 +47,7 @@ public final class Main extends Configured implements Tool {
 public static final String TRACE_SERVER_HTTP_JETTY_HOME_KEY =
 "phoenix.traceserver.http.home";
 public static final String DEFAULT_HTTP_HOME = "/";
+public static final String DEFAULT_WEBAPP_DIR_LOCATION = "src/main/webapp";
 
 public static void main(String[] args) throws Exception {
 int ret = ToolRunner.run(HBaseConfiguration.create(), new Main(), 
args);
@@ -62,15 +63,17 @@ public final class Main extends Configured implements Tool {
 final String home = getConf().get(TRACE_SERVER_HTTP_JETTY_HOME_KEY,
 DEFAULT_HTTP_HOME);
 //setting up the embedded server
-ProtectionDomain domain = Main.class.getProtectionDomain();
-URL location = domain.getCodeSource().getLocation();
-String webappDirLocation = location.toString().split("target")[0] 
+"src/main/webapp";
 Server server = new Server(port);
 WebAppContext root = new WebAppContext();
 
+URL webAppDir = 
Thread.currentThread().getContextClassLoader().getResource(DEFAULT_WEBAPP_DIR_LOCATION);
+if (webAppDir == null) {
+throw new RuntimeException(String.format("No %s directory was 
found into the JAR file", DEFAULT_WEBAPP_DIR_LOCATION));
+}
+
 root.setContextPath(home);
-root.setDescriptor(webappDirLocation + "/WEB-INF/web.xml");
-root.setResourceBase(webappDirLocation);
+root.setDescriptor(DEFAULT_WEBAPP_DIR_LOCATION + "/WEB-INF/web.xml");
+root.setResourceBase(webAppDir.toURI().toString());
 root.setParentLoaderPriority(true);
 server.setHandler(root);
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/faf4fb26/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/TraceServlet.java
--
diff --git 
a/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/TraceServlet.java
 
b/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/TraceServlet.java

[4/4] phoenix git commit: PHOENIX-4817 Fixed Phoenix Tracing Web Application (fixed check null, trace table name, webapp path, column names) and traceserver.py

2018-07-25 Thread elserj
PHOENIX-4817 Fixed Phoenix Tracing Web Application (fixed check null, trace 
table name, webapp path, column names) and traceserver.py

Closes #311

Signed-off-by: Josh Elser 


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/1f7e3206
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/1f7e3206
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/1f7e3206

Branch: refs/heads/4.x-HBase-1.3
Commit: 1f7e3206bc0b1039b4b8a5fa9a2c8d06eb64e9d3
Parents: 1301420
Author: Vitaly Monastyrev 
Authored: Tue Jul 24 12:14:12 2018 -0400
Committer: Josh Elser 
Committed: Wed Jul 25 14:29:27 2018 -0400

--
 bin/traceserver.py  |  6 +-
 .../apache/phoenix/tracingwebapp/http/Main.java | 13 +--
 .../tracingwebapp/http/TraceServlet.java| 87 +++-
 3 files changed, 62 insertions(+), 44 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/1f7e3206/bin/traceserver.py
--
diff --git a/bin/traceserver.py b/bin/traceserver.py
index 665099e..62e168c 100755
--- a/bin/traceserver.py
+++ b/bin/traceserver.py
@@ -116,8 +116,10 @@ else:
 
 #" -Xdebug -Xrunjdwp:transport=dt_socket,address=5005,server=y,suspend=n " 
+ \
 #" -XX:+UnlockCommercialFeatures -XX:+FlightRecorder 
-XX:FlightRecorderOptions=defaultrecording=true,dumponexit=true" + \
-java_cmd = '%(java)s $PHOENIX_OPTS ' + \
-'-cp ' + hbase_config_path + os.pathsep + 
phoenix_utils.phoenix_traceserver_jar + os.pathsep + 
phoenix_utils.phoenix_client_jar + \
+java_cmd = '%(java)s  ' + \
+'-cp ' + hbase_config_path + os.pathsep + 
phoenix_utils.phoenix_traceserver_jar + os.pathsep + \
+phoenix_utils.phoenix_client_jar + os.pathsep + 
phoenix_utils.phoenix_queryserver_jar + \
+os.pathsep + phoenix_utils.hadoop_classpath + \
 " -Dproc_phoenixtraceserver" + \
 " -Dlog4j.configuration=file:" + os.path.join(phoenix_utils.current_dir, 
"log4j.properties") + \
 " -Dpsql.root.logger=%(root_logger)s" + \

http://git-wip-us.apache.org/repos/asf/phoenix/blob/1f7e3206/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
--
diff --git 
a/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
 
b/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
index 5875fc1..249f8e6 100755
--- 
a/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
+++ 
b/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
@@ -47,6 +47,7 @@ public final class Main extends Configured implements Tool {
 public static final String TRACE_SERVER_HTTP_JETTY_HOME_KEY =
 "phoenix.traceserver.http.home";
 public static final String DEFAULT_HTTP_HOME = "/";
+public static final String DEFAULT_WEBAPP_DIR_LOCATION = "src/main/webapp";
 
 public static void main(String[] args) throws Exception {
 int ret = ToolRunner.run(HBaseConfiguration.create(), new Main(), 
args);
@@ -62,15 +63,17 @@ public final class Main extends Configured implements Tool {
 final String home = getConf().get(TRACE_SERVER_HTTP_JETTY_HOME_KEY,
 DEFAULT_HTTP_HOME);
 //setting up the embedded server
-ProtectionDomain domain = Main.class.getProtectionDomain();
-URL location = domain.getCodeSource().getLocation();
-String webappDirLocation = location.toString().split("target")[0] 
+"src/main/webapp";
 Server server = new Server(port);
 WebAppContext root = new WebAppContext();
 
+URL webAppDir = 
Thread.currentThread().getContextClassLoader().getResource(DEFAULT_WEBAPP_DIR_LOCATION);
+if (webAppDir == null) {
+throw new RuntimeException(String.format("No %s directory was 
found into the JAR file", DEFAULT_WEBAPP_DIR_LOCATION));
+}
+
 root.setContextPath(home);
-root.setDescriptor(webappDirLocation + "/WEB-INF/web.xml");
-root.setResourceBase(webappDirLocation);
+root.setDescriptor(DEFAULT_WEBAPP_DIR_LOCATION + "/WEB-INF/web.xml");
+root.setResourceBase(webAppDir.toURI().toString());
 root.setParentLoaderPriority(true);
 server.setHandler(root);
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/1f7e3206/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/TraceServlet.java
--
diff --git 
a/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/TraceServlet.java
 
b/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/TraceServlet.java

Build failed in Jenkins: Phoenix-4.x-HBase-1.2 #399

2018-07-25 Thread Apache Jenkins Server
See 

--
Started by an SCM change
Started by an SCM change
[EnvInject] - Loading node environment variables.
Building remotely on H24 (ubuntu xenial) in workspace 

 > git rev-parse --is-inside-work-tree # timeout=10
Fetching changes from the remote Git repository
 > git config remote.origin.url 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git # timeout=10
Fetching upstream changes from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
 > git --version # timeout=10
 > git fetch --tags --progress 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git 
 > +refs/heads/*:refs/remotes/origin/*
ERROR: Error fetching remote repo 'origin'
hudson.plugins.git.GitException: Failed to fetch from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
at hudson.plugins.git.GitSCM.fetchFrom(GitSCM.java:888)
at hudson.plugins.git.GitSCM.retrieveChanges(GitSCM.java:1155)
at hudson.plugins.git.GitSCM.checkout(GitSCM.java:1186)
at hudson.scm.SCM.checkout(SCM.java:504)
at hudson.model.AbstractProject.checkout(AbstractProject.java:1208)
at 
hudson.model.AbstractBuild$AbstractBuildExecution.defaultCheckout(AbstractBuild.java:574)
at jenkins.scm.SCMCheckoutStrategy.checkout(SCMCheckoutStrategy.java:86)
at 
hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:499)
at hudson.model.Run.execute(Run.java:1794)
at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
at hudson.model.ResourceController.execute(ResourceController.java:97)
at hudson.model.Executor.run(Executor.java:429)
Caused by: hudson.plugins.git.GitException: Command "git fetch --tags 
--progress https://git-wip-us.apache.org/repos/asf/phoenix.git 
+refs/heads/*:refs/remotes/origin/*" returned status code 128:
stdout: 
stderr: error: Could not read 4cab4c270e91b48b4ebd19487986cfd864e02476
error: Could not read bc4ca79ee0f7c1074f390c8ad8cd7b0bc51169f3
error: Could not read 766248ba95cb3a60dc5f3acd9b26d6dc9276d0e1
error: Could not read f46e8bbcd48674f7de217feeac2679a593b042be
error: Could not read 7f4730ed7d433b4d00abd1d28305f4fbbdf1e8f8
remote: Counting objects: 73046   remote: Counting objects: 98032, 
done.
remote: Compressing objects:   0% (1/46220)   remote: Compressing 
objects:   1% (463/46220)   remote: Compressing objects:   2% 
(925/46220)   remote: Compressing objects:   3% (1387/46220)   
remote: Compressing objects:   4% (1849/46220)   remote: Compressing 
objects:   5% (2311/46220)   remote: Compressing objects:   6% 
(2774/46220)   remote: Compressing objects:   7% (3236/46220)   
remote: Compressing objects:   8% (3698/46220)   remote: Compressing 
objects:   9% (4160/46220)   remote: Compressing objects:  10% 
(4622/46220)   remote: Compressing objects:  11% (5085/46220)   
remote: Compressing objects:  12% (5547/46220)   remote: Compressing 
objects:  13% (6009/46220)   remote: Compressing objects:  14% 
(6471/46220)   remote: Compressing objects:  15% (6933/46220)   
remote: Compressing objects:  16% (7396/46220)   remote: Compressing 
objects:  17% (7858/46220)   remote: Compressing objects:  18% 
(8320/46220)   remote: Compressing objects:  19% (8782/46220)   
remote: Compressing objects:  20% (9244/46220)   remote: Compressing 
objects:  21% (9707/46220)   remote: Compressing objects:  22% 
(10169/46220)   remote: Compressing objects:  23% (10631/46220) 
  remote: Compressing objects:  24% (11093/46220)   remote: Compressing 
objects:  25% (11555/46220)   remote: Compressing objects:  26% 
(12018/46220)   remote: Compressing objects:  27% (12480/46220) 
  remote: Compressing objects:  28% (12942/46220)   remote: Compressing 
objects:  29% (13404/46220)   remote: Compressing objects:  30% 
(13866/46220)   remote: Compressing objects:  31% (14329/46220) 
  remote: Compressing objects:  32% (14791/46220)   remote: Compressing 
objects:  33% (15253/46220)   remote: Compressing objects:  34% 
(15715/46220)   remote: Compressing objects:  35% (16177/46220) 
  remote: Compressing objects:  36% (16640/46220)   remote: Compressing 
objects:  37% (17102/46220)   remote: Compressing objects:  38% 
(17564/46220)   remote: Compressing objects:  39% (18026/46220) 
  remote: Compressing objects:  40% (18488/46220)   remote: Compressing 
objects:  41% (18951/46220)   remote: Compressing objects:  42% 
(19413/46220)   remote: Compressing objects:  43% (19875/46220) 
  remote: Compressing objects:  44% (20337/46

Build failed in Jenkins: Phoenix-4.x-HBase-1.2 #400

2018-07-25 Thread Apache Jenkins Server
See 

--
Started by an SCM change
Started by an SCM change
[EnvInject] - Loading node environment variables.
Building remotely on H24 (ubuntu xenial) in workspace 

 > git rev-parse --is-inside-work-tree # timeout=10
Fetching changes from the remote Git repository
 > git config remote.origin.url 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git # timeout=10
Fetching upstream changes from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
 > git --version # timeout=10
 > git fetch --tags --progress 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git 
 > +refs/heads/*:refs/remotes/origin/*
ERROR: Error fetching remote repo 'origin'
hudson.plugins.git.GitException: Failed to fetch from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
at hudson.plugins.git.GitSCM.fetchFrom(GitSCM.java:888)
at hudson.plugins.git.GitSCM.retrieveChanges(GitSCM.java:1155)
at hudson.plugins.git.GitSCM.checkout(GitSCM.java:1186)
at hudson.scm.SCM.checkout(SCM.java:504)
at hudson.model.AbstractProject.checkout(AbstractProject.java:1208)
at 
hudson.model.AbstractBuild$AbstractBuildExecution.defaultCheckout(AbstractBuild.java:574)
at jenkins.scm.SCMCheckoutStrategy.checkout(SCMCheckoutStrategy.java:86)
at 
hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:499)
at hudson.model.Run.execute(Run.java:1794)
at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
at hudson.model.ResourceController.execute(ResourceController.java:97)
at hudson.model.Executor.run(Executor.java:429)
Caused by: hudson.plugins.git.GitException: Command "git fetch --tags 
--progress https://git-wip-us.apache.org/repos/asf/phoenix.git 
+refs/heads/*:refs/remotes/origin/*" returned status code 128:
stdout: 
stderr: error: Could not read 4cab4c270e91b48b4ebd19487986cfd864e02476
error: Could not read bc4ca79ee0f7c1074f390c8ad8cd7b0bc51169f3
error: Could not read 766248ba95cb3a60dc5f3acd9b26d6dc9276d0e1
error: Could not read f46e8bbcd48674f7de217feeac2679a593b042be
error: Could not read 7f4730ed7d433b4d00abd1d28305f4fbbdf1e8f8
remote: Counting objects: 67066   remote: Counting objects: 98032, 
done.
remote: Compressing objects:   0% (1/46220)   remote: Compressing 
objects:   1% (463/46220)   remote: Compressing objects:   2% 
(925/46220)   remote: Compressing objects:   3% (1387/46220)   
remote: Compressing objects:   4% (1849/46220)   remote: Compressing 
objects:   5% (2311/46220)   remote: Compressing objects:   6% 
(2774/46220)   remote: Compressing objects:   7% (3236/46220)   
remote: Compressing objects:   8% (3698/46220)   remote: Compressing 
objects:   9% (4160/46220)   remote: Compressing objects:  10% 
(4622/46220)   remote: Compressing objects:  11% (5085/46220)   
remote: Compressing objects:  12% (5547/46220)   remote: Compressing 
objects:  13% (6009/46220)   remote: Compressing objects:  14% 
(6471/46220)   remote: Compressing objects:  15% (6933/46220)   
remote: Compressing objects:  16% (7396/46220)   remote: Compressing 
objects:  17% (7858/46220)   remote: Compressing objects:  18% 
(8320/46220)   remote: Compressing objects:  19% (8782/46220)   
remote: Compressing objects:  20% (9244/46220)   remote: Compressing 
objects:  21% (9707/46220)   remote: Compressing objects:  22% 
(10169/46220)   remote: Compressing objects:  23% (10631/46220) 
  remote: Compressing objects:  24% (11093/46220)   remote: Compressing 
objects:  25% (11555/46220)   remote: Compressing objects:  26% 
(12018/46220)   remote: Compressing objects:  27% (12480/46220) 
  remote: Compressing objects:  28% (12942/46220)   remote: Compressing 
objects:  29% (13404/46220)   remote: Compressing objects:  30% 
(13866/46220)   remote: Compressing objects:  31% (14329/46220) 
  remote: Compressing objects:  32% (14791/46220)   remote: Compressing 
objects:  33% (15253/46220)   remote: Compressing objects:  34% 
(15715/46220)   remote: Compressing objects:  35% (16177/46220) 
  remote: Compressing objects:  36% (16640/46220)   remote: Compressing 
objects:  37% (17102/46220)   remote: Compressing objects:  38% 
(17564/46220)   remote: Compressing objects:  39% (18026/46220) 
  remote: Compressing objects:  40% (18488/46220)   remote: Compressing 
objects:  41% (18951/46220)   remote: Compressing objects:  42% 
(19413/46220)   remote: Compressing objects:  43% (19875/46220) 
  remote: Compressing objects:  44% (20337/46

Build failed in Jenkins: Phoenix-4.x-HBase-1.2 #401

2018-07-25 Thread Apache Jenkins Server
See 

--
Started by an SCM change
[EnvInject] - Loading node environment variables.
Building remotely on H24 (ubuntu xenial) in workspace 

 > git rev-parse --is-inside-work-tree # timeout=10
Fetching changes from the remote Git repository
 > git config remote.origin.url 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git # timeout=10
Fetching upstream changes from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
 > git --version # timeout=10
 > git fetch --tags --progress 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git 
 > +refs/heads/*:refs/remotes/origin/*
ERROR: Error fetching remote repo 'origin'
hudson.plugins.git.GitException: Failed to fetch from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
at hudson.plugins.git.GitSCM.fetchFrom(GitSCM.java:888)
at hudson.plugins.git.GitSCM.retrieveChanges(GitSCM.java:1155)
at hudson.plugins.git.GitSCM.checkout(GitSCM.java:1186)
at hudson.scm.SCM.checkout(SCM.java:504)
at hudson.model.AbstractProject.checkout(AbstractProject.java:1208)
at 
hudson.model.AbstractBuild$AbstractBuildExecution.defaultCheckout(AbstractBuild.java:574)
at jenkins.scm.SCMCheckoutStrategy.checkout(SCMCheckoutStrategy.java:86)
at 
hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:499)
at hudson.model.Run.execute(Run.java:1794)
at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
at hudson.model.ResourceController.execute(ResourceController.java:97)
at hudson.model.Executor.run(Executor.java:429)
Caused by: hudson.plugins.git.GitException: Command "git fetch --tags 
--progress https://git-wip-us.apache.org/repos/asf/phoenix.git 
+refs/heads/*:refs/remotes/origin/*" returned status code 128:
stdout: 
stderr: error: Could not read 4cab4c270e91b48b4ebd19487986cfd864e02476
error: Could not read bc4ca79ee0f7c1074f390c8ad8cd7b0bc51169f3
error: Could not read 766248ba95cb3a60dc5f3acd9b26d6dc9276d0e1
error: Could not read f46e8bbcd48674f7de217feeac2679a593b042be
error: Could not read 7f4730ed7d433b4d00abd1d28305f4fbbdf1e8f8
remote: Counting objects: 71871   remote: Counting objects: 98032, 
done.
remote: Compressing objects:   0% (1/46220)   remote: Compressing 
objects:   1% (463/46220)   remote: Compressing objects:   2% 
(925/46220)   remote: Compressing objects:   3% (1387/46220)   
remote: Compressing objects:   4% (1849/46220)   remote: Compressing 
objects:   5% (2311/46220)   remote: Compressing objects:   6% 
(2774/46220)   remote: Compressing objects:   7% (3236/46220)   
remote: Compressing objects:   8% (3698/46220)   remote: Compressing 
objects:   9% (4160/46220)   remote: Compressing objects:  10% 
(4622/46220)   remote: Compressing objects:  11% (5085/46220)   
remote: Compressing objects:  12% (5547/46220)   remote: Compressing 
objects:  13% (6009/46220)   remote: Compressing objects:  14% 
(6471/46220)   remote: Compressing objects:  15% (6933/46220)   
remote: Compressing objects:  16% (7396/46220)   remote: Compressing 
objects:  17% (7858/46220)   remote: Compressing objects:  18% 
(8320/46220)   remote: Compressing objects:  19% (8782/46220)   
remote: Compressing objects:  20% (9244/46220)   remote: Compressing 
objects:  21% (9707/46220)   remote: Compressing objects:  22% 
(10169/46220)   remote: Compressing objects:  23% (10631/46220) 
  remote: Compressing objects:  24% (11093/46220)   remote: Compressing 
objects:  25% (11555/46220)   remote: Compressing objects:  26% 
(12018/46220)   remote: Compressing objects:  27% (12480/46220) 
  remote: Compressing objects:  28% (12942/46220)   remote: Compressing 
objects:  29% (13404/46220)   remote: Compressing objects:  30% 
(13866/46220)   remote: Compressing objects:  31% (14329/46220) 
  remote: Compressing objects:  32% (14791/46220)   remote: Compressing 
objects:  33% (15253/46220)   remote: Compressing objects:  34% 
(15715/46220)   remote: Compressing objects:  35% (16177/46220) 
  remote: Compressing objects:  36% (16640/46220)   remote: Compressing 
objects:  37% (17102/46220)   remote: Compressing objects:  38% 
(17564/46220)   remote: Compressing objects:  39% (18026/46220) 
  remote: Compressing objects:  40% (18488/46220)   remote: Compressing 
objects:  41% (18951/46220)   remote: Compressing objects:  42% 
(19413/46220)   remote: Compressing objects:  43% (19875/46220) 
  remote: Compressing objects:  44% (20337/46220)   remote: Co

Build failed in Jenkins: Phoenix-4.x-HBase-1.2 #402

2018-07-25 Thread Apache Jenkins Server
See 

--
Started by an SCM change
[EnvInject] - Loading node environment variables.
Building remotely on H24 (ubuntu xenial) in workspace 

 > git rev-parse --is-inside-work-tree # timeout=10
Fetching changes from the remote Git repository
 > git config remote.origin.url 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git # timeout=10
Fetching upstream changes from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
 > git --version # timeout=10
 > git fetch --tags --progress 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git 
 > +refs/heads/*:refs/remotes/origin/*
ERROR: Error fetching remote repo 'origin'
hudson.plugins.git.GitException: Failed to fetch from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
at hudson.plugins.git.GitSCM.fetchFrom(GitSCM.java:888)
at hudson.plugins.git.GitSCM.retrieveChanges(GitSCM.java:1155)
at hudson.plugins.git.GitSCM.checkout(GitSCM.java:1186)
at hudson.scm.SCM.checkout(SCM.java:504)
at hudson.model.AbstractProject.checkout(AbstractProject.java:1208)
at 
hudson.model.AbstractBuild$AbstractBuildExecution.defaultCheckout(AbstractBuild.java:574)
at jenkins.scm.SCMCheckoutStrategy.checkout(SCMCheckoutStrategy.java:86)
at 
hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:499)
at hudson.model.Run.execute(Run.java:1794)
at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
at hudson.model.ResourceController.execute(ResourceController.java:97)
at hudson.model.Executor.run(Executor.java:429)
Caused by: hudson.plugins.git.GitException: Command "git fetch --tags 
--progress https://git-wip-us.apache.org/repos/asf/phoenix.git 
+refs/heads/*:refs/remotes/origin/*" returned status code 128:
stdout: 
stderr: error: Could not read 4cab4c270e91b48b4ebd19487986cfd864e02476
error: Could not read bc4ca79ee0f7c1074f390c8ad8cd7b0bc51169f3
error: Could not read 766248ba95cb3a60dc5f3acd9b26d6dc9276d0e1
error: Could not read f46e8bbcd48674f7de217feeac2679a593b042be
error: Could not read 7f4730ed7d433b4d00abd1d28305f4fbbdf1e8f8
remote: Counting objects: 69732   remote: Counting objects: 98032, 
done.
remote: Compressing objects:   0% (1/46220)   remote: Compressing 
objects:   1% (463/46220)   remote: Compressing objects:   2% 
(925/46220)   remote: Compressing objects:   3% (1387/46220)   
remote: Compressing objects:   4% (1849/46220)   remote: Compressing 
objects:   5% (2311/46220)   remote: Compressing objects:   6% 
(2774/46220)   remote: Compressing objects:   7% (3236/46220)   
remote: Compressing objects:   8% (3698/46220)   remote: Compressing 
objects:   9% (4160/46220)   remote: Compressing objects:  10% 
(4622/46220)   remote: Compressing objects:  11% (5085/46220)   
remote: Compressing objects:  12% (5547/46220)   remote: Compressing 
objects:  13% (6009/46220)   remote: Compressing objects:  14% 
(6471/46220)   remote: Compressing objects:  15% (6933/46220)   
remote: Compressing objects:  16% (7396/46220)   remote: Compressing 
objects:  17% (7858/46220)   remote: Compressing objects:  18% 
(8320/46220)   remote: Compressing objects:  19% (8782/46220)   
remote: Compressing objects:  20% (9244/46220)   remote: Compressing 
objects:  21% (9707/46220)   remote: Compressing objects:  22% 
(10169/46220)   remote: Compressing objects:  23% (10631/46220) 
  remote: Compressing objects:  24% (11093/46220)   remote: Compressing 
objects:  25% (11555/46220)   remote: Compressing objects:  26% 
(12018/46220)   remote: Compressing objects:  27% (12480/46220) 
  remote: Compressing objects:  28% (12942/46220)   remote: Compressing 
objects:  29% (13404/46220)   remote: Compressing objects:  30% 
(13866/46220)   remote: Compressing objects:  31% (14329/46220) 
  remote: Compressing objects:  32% (14791/46220)   remote: Compressing 
objects:  33% (15253/46220)   remote: Compressing objects:  34% 
(15715/46220)   remote: Compressing objects:  35% (16177/46220) 
  remote: Compressing objects:  36% (16640/46220)   remote: Compressing 
objects:  37% (17102/46220)   remote: Compressing objects:  38% 
(17564/46220)   remote: Compressing objects:  39% (18026/46220) 
  remote: Compressing objects:  40% (18488/46220)   remote: Compressing 
objects:  41% (18951/46220)   remote: Compressing objects:  42% 
(19413/46220)   remote: Compressing objects:  43% (19875/46220) 
  remote: Compressing objects:  44% (20337/46220)   remote: Co

Build failed in Jenkins: Phoenix-4.x-HBase-1.2 #403

2018-07-25 Thread Apache Jenkins Server
See 

--
Started by an SCM change
[EnvInject] - Loading node environment variables.
Building remotely on H24 (ubuntu xenial) in workspace 

 > git rev-parse --is-inside-work-tree # timeout=10
Fetching changes from the remote Git repository
 > git config remote.origin.url 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git # timeout=10
Fetching upstream changes from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
 > git --version # timeout=10
 > git fetch --tags --progress 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git 
 > +refs/heads/*:refs/remotes/origin/*
ERROR: Error fetching remote repo 'origin'
hudson.plugins.git.GitException: Failed to fetch from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
at hudson.plugins.git.GitSCM.fetchFrom(GitSCM.java:888)
at hudson.plugins.git.GitSCM.retrieveChanges(GitSCM.java:1155)
at hudson.plugins.git.GitSCM.checkout(GitSCM.java:1186)
at hudson.scm.SCM.checkout(SCM.java:504)
at hudson.model.AbstractProject.checkout(AbstractProject.java:1208)
at 
hudson.model.AbstractBuild$AbstractBuildExecution.defaultCheckout(AbstractBuild.java:574)
at jenkins.scm.SCMCheckoutStrategy.checkout(SCMCheckoutStrategy.java:86)
at 
hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:499)
at hudson.model.Run.execute(Run.java:1794)
at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
at hudson.model.ResourceController.execute(ResourceController.java:97)
at hudson.model.Executor.run(Executor.java:429)
Caused by: hudson.plugins.git.GitException: Command "git fetch --tags 
--progress https://git-wip-us.apache.org/repos/asf/phoenix.git 
+refs/heads/*:refs/remotes/origin/*" returned status code 128:
stdout: 
stderr: error: Could not read 4cab4c270e91b48b4ebd19487986cfd864e02476
error: Could not read bc4ca79ee0f7c1074f390c8ad8cd7b0bc51169f3
error: Could not read 766248ba95cb3a60dc5f3acd9b26d6dc9276d0e1
error: Could not read f46e8bbcd48674f7de217feeac2679a593b042be
error: Could not read 7f4730ed7d433b4d00abd1d28305f4fbbdf1e8f8
remote: Counting objects: 69994   remote: Counting objects: 98032, 
done.
remote: Compressing objects:   0% (1/46220)   remote: Compressing 
objects:   1% (463/46220)   remote: Compressing objects:   2% 
(925/46220)   remote: Compressing objects:   3% (1387/46220)   
remote: Compressing objects:   4% (1849/46220)   remote: Compressing 
objects:   5% (2311/46220)   remote: Compressing objects:   6% 
(2774/46220)   remote: Compressing objects:   7% (3236/46220)   
remote: Compressing objects:   8% (3698/46220)   remote: Compressing 
objects:   9% (4160/46220)   remote: Compressing objects:  10% 
(4622/46220)   remote: Compressing objects:  11% (5085/46220)   
remote: Compressing objects:  12% (5547/46220)   remote: Compressing 
objects:  13% (6009/46220)   remote: Compressing objects:  14% 
(6471/46220)   remote: Compressing objects:  15% (6933/46220)   
remote: Compressing objects:  16% (7396/46220)   remote: Compressing 
objects:  17% (7858/46220)   remote: Compressing objects:  18% 
(8320/46220)   remote: Compressing objects:  19% (8782/46220)   
remote: Compressing objects:  20% (9244/46220)   remote: Compressing 
objects:  21% (9707/46220)   remote: Compressing objects:  22% 
(10169/46220)   remote: Compressing objects:  23% (10631/46220) 
  remote: Compressing objects:  24% (11093/46220)   remote: Compressing 
objects:  25% (11555/46220)   remote: Compressing objects:  26% 
(12018/46220)   remote: Compressing objects:  27% (12480/46220) 
  remote: Compressing objects:  28% (12942/46220)   remote: Compressing 
objects:  29% (13404/46220)   remote: Compressing objects:  30% 
(13866/46220)   remote: Compressing objects:  31% (14329/46220) 
  remote: Compressing objects:  32% (14791/46220)   remote: Compressing 
objects:  33% (15253/46220)   remote: Compressing objects:  34% 
(15715/46220)   remote: Compressing objects:  35% (16177/46220) 
  remote: Compressing objects:  36% (16640/46220)   remote: Compressing 
objects:  37% (17102/46220)   remote: Compressing objects:  38% 
(17564/46220)   remote: Compressing objects:  39% (18026/46220) 
  remote: Compressing objects:  40% (18488/46220)   remote: Compressing 
objects:  41% (18951/46220)   remote: Compressing objects:  42% 
(19413/46220)   remote: Compressing objects:  43% (19875/46220) 
  remote: Compressing objects:  44% (20337/46220)   remote: Co

Build failed in Jenkins: Phoenix-4.x-HBase-1.2 #404

2018-07-25 Thread Apache Jenkins Server
See 

--
Started by an SCM change
[EnvInject] - Loading node environment variables.
Building remotely on H24 (ubuntu xenial) in workspace 

 > git rev-parse --is-inside-work-tree # timeout=10
Fetching changes from the remote Git repository
 > git config remote.origin.url 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git # timeout=10
Fetching upstream changes from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
 > git --version # timeout=10
 > git fetch --tags --progress 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git 
 > +refs/heads/*:refs/remotes/origin/*
ERROR: Error fetching remote repo 'origin'
hudson.plugins.git.GitException: Failed to fetch from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
at hudson.plugins.git.GitSCM.fetchFrom(GitSCM.java:888)
at hudson.plugins.git.GitSCM.retrieveChanges(GitSCM.java:1155)
at hudson.plugins.git.GitSCM.checkout(GitSCM.java:1186)
at hudson.scm.SCM.checkout(SCM.java:504)
at hudson.model.AbstractProject.checkout(AbstractProject.java:1208)
at 
hudson.model.AbstractBuild$AbstractBuildExecution.defaultCheckout(AbstractBuild.java:574)
at jenkins.scm.SCMCheckoutStrategy.checkout(SCMCheckoutStrategy.java:86)
at 
hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:499)
at hudson.model.Run.execute(Run.java:1794)
at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
at hudson.model.ResourceController.execute(ResourceController.java:97)
at hudson.model.Executor.run(Executor.java:429)
Caused by: hudson.plugins.git.GitException: Command "git fetch --tags 
--progress https://git-wip-us.apache.org/repos/asf/phoenix.git 
+refs/heads/*:refs/remotes/origin/*" returned status code 128:
stdout: 
stderr: error: Could not read 4cab4c270e91b48b4ebd19487986cfd864e02476
error: Could not read bc4ca79ee0f7c1074f390c8ad8cd7b0bc51169f3
error: Could not read 766248ba95cb3a60dc5f3acd9b26d6dc9276d0e1
error: Could not read f46e8bbcd48674f7de217feeac2679a593b042be
error: Could not read 7f4730ed7d433b4d00abd1d28305f4fbbdf1e8f8
remote: Counting objects: 72704   remote: Counting objects: 98032, 
done.
remote: Compressing objects:   0% (1/46220)   remote: Compressing 
objects:   1% (463/46220)   remote: Compressing objects:   2% 
(925/46220)   remote: Compressing objects:   3% (1387/46220)   
remote: Compressing objects:   4% (1849/46220)   remote: Compressing 
objects:   5% (2311/46220)   remote: Compressing objects:   6% 
(2774/46220)   remote: Compressing objects:   7% (3236/46220)   
remote: Compressing objects:   8% (3698/46220)   remote: Compressing 
objects:   9% (4160/46220)   remote: Compressing objects:  10% 
(4622/46220)   remote: Compressing objects:  11% (5085/46220)   
remote: Compressing objects:  12% (5547/46220)   remote: Compressing 
objects:  13% (6009/46220)   remote: Compressing objects:  14% 
(6471/46220)   remote: Compressing objects:  15% (6933/46220)   
remote: Compressing objects:  16% (7396/46220)   remote: Compressing 
objects:  17% (7858/46220)   remote: Compressing objects:  18% 
(8320/46220)   remote: Compressing objects:  19% (8782/46220)   
remote: Compressing objects:  20% (9244/46220)   remote: Compressing 
objects:  21% (9707/46220)   remote: Compressing objects:  22% 
(10169/46220)   remote: Compressing objects:  23% (10631/46220) 
  remote: Compressing objects:  24% (11093/46220)   remote: Compressing 
objects:  25% (11555/46220)   remote: Compressing objects:  26% 
(12018/46220)   remote: Compressing objects:  27% (12480/46220) 
  remote: Compressing objects:  28% (12942/46220)   remote: Compressing 
objects:  29% (13404/46220)   remote: Compressing objects:  30% 
(13866/46220)   remote: Compressing objects:  31% (14329/46220) 
  remote: Compressing objects:  32% (14791/46220)   remote: Compressing 
objects:  33% (15253/46220)   remote: Compressing objects:  34% 
(15715/46220)   remote: Compressing objects:  35% (16177/46220) 
  remote: Compressing objects:  36% (16640/46220)   remote: Compressing 
objects:  37% (17102/46220)   remote: Compressing objects:  38% 
(17564/46220)   remote: Compressing objects:  39% (18026/46220) 
  remote: Compressing objects:  40% (18488/46220)   remote: Compressing 
objects:  41% (18951/46220)   remote: Compressing objects:  42% 
(19413/46220)   remote: Compressing objects:  43% (19875/46220) 
  remote: Compressing objects:  44% (20337/46220)   remote: Co

Build failed in Jenkins: Phoenix-4.x-HBase-1.2 #405

2018-07-25 Thread Apache Jenkins Server
See 

--
Started by an SCM change
[EnvInject] - Loading node environment variables.
Building remotely on H24 (ubuntu xenial) in workspace 

 > git rev-parse --is-inside-work-tree # timeout=10
Fetching changes from the remote Git repository
 > git config remote.origin.url 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git # timeout=10
Fetching upstream changes from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
 > git --version # timeout=10
 > git fetch --tags --progress 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git 
 > +refs/heads/*:refs/remotes/origin/*
ERROR: Error fetching remote repo 'origin'
hudson.plugins.git.GitException: Failed to fetch from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
at hudson.plugins.git.GitSCM.fetchFrom(GitSCM.java:888)
at hudson.plugins.git.GitSCM.retrieveChanges(GitSCM.java:1155)
at hudson.plugins.git.GitSCM.checkout(GitSCM.java:1186)
at hudson.scm.SCM.checkout(SCM.java:504)
at hudson.model.AbstractProject.checkout(AbstractProject.java:1208)
at 
hudson.model.AbstractBuild$AbstractBuildExecution.defaultCheckout(AbstractBuild.java:574)
at jenkins.scm.SCMCheckoutStrategy.checkout(SCMCheckoutStrategy.java:86)
at 
hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:499)
at hudson.model.Run.execute(Run.java:1794)
at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
at hudson.model.ResourceController.execute(ResourceController.java:97)
at hudson.model.Executor.run(Executor.java:429)
Caused by: hudson.plugins.git.GitException: Command "git fetch --tags 
--progress https://git-wip-us.apache.org/repos/asf/phoenix.git 
+refs/heads/*:refs/remotes/origin/*" returned status code 128:
stdout: 
stderr: error: Could not read 4cab4c270e91b48b4ebd19487986cfd864e02476
error: Could not read bc4ca79ee0f7c1074f390c8ad8cd7b0bc51169f3
error: Could not read 766248ba95cb3a60dc5f3acd9b26d6dc9276d0e1
error: Could not read f46e8bbcd48674f7de217feeac2679a593b042be
error: Could not read 7f4730ed7d433b4d00abd1d28305f4fbbdf1e8f8
remote: Counting objects: 69995   remote: Counting objects: 98032, 
done.
remote: Compressing objects:   0% (1/46220)   remote: Compressing 
objects:   1% (463/46220)   remote: Compressing objects:   2% 
(925/46220)   remote: Compressing objects:   3% (1387/46220)   
remote: Compressing objects:   4% (1849/46220)   remote: Compressing 
objects:   5% (2311/46220)   remote: Compressing objects:   6% 
(2774/46220)   remote: Compressing objects:   7% (3236/46220)   
remote: Compressing objects:   8% (3698/46220)   remote: Compressing 
objects:   9% (4160/46220)   remote: Compressing objects:  10% 
(4622/46220)   remote: Compressing objects:  11% (5085/46220)   
remote: Compressing objects:  12% (5547/46220)   remote: Compressing 
objects:  13% (6009/46220)   remote: Compressing objects:  14% 
(6471/46220)   remote: Compressing objects:  15% (6933/46220)   
remote: Compressing objects:  16% (7396/46220)   remote: Compressing 
objects:  17% (7858/46220)   remote: Compressing objects:  18% 
(8320/46220)   remote: Compressing objects:  19% (8782/46220)   
remote: Compressing objects:  20% (9244/46220)   remote: Compressing 
objects:  21% (9707/46220)   remote: Compressing objects:  22% 
(10169/46220)   remote: Compressing objects:  23% (10631/46220) 
  remote: Compressing objects:  24% (11093/46220)   remote: Compressing 
objects:  25% (11555/46220)   remote: Compressing objects:  26% 
(12018/46220)   remote: Compressing objects:  27% (12480/46220) 
  remote: Compressing objects:  28% (12942/46220)   remote: Compressing 
objects:  29% (13404/46220)   remote: Compressing objects:  30% 
(13866/46220)   remote: Compressing objects:  31% (14329/46220) 
  remote: Compressing objects:  32% (14791/46220)   remote: Compressing 
objects:  33% (15253/46220)   remote: Compressing objects:  34% 
(15715/46220)   remote: Compressing objects:  35% (16177/46220) 
  remote: Compressing objects:  36% (16640/46220)   remote: Compressing 
objects:  37% (17102/46220)   remote: Compressing objects:  38% 
(17564/46220)   remote: Compressing objects:  39% (18026/46220) 
  remote: Compressing objects:  40% (18488/46220)   remote: Compressing 
objects:  41% (18951/46220)   remote: Compressing objects:  42% 
(19413/46220)   remote: Compressing objects:  43% (19875/46220) 
  remote: Compressing objects:  44% (20337/46220)   remote: Co

Build failed in Jenkins: Phoenix-4.x-HBase-1.2 #406

2018-07-25 Thread Apache Jenkins Server
See 

--
Started by an SCM change
[EnvInject] - Loading node environment variables.
Building remotely on H24 (ubuntu xenial) in workspace 

 > git rev-parse --is-inside-work-tree # timeout=10
Fetching changes from the remote Git repository
 > git config remote.origin.url 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git # timeout=10
Fetching upstream changes from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
 > git --version # timeout=10
 > git fetch --tags --progress 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git 
 > +refs/heads/*:refs/remotes/origin/*
ERROR: Error fetching remote repo 'origin'
hudson.plugins.git.GitException: Failed to fetch from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
at hudson.plugins.git.GitSCM.fetchFrom(GitSCM.java:888)
at hudson.plugins.git.GitSCM.retrieveChanges(GitSCM.java:1155)
at hudson.plugins.git.GitSCM.checkout(GitSCM.java:1186)
at hudson.scm.SCM.checkout(SCM.java:504)
at hudson.model.AbstractProject.checkout(AbstractProject.java:1208)
at 
hudson.model.AbstractBuild$AbstractBuildExecution.defaultCheckout(AbstractBuild.java:574)
at jenkins.scm.SCMCheckoutStrategy.checkout(SCMCheckoutStrategy.java:86)
at 
hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:499)
at hudson.model.Run.execute(Run.java:1794)
at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
at hudson.model.ResourceController.execute(ResourceController.java:97)
at hudson.model.Executor.run(Executor.java:429)
Caused by: hudson.plugins.git.GitException: Command "git fetch --tags 
--progress https://git-wip-us.apache.org/repos/asf/phoenix.git 
+refs/heads/*:refs/remotes/origin/*" returned status code 128:
stdout: 
stderr: error: Could not read 4cab4c270e91b48b4ebd19487986cfd864e02476
error: Could not read bc4ca79ee0f7c1074f390c8ad8cd7b0bc51169f3
error: Could not read 766248ba95cb3a60dc5f3acd9b26d6dc9276d0e1
error: Could not read f46e8bbcd48674f7de217feeac2679a593b042be
error: Could not read 7f4730ed7d433b4d00abd1d28305f4fbbdf1e8f8
remote: Counting objects: 60609   remote: Counting objects: 98032, 
done.
remote: Compressing objects:   0% (1/46220)   remote: Compressing 
objects:   1% (463/46220)   remote: Compressing objects:   2% 
(925/46220)   remote: Compressing objects:   3% (1387/46220)   
remote: Compressing objects:   4% (1849/46220)   remote: Compressing 
objects:   5% (2311/46220)   remote: Compressing objects:   6% 
(2774/46220)   remote: Compressing objects:   7% (3236/46220)   
remote: Compressing objects:   8% (3698/46220)   remote: Compressing 
objects:   9% (4160/46220)   remote: Compressing objects:  10% 
(4622/46220)   remote: Compressing objects:  11% (5085/46220)   
remote: Compressing objects:  12% (5547/46220)   remote: Compressing 
objects:  13% (6009/46220)   remote: Compressing objects:  14% 
(6471/46220)   remote: Compressing objects:  15% (6933/46220)   
remote: Compressing objects:  16% (7396/46220)   remote: Compressing 
objects:  17% (7858/46220)   remote: Compressing objects:  18% 
(8320/46220)   remote: Compressing objects:  19% (8782/46220)   
remote: Compressing objects:  20% (9244/46220)   remote: Compressing 
objects:  21% (9707/46220)   remote: Compressing objects:  22% 
(10169/46220)   remote: Compressing objects:  23% (10631/46220) 
  remote: Compressing objects:  24% (11093/46220)   remote: Compressing 
objects:  25% (11555/46220)   remote: Compressing objects:  26% 
(12018/46220)   remote: Compressing objects:  27% (12480/46220) 
  remote: Compressing objects:  28% (12942/46220)   remote: Compressing 
objects:  29% (13404/46220)   remote: Compressing objects:  30% 
(13866/46220)   remote: Compressing objects:  31% (14329/46220) 
  remote: Compressing objects:  32% (14791/46220)   remote: Compressing 
objects:  33% (15253/46220)   remote: Compressing objects:  34% 
(15715/46220)   remote: Compressing objects:  35% (16177/46220) 
  remote: Compressing objects:  36% (16640/46220)   remote: Compressing 
objects:  37% (17102/46220)   remote: Compressing objects:  38% 
(17564/46220)   remote: Compressing objects:  39% (18026/46220) 
  remote: Compressing objects:  40% (18488/46220)   remote: Compressing 
objects:  41% (18951/46220)   remote: Compressing objects:  42% 
(19413/46220)   remote: Compressing objects:  43% (19875/46220) 
  remote: Compressing objects:  44% (20337/46220)   remote: Co

Build failed in Jenkins: Phoenix-4.x-HBase-1.2 #407

2018-07-25 Thread Apache Jenkins Server
See 


Changes:

[jamestaylor] PHOENIX-4789 Exception when setting TTL on Tephra transactional 
table

[ankitsinghal59] PHOENIX-4785 Unable to write to table if index is made active 
during

[jamestaylor] PHOENIX-4790 Simplify check for client side delete

[jamestaylor] PHOENIX-3383 Comparison between descending row keys used in RVC is

[jamestaylor] PHOENIX-4790 Addendum to check that query is a point lookup for 
delete

[karanmehta93] PHOENIX-4805 Move Avatica version to 1.12 for PQS

[elserj] PHOENIX-4809 Only cache PhoenixConnections when lease renewal is on

[jamestaylor] Revert "PHOENIX-4790 Addendum to check that query is a point 
lookup for

[jamestaylor] Revert "PHOENIX-4790 Simplify check for client side delete"

[tdsilva] PHOENIX-3534 Support multi region SYSTEM.CATALOG table (Thomas D'Silva

[vincentpoon] PHOENIX-4818 Fix RAT check for missing licenses

[k.mehta] PHOENIX-4797 file not found or file exist exception when create global

[elserj] PHOENIX-4817 Fixed Phoenix Tracing Web Application (fixed check null,

--
Started by an SCM change
[EnvInject] - Loading node environment variables.
Building remotely on H23 (ubuntu xenial) in workspace 

 > git rev-parse --is-inside-work-tree # timeout=10
Fetching changes from the remote Git repository
 > git config remote.origin.url 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git # timeout=10
Fetching upstream changes from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
 > git --version # timeout=10
 > git fetch --tags --progress 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git rev-parse origin/4.x-HBase-1.2^{commit} # timeout=10
Checking out Revision faf4fb264aa89da0197a34e402cb3f6bde5ab153 
(origin/4.x-HBase-1.2)
 > git config core.sparsecheckout # timeout=10
 > git checkout -f faf4fb264aa89da0197a34e402cb3f6bde5ab153
Commit message: "PHOENIX-4817 Fixed Phoenix Tracing Web Application (fixed 
check null, trace table name, webapp path, column names) and traceserver.py"
 > git rev-list --no-walk 179bea2c186e1286fe7492423751e748d21afc5c # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the 
SCM step.
[EnvInject] - Injecting as environment variables the properties content 
MAVEN_OPTS=-Xmx3G

[EnvInject] - Variables injected successfully.
[Phoenix-4.x-HBase-1.2] $ /bin/bash -xe /tmp/jenkins2467312164619778657.sh
+ echo 'DELETING ~/.m2/repository/org/apache/htrace. See 
https://issues.apache.org/jira/browse/PHOENIX-1802'
DELETING ~/.m2/repository/org/apache/htrace. See 
https://issues.apache.org/jira/browse/PHOENIX-1802
+ echo 'CURRENT CONTENT:'
CURRENT CONTENT:
+ ls /home/jenkins/.m2/repository/org/apache/htrace
htrace
htrace-core
htrace-core4
[Phoenix-4.x-HBase-1.2] $ /home/jenkins/tools/maven/latest3/bin/mvn -U clean 
install -Dcheckstyle.skip=true
[INFO] Scanning for projects...
Downloading from central: 
https://repo.maven.apache.org/maven2/org/apache/felix/maven-bundle-plugin/2.5.3/maven-bundle-plugin-2.5.3.pom
[ERROR] [ERROR] Some problems were encountered while processing the POMs:
[ERROR] Unresolveable build extension: Plugin 
org.apache.felix:maven-bundle-plugin:2.5.3 or one of its dependencies could not 
be resolved: Failed to read artifact descriptor for 
org.apache.felix:maven-bundle-plugin:jar:2.5.3 @ 
[WARNING] Reporting configuration should be done in <reporting> section, not in 
maven-site-plugin <configuration> as reportPlugins parameter. @ line 467, 
column 24
 @ 
[ERROR] The build could not read 1 project -> [Help 1]
[ERROR]   
[ERROR]   The project org.apache.phoenix:phoenix:4.14.0-HBase-1.2 
( has 1 error
[ERROR] Unresolveable build extension: Plugin 
org.apache.felix:maven-bundle-plugin:2.5.3 or one of its dependencies could not 
be resolved: Failed to read artifact descriptor for 
org.apache.felix:maven-bundle-plugin:jar:2.5.3: Could not transfer artifact 
org.apache.felix:maven-bundle-plugin:pom:2.5.3 from/to central 
(https://repo.maven.apache.org/maven2): Received fatal alert: protocol_version 
-> [Help 2]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e 
switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please 
read the following articles:
[ERROR] [Help 1] 
http://cwiki.apache.org/confluence/display/MAVEN/ProjectBuildingException
[ERROR] [Help 2] 
http://cwiki.apache.org/confluence/display/MAVEN/PluginManagerException
Build step 'Invoke top-level Maven targets' marked build as failure
Archiving artifacts
Recording test results
ERROR: Step 'Publish JUnit test result report' failed: Test reports were found 
but none of them are new. Did tests run?

[03/50] [abbrv] phoenix git commit: PHOENIX-4762 Performance regression with transactional immutable indexes

2018-07-25 Thread elserj
http://git-wip-us.apache.org/repos/asf/phoenix/blob/eeea6c60/phoenix-core/src/main/java/org/apache/phoenix/transaction/TephraTransactionContext.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/transaction/TephraTransactionContext.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/transaction/TephraTransactionContext.java
index 158f1e8..28dc87a 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/transaction/TephraTransactionContext.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/transaction/TephraTransactionContext.java
@@ -398,5 +398,21 @@ public class TephraTransactionContext implements 
PhoenixTransactionContext {
 this.addTransactionAware(transactionAwareHTable);
 return transactionAwareHTable;
 }
+
+@Override
+public Table getTransactionalTableWriter(Table htable, PTable table) {
+boolean isIndex = table.getType() == PTableType.INDEX;
+TransactionAwareHTable transactionAwareHTable = new 
TransactionAwareHTable(htable, table.isImmutableRows() || isIndex ? 
TxConstants.ConflictDetection.NONE : TxConstants.ConflictDetection.ROW);
+// Don't add immutable indexes (those are the only ones that would 
participate
+// during a commit), as we don't need conflict detection for these.
+if (isIndex) {
+transactionAwareHTable.startTx(getTransaction());
+} else {
+// Even for immutable, we need to do this so that an abort has the 
state
+// necessary to generate the rows to delete.
+this.addTransactionAware(transactionAwareHTable);
+}
+return transactionAwareHTable;
+}
 
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/eeea6c60/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
index c6cbe3e..3d4c3ca 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
@@ -41,7 +41,6 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Mutation;
@@ -116,7 +115,6 @@ import org.apache.phoenix.schema.types.PDecimal;
 import org.apache.phoenix.schema.types.PLong;
 import org.apache.phoenix.schema.types.PVarbinary;
 import org.apache.phoenix.schema.types.PVarchar;
-import org.apache.phoenix.transaction.PhoenixTransactionContext;
 
 import com.google.common.collect.Lists;
 
@@ -269,32 +267,6 @@ public class IndexUtil {
 .getLength()) == 0);
 }
 
-public static List generateDeleteIndexData(final PTable table, 
PTable index,
-List dataMutations, ImmutableBytesWritable ptr, final 
KeyValueBuilder kvBuilder, PhoenixConnection connection)
-throws SQLException {
-try {
-IndexMaintainer maintainer = index.getIndexMaintainer(table, 
connection);
-List indexMutations = 
Lists.newArrayListWithExpectedSize(dataMutations.size());
-for (final Mutation dataMutation : dataMutations) {
-long ts = MetaDataUtil.getClientTimeStamp(dataMutation);
-ptr.set(dataMutation.getRow());
-byte[] regionStartKey = null;
-byte[] regionEndkey = null;
-if(maintainer.isLocalIndex()) {
-HRegionLocation tableRegionLocation = 
connection.getQueryServices().getTableRegionLocation(table.getPhysicalName().getBytes(),
 dataMutation.getRow());
-regionStartKey = 
tableRegionLocation.getRegion().getStartKey();
-regionEndkey = tableRegionLocation.getRegion().getEndKey();
-}
-Delete delete = maintainer.buildDeleteMutation(kvBuilder, 
null, ptr, Collections.emptyList(), ts, regionStartKey, regionEndkey);
-
delete.setAttribute(PhoenixTransactionContext.TX_ROLLBACK_ATTRIBUTE_KEY, 
dataMutation.getAttribute(PhoenixTransactionContext.TX_ROLLBACK_ATTRIBUTE_KEY));
-indexMutations.add(delete);
-}
-return indexMutations;
-} catch (IOException e) {
-throw new SQLException(e);
-}
-}
-
 public static List generateIndexData(final PTable table, PTable 
index,
 final MultiRowMutationState multiRowMutationState, List 
dataMutations, final KeyValueBuilder kvBuilder, PhoenixConnection connection)
 throws SQLException {



[21/50] [abbrv] phoenix git commit: PHOENIX-4785 Unable to write to table if index is made active during retry

2018-07-25 Thread elserj
PHOENIX-4785 Unable to write to table if index is made active during retry


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/56318da6
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/56318da6
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/56318da6

Branch: refs/heads/master
Commit: 56318da6206df749c91877275f1b8e7dec8d848d
Parents: 2dd6310
Author: Ankit Singhal 
Authored: Thu Jun 21 16:30:16 2018 -0700
Committer: Ankit Singhal 
Committed: Thu Jun 21 16:30:16 2018 -0700

--
 .../end2end/index/MutableIndexFailureIT.java| 125 ++-
 .../MutableIndexFailureWithNamespaceIT.java |  79 
 .../coprocessor/MetaDataEndpointImpl.java   |  30 +
 .../index/PhoenixIndexFailurePolicy.java|  71 ++-
 .../phoenix/jdbc/PhoenixDatabaseMetaData.java   |   1 +
 5 files changed, 272 insertions(+), 34 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/56318da6/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexFailureIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexFailureIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexFailureIT.java
index 0daf80f..dbda4e8 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexFailureIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexFailureIT.java
@@ -28,10 +28,16 @@ import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
@@ -104,10 +110,10 @@ public class MutableIndexFailureIT extends BaseTest {
 private final boolean throwIndexWriteFailure;
 private String schema = generateUniqueName();
 private List exceptions = Lists.newArrayList();
-private static RegionCoprocessorEnvironment 
indexRebuildTaskRegionEnvironment;
-private static final int forwardOverlapMs = 1000;
-private static final int disableTimestampThresholdMs = 1;
-private static final int numRpcRetries = 2;
+protected static RegionCoprocessorEnvironment 
indexRebuildTaskRegionEnvironment;
+protected static final int forwardOverlapMs = 1000;
+protected static final int disableTimestampThresholdMs = 1;
+protected static final int numRpcRetries = 2;
 
 public MutableIndexFailureIT(boolean transactional, boolean localIndex, 
boolean isNamespaceMapped, Boolean disableIndexOnWriteFailure, boolean 
failRebuildTask, Boolean throwIndexWriteFailure) {
 this.transactional = transactional;
@@ -127,6 +133,20 @@ public class MutableIndexFailureIT extends BaseTest {
 
 @BeforeClass
 public static void doSetup() throws Exception {
+Map serverProps = getServerProps();
+Map clientProps = Maps.newHashMapWithExpectedSize(2);
+clientProps.put(HConstants.HBASE_CLIENT_RETRIES_NUMBER, "2");
+NUM_SLAVES_BASE = 4;
+setUpTestDriver(new ReadOnlyProps(serverProps.entrySet().iterator()), 
new ReadOnlyProps(clientProps.entrySet().iterator()));
+indexRebuildTaskRegionEnvironment = getUtility()
+
.getRSForFirstRegionInTable(PhoenixDatabaseMetaData.SYSTEM_CATALOG_HBASE_TABLE_NAME)
+
.getRegions(PhoenixDatabaseMetaData.SYSTEM_CATALOG_HBASE_TABLE_NAME).get(0).getCoprocessorHost()
+
.findCoprocessorEnvironment(MetaDataRegionObserver.class.getName());
+MetaDataRegionObserver.initRebuildIndexConnectionProps(
+indexRebuildTaskRegionEnvironment.getConfiguration());
+}
+
+protected static Map getServerProps(){
 Map serverProps = Maps.newHashMapWithExpectedSize(10);
 serverProps.put("hbase.coprocessor.region.classes", 
FailingRegionObserver.class.getName());
 serverProps.put(HConstants.HBASE_RPC_TIMEOUT_KEY, "1");
@@ -142,19 +162,7 @@ public class MutableIndexFailureIT extends BaseTest {
  * because we want to control it's execution ourselves
  */
 serverProps.put(QueryServices.INDEX_REBUILD_TASK_INITIAL_DELAY, 
Long.toString(Long.MAX_VALUE));
-Map clientProps = Maps.newHashMapWithExpectedSize(2);
-clientProps.put(HConstants.HBASE_CLIENT_RETRIES_NUMBER, "2");
-NUM_SLAVES_BASE = 4;
-   

[46/50] [abbrv] phoenix git commit: PHOENIX-3534 Support multi region SYSTEM.CATALOG table (Thomas D'Silva and Rahul Gidwani)

2018-07-25 Thread elserj
http://git-wip-us.apache.org/repos/asf/phoenix/blob/d56fd3c9/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableWithViewsIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableWithViewsIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableWithViewsIT.java
index 472331b..e39d492 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableWithViewsIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableWithViewsIT.java
@@ -21,6 +21,8 @@ import static 
org.apache.phoenix.exception.SQLExceptionCode.CANNOT_MUTATE_TABLE;
 import static org.apache.phoenix.util.PhoenixRuntime.TENANT_ID_ATTRIB;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
@@ -33,9 +35,13 @@ import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
 import java.util.Arrays;
 import java.util.Collection;
-import java.util.Properties;
+import java.util.List;
 
 import org.apache.commons.lang.ArrayUtils;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.coprocessor.TephraTransactionalProcessor;
@@ -43,27 +49,32 @@ import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
 import org.apache.phoenix.query.QueryConstants;
+import org.apache.phoenix.schema.ColumnNotFoundException;
+import org.apache.phoenix.schema.PColumn;
 import org.apache.phoenix.schema.PName;
 import org.apache.phoenix.schema.PNameFactory;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTableKey;
 import org.apache.phoenix.schema.PTableType;
-import org.apache.phoenix.util.PropertiesUtil;
-import org.apache.phoenix.util.StringUtil;
-import org.apache.phoenix.util.TestUtil;
+import org.apache.phoenix.schema.TableNotFoundException;
+import org.apache.phoenix.util.IndexUtil;
+import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.SchemaUtil;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
 
+import com.google.common.base.Function;
+import com.google.common.collect.Lists;
+
 @RunWith(Parameterized.class)
-public class AlterTableWithViewsIT extends ParallelStatsDisabledIT {
-
+public class AlterTableWithViewsIT extends SplitSystemCatalogIT {
+
 private final boolean isMultiTenant;
 private final boolean columnEncoded;
-
-private final String TENANT_SPECIFIC_URL1 = getUrl() + ';' + 
TENANT_ID_ATTRIB + "=tenant1";
-private final String TENANT_SPECIFIC_URL2 = getUrl() + ';' + 
TENANT_ID_ATTRIB + "=tenant2";
+private final String TENANT_SPECIFIC_URL1 = getUrl() + ';' + 
TENANT_ID_ATTRIB + "=" + TENANT1;
+private final String TENANT_SPECIFIC_URL2 = getUrl() + ';' + 
TENANT_ID_ATTRIB + "=" + TENANT2;
 
 public AlterTableWithViewsIT(boolean isMultiTenant, boolean columnEncoded) 
{
 this.isMultiTenant = isMultiTenant;
@@ -77,6 +88,14 @@ public class AlterTableWithViewsIT extends 
ParallelStatsDisabledIT {
 { true, false }, { true, true } });
 }
 
+// transform PColumn to String
+private Function function = new Function(){
+@Override
+public String apply(PColumn input) {
+return input.getName().getString();
+}
+};
+
 private String generateDDL(String format) {
 return generateDDL("", format);
 }
@@ -101,8 +120,9 @@ public class AlterTableWithViewsIT extends 
ParallelStatsDisabledIT {
 public void testAddNewColumnsToBaseTableWithViews() throws Exception {
 try (Connection conn = DriverManager.getConnection(getUrl());
 Connection viewConn = isMultiTenant ? 
DriverManager.getConnection(TENANT_SPECIFIC_URL1) : conn ) {   
-String tableName = generateUniqueName();
-String viewOfTable = tableName + "_VIEW";
+String tableName = SchemaUtil.getTableName(SCHEMA1, 
generateUniqueName());
+String viewOfTable = SchemaUtil.getTableName(SCHEMA2, 
generateUniqueName());
+
 String ddlFormat = "CREATE TABLE IF NOT EXISTS " + tableName + " ("
 + " %s ID char(1) NOT NULL,"
 + " COL1 integer NOT NULL,"
@@ -113,12 +133,13 @@ public class AlterTableWithViewsIT extends 
ParallelStatsDisabledIT {
 assertTableDefinition(conn, table

[17/50] [abbrv] phoenix git commit: PHOENIX-4528 PhoenixAccessController checks permissions only at table level when creating views(Karan Mehta)

2018-07-25 Thread elserj
PHOENIX-4528 PhoenixAccessController checks permissions only at table level 
when creating views(Karan Mehta)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/61affd43
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/61affd43
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/61affd43

Branch: refs/heads/master
Commit: 61affd431b8c4a1730804f0c0d5a0035b797e178
Parents: 16fa7f6
Author: Rajeshbabu Chintaguntla 
Authored: Fri Jun 15 10:38:05 2018 -0700
Committer: Rajeshbabu Chintaguntla 
Committed: Fri Jun 15 10:38:05 2018 -0700

--
 .../phoenix/end2end/BasePermissionsIT.java  |  4 +
 .../phoenix/end2end/ChangePermissionsIT.java| 26 +-
 .../coprocessor/PhoenixAccessController.java| 95 +---
 3 files changed, 92 insertions(+), 33 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/61affd43/phoenix-core/src/it/java/org/apache/phoenix/end2end/BasePermissionsIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BasePermissionsIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BasePermissionsIT.java
index 9f91267..7698fca 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BasePermissionsIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BasePermissionsIT.java
@@ -748,6 +748,10 @@ public class BasePermissionsIT extends BaseTest {
 }
 }
 
+String surroundWithDoubleQuotes(String input) {
+return "\"" + input + "\"";
+}
+
 void validateAccessDeniedException(AccessDeniedException ade) {
 String msg = ade.getMessage();
 assertTrue("Exception contained unexpected message: '" + msg + "'",

http://git-wip-us.apache.org/repos/asf/phoenix/blob/61affd43/phoenix-core/src/it/java/org/apache/phoenix/end2end/ChangePermissionsIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ChangePermissionsIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ChangePermissionsIT.java
index 0d764d8..106438f 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ChangePermissionsIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ChangePermissionsIT.java
@@ -23,6 +23,7 @@ import org.apache.hadoop.hbase.security.User;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
 import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.schema.TableNotFoundException;
+import org.apache.phoenix.util.SchemaUtil;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
@@ -144,7 +145,7 @@ public class ChangePermissionsIT extends BasePermissionsIT {
 verifyAllowed(createSchema(SCHEMA_NAME), superUser1);
 verifyAllowed(grantPermissions("C", regularUser1, SCHEMA_NAME, 
true), superUser1);
 } else {
-verifyAllowed(grantPermissions("C", regularUser1, "\"" + 
QueryConstants.HBASE_DEFAULT_SCHEMA_NAME + "\"", true), superUser1);
+verifyAllowed(grantPermissions("C", regularUser1, 
surroundWithDoubleQuotes(QueryConstants.HBASE_DEFAULT_SCHEMA_NAME), true), 
superUser1);
 }
 
 // Create new table. Create indexes, views and view indexes on top of 
it. Verify the contents by querying it
@@ -235,7 +236,7 @@ public class ChangePermissionsIT extends BasePermissionsIT {
 verifyAllowed(createSchema(SCHEMA_NAME), superUser1);
 verifyAllowed(grantPermissions("C", regularUser1, SCHEMA_NAME, 
true), superUser1);
 } else {
-verifyAllowed(grantPermissions("C", regularUser1, "\"" + 
QueryConstants.HBASE_DEFAULT_SCHEMA_NAME + "\"", true), superUser1);
+verifyAllowed(grantPermissions("C", regularUser1, 
surroundWithDoubleQuotes(QueryConstants.HBASE_DEFAULT_SCHEMA_NAME), true), 
superUser1);
 }
 
 // Create MultiTenant Table (View Index Table should be automatically 
created)
@@ -266,4 +267,25 @@ public class ChangePermissionsIT extends BasePermissionsIT 
{
 verifyAllowed(readMultiTenantTableWithIndex(VIEW1_TABLE_NAME, "o1"), 
regularUser2);
 verifyAllowed(readMultiTenantTableWithoutIndex(VIEW2_TABLE_NAME, 
"o2"), regularUser2);
 }
+
+/**
+ * Grant RX permissions on the schema to regularUser1,
+ * Creating view on a table with that schema by regularUser1 should be 
allowed
+ */
+@Test
+public void testCreateViewOnTableWithRXPermsOnSchema() throws Exception {
+
+startNewMiniCluster();
+grantSystemTableAccess(superUser1, regularUser1, regularUser2, 
regularUser3);
+
+if(isNamespaceMapped) {
+verifyAllowed(createSchema(SCHEMA_NAME), superUser1);
+ 

[19/50] [abbrv] phoenix git commit: PHOENIX-4789 Exception when setting TTL on Tephra transactional table

2018-07-25 Thread elserj
PHOENIX-4789 Exception when setting TTL on Tephra transactional table


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/43da29f4
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/43da29f4
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/43da29f4

Branch: refs/heads/master
Commit: 43da29f4777f0f9ac9f5373bafe3c2e4643e26a5
Parents: 8cceea6
Author: James Taylor 
Authored: Mon Jun 18 15:00:02 2018 +0200
Committer: James Taylor 
Committed: Mon Jun 18 15:54:21 2018 +0200

--
 .../org/apache/phoenix/tx/TransactionIT.java| 29 
 .../query/ConnectionQueryServicesImpl.java  |  3 ++
 2 files changed, 32 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/43da29f4/phoenix-core/src/it/java/org/apache/phoenix/tx/TransactionIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/tx/TransactionIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/tx/TransactionIT.java
index baaac8d..2cf15d2 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/tx/TransactionIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/tx/TransactionIT.java
@@ -25,6 +25,7 @@ import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
+import java.io.IOException;
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
 import java.sql.DriverManager;
@@ -36,9 +37,11 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Properties;
 
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.end2end.ParallelStatsDisabledIT;
 import org.apache.phoenix.exception.SQLExceptionCode;
@@ -55,6 +58,7 @@ import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.StringUtil;
 import org.apache.phoenix.util.TestUtil;
+import org.apache.tephra.TxConstants;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -371,4 +375,29 @@ public class TransactionIT  extends 
ParallelStatsDisabledIT {
 conn.close();
 }
 }
+
+private static void assertTTL(Admin admin, String tableName, int ttl) 
throws Exception {
+TableDescriptor tableDesc = 
admin.getTableDescriptor(TableName.valueOf(tableName));
+for (ColumnFamilyDescriptor colDesc : tableDesc.getColumnFamilies()) {
+
assertEquals(ttl,Integer.parseInt(Bytes.toString(colDesc.getValue(Bytes.toBytes(TxConstants.PROPERTY_TTL);
+
assertEquals(ColumnFamilyDescriptorBuilder.DEFAULT_TTL,colDesc.getTimeToLive());
+}
+}
+
+@Test
+public void testSetTTL() throws Exception {
+Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+TransactionFactory.Provider txProvider = 
TransactionFactory.Provider.valueOf(this.txProvider);
+try (Connection conn = DriverManager.getConnection(getUrl(), props); 
Admin admin = 
conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+String tableName = generateUniqueName();
+conn.createStatement().execute("CREATE TABLE " + tableName + 
+"(K VARCHAR PRIMARY KEY) 
TRANSACTIONAL=true,TRANSACTION_PROVIDER='" + txProvider + "',TTL=100");
+assertTTL(admin, tableName, 100);
+tableName = generateUniqueName();
+conn.createStatement().execute("CREATE TABLE " + tableName + 
+"(K VARCHAR PRIMARY KEY) 
TRANSACTIONAL=true,TRANSACTION_PROVIDER='" + txProvider + "'");
+conn.createStatement().execute("ALTER TABLE " + tableName + " SET 
TTL=" + 200);
+assertTTL(admin, tableName, 200);
+}
+}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/43da29f4/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index f1ab653..3c232aa 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@ -2262,6 +2262,8 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices imple

[20/50] [abbrv] phoenix git commit: Revert "PHOENIX-4768 Re-enable testCompactUpdatesStats and testCompactUpdatesStatsWithMinStatsUpdateFreq of StatsCollectorIT"

2018-07-25 Thread elserj
Revert "PHOENIX-4768 Re-enable testCompactUpdatesStats and 
testCompactUpdatesStatsWithMinStatsUpdateFreq of StatsCollectorIT"

This reverts commit 09c017e3cc8a2d1ec4ce27c65a5c76b71de138a0.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/2dd6310e
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/2dd6310e
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/2dd6310e

Branch: refs/heads/master
Commit: 2dd6310ea1057f2cb46361817e2b3a3ecb1887e7
Parents: 43da29f
Author: Ankit Singhal 
Authored: Thu Jun 21 14:41:36 2018 -0700
Committer: Ankit Singhal 
Committed: Thu Jun 21 14:41:36 2018 -0700

--
 .../org/apache/phoenix/schema/stats/StatsCollectorIT.java| 8 ++--
 1 file changed, 6 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/2dd6310e/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/StatsCollectorIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/StatsCollectorIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/StatsCollectorIT.java
index c2325ae..3af0d09 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/StatsCollectorIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/StatsCollectorIT.java
@@ -46,6 +46,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver;
@@ -68,6 +69,7 @@ import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.TestUtil;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -403,11 +405,13 @@ public abstract class StatsCollectorIT extends 
BaseUniqueNamesOwnClusterIT {
 }
 
 @Test
+@Ignore //TODO remove this once  
https://issues.apache.org/jira/browse/TEPHRA-208 is fixed
 public void testCompactUpdatesStats() throws Exception {
 testCompactUpdatesStats(0, fullTableName);
 }
 
 @Test
+@Ignore //TODO remove this once  
https://issues.apache.org/jira/browse/TEPHRA-208 is fixed
 public void testCompactUpdatesStatsWithMinStatsUpdateFreq() throws 
Exception {
 
testCompactUpdatesStats(QueryServicesOptions.DEFAULT_STATS_UPDATE_FREQ_MS, 
fullTableName);
 }
@@ -460,7 +464,7 @@ public abstract class StatsCollectorIT extends 
BaseUniqueNamesOwnClusterIT {
 Scan scan = new Scan();
 scan.setRaw(true);
 PhoenixConnection phxConn = conn.unwrap(PhoenixConnection.class);
-try (Table htable = 
phxConn.getQueryServices().getTable(Bytes.toBytes(physicalTableName))) {
+try (Table htable = 
phxConn.getQueryServices().getTable(Bytes.toBytes(tableName))) {
 ResultScanner scanner = htable.getScanner(scan);
 Result result;
 while ((result = scanner.next())!=null) {
@@ -473,7 +477,7 @@ public abstract class StatsCollectorIT extends 
BaseUniqueNamesOwnClusterIT {
 scan = new Scan();
 scan.setRaw(true);
 phxConn = conn.unwrap(PhoenixConnection.class);
-try (Table htable = 
phxConn.getQueryServices().getTable(Bytes.toBytes(physicalTableName))) {
+try (Table htable = 
phxConn.getQueryServices().getTable(Bytes.toBytes(tableName))) {
 ResultScanner scanner = htable.getScanner(scan);
 Result result;
 while ((result = scanner.next())!=null) {



[36/50] [abbrv] phoenix git commit: Revert "PHOENIX-4790 Addendum to check that query is a point lookup for delete not to run query"

2018-07-25 Thread elserj
Revert "PHOENIX-4790 Addendum to check that query is a point lookup for delete 
not to run query"

This reverts commit ab930f49762e5ccf3f4ac108a2402552753f0d6a.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/2b43bea0
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/2b43bea0
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/2b43bea0

Branch: refs/heads/master
Commit: 2b43bea01e6231ca675cc5d681aedb83daa3c1e0
Parents: f46e8bb
Author: James Taylor 
Authored: Thu Jul 12 20:09:44 2018 -0700
Committer: James Taylor 
Committed: Thu Jul 12 20:09:44 2018 -0700

--
 .../src/main/java/org/apache/phoenix/compile/DeleteCompiler.java  | 3 ---
 1 file changed, 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/2b43bea0/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
index 34c1590..f4e8896 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
@@ -542,9 +542,6 @@ public class DeleteCompiler {
 Iterator iterator = queryPlans.iterator();
 while (iterator.hasNext()) {
 QueryPlan plan = iterator.next();
-// Must be a point lookup in order to not run a query since
-// we have to have the full key be enumerated.
-noQueryReqd &= plan.getContext().getScanRanges().isPointLookup();
 if (plan.getTableRef().getTable().getIndexType() == 
IndexType.LOCAL) {
 if (!plan.getContext().getDataColumns().isEmpty()) {
 iterator.remove();



[44/50] [abbrv] phoenix git commit: PHOENIX-3534 Support multi region SYSTEM.CATALOG table (Thomas D'Silva and Rahul Gidwani)

2018-07-25 Thread elserj
http://git-wip-us.apache.org/repos/asf/phoenix/blob/d56fd3c9/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
index 72dd26f..558b92e 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
@@ -28,172 +28,119 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
+import java.io.IOException;
+import java.math.BigDecimal;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
 import java.util.List;
+import java.util.Map;
 import java.util.Properties;
 
+import org.apache.curator.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.coprocessor.ObserverContext;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver;
+import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Pair;
 import org.apache.phoenix.compile.QueryPlan;
+import org.apache.phoenix.exception.PhoenixIOException;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixConnection;
+import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
+import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.query.KeyRange;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.schema.ColumnAlreadyExistsException;
+import org.apache.phoenix.schema.PColumn;
+import org.apache.phoenix.schema.PTable;
+import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.ReadOnlyTableException;
 import org.apache.phoenix.schema.TableNotFoundException;
+import org.apache.phoenix.util.MetaDataUtil;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.QueryUtil;
+import org.apache.phoenix.util.ReadOnlyProps;
 import org.apache.phoenix.util.SchemaUtil;
+import org.apache.phoenix.util.TestUtil;
+import org.junit.BeforeClass;
 import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
 
+import com.google.common.base.Predicate;
+import com.google.common.collect.Collections2;
+import com.google.common.collect.Maps;
 
+@RunWith(Parameterized.class)
+public class ViewIT extends SplitSystemCatalogIT {
 
-public class ViewIT extends BaseViewIT {
-   
-public ViewIT(boolean transactional) {
-   super(transactional);
-   }
-
-@Test
-public void testReadOnlyOnReadOnlyView() throws Exception {
-Connection earlierCon = DriverManager.getConnection(getUrl());
-Connection conn = DriverManager.getConnection(getUrl());
-String ddl = "CREATE TABLE " + fullTableName + " (k INTEGER NOT NULL 
PRIMARY KEY, v1 DATE) "+ tableDDLOptions;
-conn.createStatement().execute(ddl);
-String fullParentViewName = "V_" + generateUniqueName();
-ddl = "CREATE VIEW " + fullParentViewName + " (v2 VARCHAR) AS SELECT * 
FROM " + fullTableName + " WHERE k > 5";
-conn.createStatement().execute(ddl);
-try {
-conn.createStatement().execute("UPSERT INTO " + fullParentViewName 
+ " VALUES(1)");
-fail();
-} catch (ReadOnlyTableException e) {
-
-}
-for (int i = 0; i < 10; i++) {
-conn.createStatement().execute("UPSERT INTO " + fullTableName + " 
VALUES(" + i + ")");
-}
-conn.commit();
-
-analyzeTable(conn, fullParentViewName, transactional);
-
-List splits = getAllSplits(conn, fullParentViewName);
-assertEquals(4, splits.size());
-
-int count = 0;
-ResultSet rs = conn.createStatement().executeQuery("SELECT k FROM " + 
fullTableName);
-while (rs.next()) {
-assertEquals(count++, rs.getInt(1));
-}
-assertEquals(10, count);
-
-count = 0;
-rs = conn.createStatement().executeQuery("SELECT k FROM " + 
fullParentViewName);
-  

[41/50] [abbrv] phoenix git commit: PHOENIX-3534 Support multi region SYSTEM.CATALOG table (Thomas D'Silva and Rahul Gidwani)

2018-07-25 Thread elserj
http://git-wip-us.apache.org/repos/asf/phoenix/blob/d56fd3c9/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
index 0bd1f8c..874a382 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
@@ -94,8 +94,10 @@ public abstract class MetaDataProtocol extends 
MetaDataService {
 // TODO Was there a system table upgrade?
 // TODO Need to account for the inevitable 4.14 release too
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP_5_0_0 = 
MIN_SYSTEM_TABLE_TIMESTAMP_4_14_0;
+public static final long MIN_SYSTEM_TABLE_TIMESTAMP_4_15_0 = 
MIN_TABLE_TIMESTAMP + 29;
+public static final long MIN_SYSTEM_TABLE_TIMESTAMP_5_1_0 = 
MIN_SYSTEM_TABLE_TIMESTAMP_4_15_0;
 // MIN_SYSTEM_TABLE_TIMESTAMP needs to be set to the max of all the 
MIN_SYSTEM_TABLE_TIMESTAMP_* constants
-public static final long MIN_SYSTEM_TABLE_TIMESTAMP = 
MIN_SYSTEM_TABLE_TIMESTAMP_5_0_0;
+public static final long MIN_SYSTEM_TABLE_TIMESTAMP = 
MIN_SYSTEM_TABLE_TIMESTAMP_5_1_0;
 
 // Version below which we should disallow usage of mutable secondary 
indexing.
 public static final int MUTABLE_SI_VERSION_THRESHOLD = 
VersionUtil.encodeVersion("0", "94", "10");

http://git-wip-us.apache.org/repos/asf/phoenix/blob/d56fd3c9/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/TableInfo.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/TableInfo.java 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/TableInfo.java
new file mode 100644
index 000..b1c5f65
--- /dev/null
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/TableInfo.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.coprocessor;
+
+import java.util.Arrays;
+
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.util.SchemaUtil;
+
+public class TableInfo {
+
+private final byte[] tenantId;
+private final byte[] schema;
+private final byte[] name;
+
+public TableInfo(byte[] tenantId, byte[] schema, byte[] name) {
+this.tenantId = tenantId;
+this.schema = schema;
+this.name = name;
+}
+
+public byte[] getRowKeyPrefix() {
+return SchemaUtil.getTableKey(tenantId, schema, name);
+}
+
+@Override
+public String toString() {
+return Bytes.toStringBinary(getRowKeyPrefix());
+}
+
+public byte[] getTenantId() {
+return tenantId;
+}
+
+public byte[] getSchemaName() {
+return schema;
+}
+
+public byte[] getTableName() {
+return name;
+}
+
+@Override
+public int hashCode() {
+final int prime = 31;
+int result = 1;
+result = prime * result + Arrays.hashCode(name);
+result = prime * result + Arrays.hashCode(schema);
+result = prime * result + Arrays.hashCode(tenantId);
+return result;
+}
+
+@Override
+public boolean equals(Object obj) {
+if (this == obj) return true;
+if (obj == null) return false;
+if (getClass() != obj.getClass()) return false;
+TableInfo other = (TableInfo) obj;
+if (!Arrays.equals(name, other.name)) return false;
+if (!Arrays.equals(schema, other.schema)) return false;
+if (!Arrays.equals(tenantId, other.tenantId)) return false;
+return true;
+}
+}

http://git-wip-us.apache.org/repos/asf/phoenix/blob/d56fd3c9/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/TableViewFinderResult.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/TableViewFinderResult.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/TableViewFi

[26/50] [abbrv] phoenix git commit: PHOENIX-4795 Fix failing pherf tests in 5.x branch(Rajeshbabu)

2018-07-25 Thread elserj
PHOENIX-4795 Fix failing pherf tests in 5.x branch(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/2d44700f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/2d44700f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/2d44700f

Branch: refs/heads/master
Commit: 2d44700fac405abd017f0201ff4a4fa814204b1f
Parents: cb962f9
Author: Rajeshbabu Chintaguntla 
Authored: Tue Jun 26 06:43:09 2018 -0700
Committer: Rajeshbabu Chintaguntla 
Committed: Tue Jun 26 06:43:09 2018 -0700

--
 phoenix-pherf/src/test/resources/hbase-site.xml | 4 
 1 file changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/2d44700f/phoenix-pherf/src/test/resources/hbase-site.xml
--
diff --git a/phoenix-pherf/src/test/resources/hbase-site.xml 
b/phoenix-pherf/src/test/resources/hbase-site.xml
index 4972828..d4bebb7 100644
--- a/phoenix-pherf/src/test/resources/hbase-site.xml
+++ b/phoenix-pherf/src/test/resources/hbase-site.xml
@@ -22,4 +22,8 @@
 phoenix.query.threadPoolSize
 128
 
+
+   hbase.localcluster.assign.random.ports
+   true
+
 



[50/50] [abbrv] phoenix git commit: PHOENIX-4817 Fixed Phoenix Tracing Web Application (fixed check null, trace table name, webapp path, column names) and traceserver.py

2018-07-25 Thread elserj
PHOENIX-4817 Fixed Phoenix Tracing Web Application (fixed check null, trace 
table name, webapp path, column names) and traceserver.py

Closes #311

Signed-off-by: Josh Elser 


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8a874cc9
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8a874cc9
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8a874cc9

Branch: refs/heads/master
Commit: 8a874cc95c3365a566453fb9ebbe8e31b6d51b38
Parents: f2781d4
Author: Vitaly Monastyrev 
Authored: Tue Jul 24 12:14:12 2018 -0400
Committer: Josh Elser 
Committed: Wed Jul 25 14:07:01 2018 -0400

--
 bin/traceserver.py  |  6 +-
 .../apache/phoenix/tracingwebapp/http/Main.java | 13 +--
 .../tracingwebapp/http/TraceServlet.java| 87 +++-
 3 files changed, 62 insertions(+), 44 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/8a874cc9/bin/traceserver.py
--
diff --git a/bin/traceserver.py b/bin/traceserver.py
index 665099e..62e168c 100755
--- a/bin/traceserver.py
+++ b/bin/traceserver.py
@@ -116,8 +116,10 @@ else:
 
 #" -Xdebug -Xrunjdwp:transport=dt_socket,address=5005,server=y,suspend=n " 
+ \
 #" -XX:+UnlockCommercialFeatures -XX:+FlightRecorder 
-XX:FlightRecorderOptions=defaultrecording=true,dumponexit=true" + \
-java_cmd = '%(java)s $PHOENIX_OPTS ' + \
-'-cp ' + hbase_config_path + os.pathsep + 
phoenix_utils.phoenix_traceserver_jar + os.pathsep + 
phoenix_utils.phoenix_client_jar + \
+java_cmd = '%(java)s  ' + \
+'-cp ' + hbase_config_path + os.pathsep + 
phoenix_utils.phoenix_traceserver_jar + os.pathsep + \
+phoenix_utils.phoenix_client_jar + os.pathsep + 
phoenix_utils.phoenix_queryserver_jar + \
+os.pathsep + phoenix_utils.hadoop_classpath + \
 " -Dproc_phoenixtraceserver" + \
 " -Dlog4j.configuration=file:" + os.path.join(phoenix_utils.current_dir, 
"log4j.properties") + \
 " -Dpsql.root.logger=%(root_logger)s" + \

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8a874cc9/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
--
diff --git 
a/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
 
b/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
index 5875fc1..249f8e6 100755
--- 
a/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
+++ 
b/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/Main.java
@@ -47,6 +47,7 @@ public final class Main extends Configured implements Tool {
 public static final String TRACE_SERVER_HTTP_JETTY_HOME_KEY =
 "phoenix.traceserver.http.home";
 public static final String DEFAULT_HTTP_HOME = "/";
+public static final String DEFAULT_WEBAPP_DIR_LOCATION = "src/main/webapp";
 
 public static void main(String[] args) throws Exception {
 int ret = ToolRunner.run(HBaseConfiguration.create(), new Main(), 
args);
@@ -62,15 +63,17 @@ public final class Main extends Configured implements Tool {
 final String home = getConf().get(TRACE_SERVER_HTTP_JETTY_HOME_KEY,
 DEFAULT_HTTP_HOME);
 //setting up the embedded server
-ProtectionDomain domain = Main.class.getProtectionDomain();
-URL location = domain.getCodeSource().getLocation();
-String webappDirLocation = location.toString().split("target")[0] 
+"src/main/webapp";
 Server server = new Server(port);
 WebAppContext root = new WebAppContext();
 
+URL webAppDir = 
Thread.currentThread().getContextClassLoader().getResource(DEFAULT_WEBAPP_DIR_LOCATION);
+if (webAppDir == null) {
+throw new RuntimeException(String.format("No %s directory was 
found into the JAR file", DEFAULT_WEBAPP_DIR_LOCATION));
+}
+
 root.setContextPath(home);
-root.setDescriptor(webappDirLocation + "/WEB-INF/web.xml");
-root.setResourceBase(webappDirLocation);
+root.setDescriptor(DEFAULT_WEBAPP_DIR_LOCATION + "/WEB-INF/web.xml");
+root.setResourceBase(webAppDir.toURI().toString());
 root.setParentLoaderPriority(true);
 server.setHandler(root);
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8a874cc9/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/TraceServlet.java
--
diff --git 
a/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/TraceServlet.java
 
b/phoenix-tracing-webapp/src/main/java/org/apache/phoenix/tracingwebapp/http/TraceServlet.java
index c

[06/50] [abbrv] phoenix git commit: PHOENIX-4769 Annotate SystemCatalogIT so that it will run with the test suite.

2018-07-25 Thread elserj
PHOENIX-4769 Annotate SystemCatalogIT so that it will run with the test suite.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f8a1f1ec
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f8a1f1ec
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f8a1f1ec

Branch: refs/heads/master
Commit: f8a1f1ece77a9684b72d04a2d25148cb044e56ce
Parents: c489587
Author: Ankit Singhal 
Authored: Fri Jun 1 14:12:22 2018 -0700
Committer: Ankit Singhal 
Committed: Fri Jun 1 14:12:22 2018 -0700

--
 .../org/apache/phoenix/end2end/SystemCatalogIT.java| 13 ++---
 1 file changed, 10 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f8a1f1ec/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemCatalogIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemCatalogIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemCatalogIT.java
index 9c66b9a..2c7b992 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemCatalogIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SystemCatalogIT.java
@@ -25,16 +25,18 @@ import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.Properties;
 
+import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.RegionLocator;
+import org.apache.phoenix.query.BaseTest;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.junit.After;
-import org.junit.Ignore;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
-@Ignore
-public class SystemCatalogIT {
+@Category(NeedsOwnMiniClusterTest.class)
+public class SystemCatalogIT extends BaseTest {
 private HBaseTestingUtility testUtil = null;
 
 @After
@@ -59,12 +61,17 @@ public class SystemCatalogIT {
 RegionLocator rl = 
testUtil.getConnection().getRegionLocator(systemCatalog);
 assertEquals(rl.getAllRegionLocations().size(), 1);
 
+try{
 // now attempt to split SYSTEM.CATALOG
 testUtil.getAdmin().split(systemCatalog);
 
 // make sure the split finishes (there's no synchronous splitting 
before HBase 2.x)
 testUtil.getAdmin().disableTable(systemCatalog);
 testUtil.getAdmin().enableTable(systemCatalog);
+}catch(DoNotRetryIOException e){
+//table is not splittable
+assert(e.getMessage().contains("NOT splittable"));
+}
 
 // test again... Must still be exactly one region.
 rl = testUtil.getConnection().getRegionLocator(systemCatalog);



[18/50] [abbrv] phoenix git commit: PHOENIX-4786 Reduce log level to debug when logging new aggregate row key found and added results for scan ordered queries(Rajeshbabu)

2018-07-25 Thread elserj
PHOENIX-4786 Reduce log level to debug when logging new aggregate row key found 
and added results for scan ordered queries(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8cceea62
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8cceea62
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8cceea62

Branch: refs/heads/master
Commit: 8cceea6214297659e79d89f762c07349f84b74e9
Parents: 61affd4
Author: Rajeshbabu Chintaguntla 
Authored: Fri Jun 15 15:41:21 2018 -0700
Committer: Rajeshbabu Chintaguntla 
Committed: Fri Jun 15 15:41:21 2018 -0700

--
 .../phoenix/coprocessor/GroupedAggregateRegionObserver.java  | 8 
 1 file changed, 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/8cceea62/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/GroupedAggregateRegionObserver.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/GroupedAggregateRegionObserver.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/GroupedAggregateRegionObserver.java
index 1ded543..e58407f 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/GroupedAggregateRegionObserver.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/GroupedAggregateRegionObserver.java
@@ -542,14 +542,6 @@ public class GroupedAggregateRegionObserver extends 
BaseScannerRegionObserver im
 currentKey.getLength(), SINGLE_COLUMN_FAMILY, 
SINGLE_COLUMN,
 AGG_TIMESTAMP, value, 0, value.length);
 results.add(keyValue);
-if (logger.isInfoEnabled()) {
-logger.info(LogUtil.addCustomAnnotations("Adding new 
aggregate row: "
-+ keyValue
-+ ",for current key "
-+ Bytes.toStringBinary(currentKey.get(), 
currentKey.getOffset(),
-currentKey.getLength()) + ", aggregated 
values: "
-+ Arrays.asList(rowAggregators), 
ScanUtil.getCustomAnnotations(scan)));
-}
 // If we're at an aggregation boundary, reset the
 // aggregators and
 // aggregate with the current result (which is not a part 
of



[09/50] [abbrv] phoenix git commit: PHOENIX-4768 Re-enable testCompactUpdatesStats and testCompactUpdatesStatsWithMinStatsUpdateFreq of StatsCollectorIT

2018-07-25 Thread elserj
PHOENIX-4768 Re-enable testCompactUpdatesStats and 
testCompactUpdatesStatsWithMinStatsUpdateFreq of StatsCollectorIT


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/09c017e3
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/09c017e3
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/09c017e3

Branch: refs/heads/master
Commit: 09c017e3cc8a2d1ec4ce27c65a5c76b71de138a0
Parents: 3df50d8
Author: Ankit Singhal 
Authored: Fri Jun 1 14:17:58 2018 -0700
Committer: Ankit Singhal 
Committed: Fri Jun 1 14:17:58 2018 -0700

--
 .../org/apache/phoenix/schema/stats/StatsCollectorIT.java| 8 ++--
 1 file changed, 2 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/09c017e3/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/StatsCollectorIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/StatsCollectorIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/StatsCollectorIT.java
index 3af0d09..c2325ae 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/StatsCollectorIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/StatsCollectorIT.java
@@ -46,7 +46,6 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver;
@@ -69,7 +68,6 @@ import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.TestUtil;
 import org.junit.Before;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -405,13 +403,11 @@ public abstract class StatsCollectorIT extends 
BaseUniqueNamesOwnClusterIT {
 }
 
 @Test
-@Ignore //TODO remove this once  
https://issues.apache.org/jira/browse/TEPHRA-208 is fixed
 public void testCompactUpdatesStats() throws Exception {
 testCompactUpdatesStats(0, fullTableName);
 }
 
 @Test
-@Ignore //TODO remove this once  
https://issues.apache.org/jira/browse/TEPHRA-208 is fixed
 public void testCompactUpdatesStatsWithMinStatsUpdateFreq() throws 
Exception {
 
testCompactUpdatesStats(QueryServicesOptions.DEFAULT_STATS_UPDATE_FREQ_MS, 
fullTableName);
 }
@@ -464,7 +460,7 @@ public abstract class StatsCollectorIT extends 
BaseUniqueNamesOwnClusterIT {
 Scan scan = new Scan();
 scan.setRaw(true);
 PhoenixConnection phxConn = conn.unwrap(PhoenixConnection.class);
-try (Table htable = 
phxConn.getQueryServices().getTable(Bytes.toBytes(tableName))) {
+try (Table htable = 
phxConn.getQueryServices().getTable(Bytes.toBytes(physicalTableName))) {
 ResultScanner scanner = htable.getScanner(scan);
 Result result;
 while ((result = scanner.next())!=null) {
@@ -477,7 +473,7 @@ public abstract class StatsCollectorIT extends 
BaseUniqueNamesOwnClusterIT {
 scan = new Scan();
 scan.setRaw(true);
 phxConn = conn.unwrap(PhoenixConnection.class);
-try (Table htable = 
phxConn.getQueryServices().getTable(Bytes.toBytes(tableName))) {
+try (Table htable = 
phxConn.getQueryServices().getTable(Bytes.toBytes(physicalTableName))) {
 ResultScanner scanner = htable.getScanner(scan);
 Result result;
 while ((result = scanner.next())!=null) {



[14/50] [abbrv] phoenix git commit: PHOENIX-4772 phoenix.sequence.saltBuckets is not honoured

2018-07-25 Thread elserj
PHOENIX-4772 phoenix.sequence.saltBuckets is not honoured


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/abcf0d1a
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/abcf0d1a
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/abcf0d1a

Branch: refs/heads/master
Commit: abcf0d1ab3a85f8aa9bc5f5f5d54f6b229cfa247
Parents: 1b18d34
Author: Ankit Singhal 
Authored: Thu Jun 7 11:02:55 2018 -0700
Committer: Ankit Singhal 
Committed: Thu Jun 7 11:02:55 2018 -0700

--
 .../org/apache/phoenix/end2end/SequenceIT.java  |  57 +-
 .../phoenix/end2end/SequencePointInTimeIT.java  | 112 +++
 .../query/ConnectionQueryServicesImpl.java  |  13 ++-
 .../query/ConnectionlessQueryServicesImpl.java  |   8 +-
 .../apache/phoenix/query/QueryConstants.java|   2 +
 .../org/apache/phoenix/schema/Sequence.java |   6 +-
 6 files changed, 139 insertions(+), 59 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/abcf0d1a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SequenceIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SequenceIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SequenceIT.java
index 4cc9628..b76cc4e 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SequenceIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SequenceIT.java
@@ -18,6 +18,8 @@
 
 package org.apache.phoenix.end2end;
 
+import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CATALOG_SCHEMA;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TYPE_SEQUENCE;
 import static 
org.apache.phoenix.query.QueryServicesTestImpl.DEFAULT_SEQUENCE_CACHE_SIZE;
 import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
 import static org.junit.Assert.assertEquals;
@@ -38,6 +40,7 @@ import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
 import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.query.QueryServicesTestImpl;
 import org.apache.phoenix.schema.SchemaNotFoundException;
 import org.apache.phoenix.schema.SequenceAlreadyExistsException;
 import org.apache.phoenix.schema.SequenceNotFoundException;
@@ -202,6 +205,8 @@ public class SequenceIT extends ParallelStatsDisabledIT {
 String schemaName = getSchemaName(sequenceName);
 
 conn.createStatement().execute("CREATE SEQUENCE " + sequenceName + " 
START WITH 2 INCREMENT BY 4");
+int bucketNum = PhoenixRuntime.getTableNoCache(conn, 
SYSTEM_CATALOG_SCHEMA + "." + TYPE_SEQUENCE).getBucketNum();
+assertEquals("Salt bucket for SYSTEM.SEQUENCE should be test 
default",bucketNum , QueryServicesTestImpl.DEFAULT_SEQUENCE_TABLE_SALT_BUCKETS);
 String query = "SELECT sequence_schema, sequence_name, current_value, 
increment_by FROM \"SYSTEM\".\"SEQUENCE\" WHERE sequence_name='" + 
sequenceNameWithoutSchema + "'";
 ResultSet rs = conn.prepareStatement(query).executeQuery();
 assertTrue(rs.next());
@@ -1406,56 +1411,4 @@ public class SequenceIT extends ParallelStatsDisabledIT {
return tableName.substring(tableName.indexOf(".") + 1, 
tableName.length());
 }
 
-@Test
-public void testPointInTimeSequence() throws Exception {
-String seqName = generateSequenceNameWithSchema(); 
-Properties scnProps = PropertiesUtil.deepCopy(TEST_PROPERTIES);
-scnProps.put(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(EnvironmentEdgeManager.currentTimeMillis()));
-Connection beforeSeqConn = DriverManager.getConnection(getUrl(), 
scnProps);
-
-ResultSet rs;
-Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
-Connection conn = DriverManager.getConnection(getUrl(), props);
-conn.createStatement().execute("CREATE SEQUENCE " + seqName + "");
-
-try {
-beforeSeqConn.createStatement().executeQuery("SELECT next value 
for " + seqName);
-fail();
-} catch (SequenceNotFoundException e) {
-beforeSeqConn.close();
-}
-
-scnProps.put(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(EnvironmentEdgeManager.currentTimeMillis()));
-Connection afterSeqConn = DriverManager.getConnection(getUrl(), 
scnProps);
-
-rs = conn.createStatement().executeQuery("SELECT next value for " + 
seqName);
-assertTrue(rs.next());
-assertEquals(1, rs.getInt(1));
-rs = conn.createStatement().executeQuery("SELECT next value for " + 
seqName);
-assertTrue(rs.next());
-assertEquals(2, rs.getInt(1));
-
-conn.createStatement().execute("

[11/50] [abbrv] phoenix git commit: PHOENIX-4758 Validate that HADOOP_CONF_DIR is not set for HiveMRIT

2018-07-25 Thread elserj
PHOENIX-4758 Validate that HADOOP_CONF_DIR is not set for HiveMRIT


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/44c1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/44c1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/44c1

Branch: refs/heads/master
Commit: 44c19a4e9b23d71a7abfed5250286e8560aa
Parents: 3b1c0d3
Author: Josh Elser 
Authored: Tue May 29 14:14:04 2018 -0400
Committer: Josh Elser 
Committed: Mon Jun 4 12:33:17 2018 -0400

--
 .../src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java | 8 
 1 file changed, 8 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/44c1/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
--
diff --git 
a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
index 6e880ef..c866921 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
@@ -18,6 +18,10 @@
 
 package org.apache.phoenix.hive;
 
+import static org.junit.Assert.fail;
+
+import java.util.Map;
+
 import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
@@ -29,6 +33,10 @@ public class HiveMapReduceIT extends HivePhoenixStoreIT {
 
 @BeforeClass
 public static void setUpBeforeClass() throws Exception {
+final String hadoopConfDir = System.getenv("HADOOP_CONF_DIR");
+if (hadoopConfDir != null && hadoopConfDir.length() != 0) {
+fail("HADOOP_CONF_DIR is non-empty in the current shell 
environment which will very likely cause this test to fail.");
+}
 setup(HiveTestUtil.MiniClusterType.mr);
 }
 



[04/50] [abbrv] phoenix git commit: PHOENIX-4762 Performance regression with transactional immutable indexes

2018-07-25 Thread elserj
PHOENIX-4762 Performance regression with transactional immutable indexes


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/eeea6c60
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/eeea6c60
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/eeea6c60

Branch: refs/heads/master
Commit: eeea6c60f299de73d0ebfd83b779f19476aa1fa9
Parents: d79c300
Author: James Taylor 
Authored: Fri Jun 1 09:03:21 2018 -0700
Committer: James Taylor 
Committed: Fri Jun 1 10:57:28 2018 -0700

--
 .../phoenix/end2end/index/MutableIndexIT.java   |   8 +-
 .../apache/phoenix/execute/MutationState.java   | 685 ++-
 .../PhoenixTxIndexMutationGenerator.java|   2 +-
 .../transaction/OmidTransactionContext.java |   6 +
 .../transaction/PhoenixTransactionContext.java  |   6 +
 .../transaction/TephraTransactionContext.java   |  16 +
 .../java/org/apache/phoenix/util/IndexUtil.java |  28 -
 7 files changed, 379 insertions(+), 372 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/eeea6c60/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java
index 87bc124..12e0dbf 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java
@@ -34,6 +34,8 @@ import java.util.Collection;
 import java.util.List;
 import java.util.Properties;
 
+import jline.internal.Log;
+
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Put;
@@ -70,8 +72,6 @@ import org.junit.runners.Parameterized.Parameters;
 
 import com.google.common.primitives.Doubles;
 
-import jline.internal.Log;
-
 @RunWith(Parameterized.class)
 public class MutableIndexIT extends ParallelStatsDisabledIT {
 
@@ -852,7 +852,7 @@ public class MutableIndexIT extends ParallelStatsDisabledIT 
{
   List regions = 
getUtility().getHBaseCluster().getRegions(hbaseTN);
   HRegion hRegion = regions.get(0);
   hRegion.flush(true);
-  HStore store = (HStore) hRegion.getStore(famBytes);
+  HStore store = hRegion.getStore(famBytes);
   store.triggerMajorCompaction();
   store.compactRecentForTestingAssumingDefaultPolicy(1);
 
@@ -860,7 +860,7 @@ public class MutableIndexIT extends ParallelStatsDisabledIT 
{
   regions = 
getUtility().getHBaseCluster().getRegions(TableName.valueOf(PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME));
   hRegion = regions.get(0);
   hRegion.flush(true);
-  store = (HStore) 
hRegion.getStore(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES);
+  store = hRegion.getStore(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES);
   store.triggerMajorCompaction();
   store.compactRecentForTestingAssumingDefaultPolicy(1);
   }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/eeea6c60/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java 
b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
index 1d662ab..c3df314 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
@@ -83,7 +83,6 @@ import org.apache.phoenix.schema.PMetaData;
 import org.apache.phoenix.schema.PName;
 import org.apache.phoenix.schema.PRow;
 import org.apache.phoenix.schema.PTable;
-import org.apache.phoenix.schema.PTable.IndexType;
 import org.apache.phoenix.schema.PTableRef;
 import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.RowKeySchema;
@@ -117,9 +116,7 @@ import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 
 /**
- * 
  * Tracks the uncommitted state
- *
  */
 public class MutationState implements SQLCloseable {
 private static final Logger logger = 
LoggerFactory.getLogger(MutationState.class);
@@ -151,30 +148,34 @@ public class MutationState implements SQLCloseable {
 this(maxSize, maxSizeBytes, connection, false, null);
 }
 
-public MutationState(long maxSize, long maxSizeBytes, PhoenixConnection 
connection, PhoenixTransactionContext txContext) {
+public MutationState(long maxSize, long maxSizeBytes, PhoenixConnection 
connection,
+PhoenixTransactionContext txContext) {
 this(maxSize, maxSizeBytes,

[07/50] [abbrv] phoenix git commit: PHOENIX-4769 Annotate SystemCatalogIT so that it will run with the test suite

2018-07-25 Thread elserj
PHOENIX-4769 Annotate SystemCatalogIT so that it will run with the test suite


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/3a7c9033
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/3a7c9033
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/3a7c9033

Branch: refs/heads/master
Commit: 3a7c9033936779f5290f4fabdd4912c27f7af64f
Parents: f8a1f1e
Author: Ankit Singhal 
Authored: Fri Jun 1 14:13:41 2018 -0700
Committer: Ankit Singhal 
Committed: Fri Jun 1 14:13:41 2018 -0700

--
 .../org/apache/phoenix/schema/stats/StatsCollectorIT.java| 8 ++--
 1 file changed, 2 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/3a7c9033/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/StatsCollectorIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/StatsCollectorIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/StatsCollectorIT.java
index 3af0d09..c2325ae 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/StatsCollectorIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/StatsCollectorIT.java
@@ -46,7 +46,6 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver;
@@ -69,7 +68,6 @@ import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.TestUtil;
 import org.junit.Before;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -405,13 +403,11 @@ public abstract class StatsCollectorIT extends 
BaseUniqueNamesOwnClusterIT {
 }
 
 @Test
-@Ignore //TODO remove this once  
https://issues.apache.org/jira/browse/TEPHRA-208 is fixed
 public void testCompactUpdatesStats() throws Exception {
 testCompactUpdatesStats(0, fullTableName);
 }
 
 @Test
-@Ignore //TODO remove this once  
https://issues.apache.org/jira/browse/TEPHRA-208 is fixed
 public void testCompactUpdatesStatsWithMinStatsUpdateFreq() throws 
Exception {
 
testCompactUpdatesStats(QueryServicesOptions.DEFAULT_STATS_UPDATE_FREQ_MS, 
fullTableName);
 }
@@ -464,7 +460,7 @@ public abstract class StatsCollectorIT extends 
BaseUniqueNamesOwnClusterIT {
 Scan scan = new Scan();
 scan.setRaw(true);
 PhoenixConnection phxConn = conn.unwrap(PhoenixConnection.class);
-try (Table htable = 
phxConn.getQueryServices().getTable(Bytes.toBytes(tableName))) {
+try (Table htable = 
phxConn.getQueryServices().getTable(Bytes.toBytes(physicalTableName))) {
 ResultScanner scanner = htable.getScanner(scan);
 Result result;
 while ((result = scanner.next())!=null) {
@@ -477,7 +473,7 @@ public abstract class StatsCollectorIT extends 
BaseUniqueNamesOwnClusterIT {
 scan = new Scan();
 scan.setRaw(true);
 phxConn = conn.unwrap(PhoenixConnection.class);
-try (Table htable = 
phxConn.getQueryServices().getTable(Bytes.toBytes(tableName))) {
+try (Table htable = 
phxConn.getQueryServices().getTable(Bytes.toBytes(physicalTableName))) {
 ResultScanner scanner = htable.getScanner(scan);
 Result result;
 while ((result = scanner.next())!=null) {



[33/50] [abbrv] phoenix git commit: PHOENIX-3383 Comparison between descending row keys used in RVC is reverse

2018-07-25 Thread elserj
PHOENIX-3383 Comparison between descending row keys used in RVC is reverse


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b91d7b0d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b91d7b0d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b91d7b0d

Branch: refs/heads/master
Commit: b91d7b0d07c0a37b4d3617867225584b51895264
Parents: 35366b3
Author: James Taylor 
Authored: Thu Jul 5 21:38:28 2018 -0700
Committer: James Taylor 
Committed: Wed Jul 11 07:39:35 2018 -0700

--
 .../org/apache/phoenix/end2end/QueryMoreIT.java |   12 +-
 .../org/apache/phoenix/compile/KeyPart.java |2 -
 .../org/apache/phoenix/compile/ScanRanges.java  |   77 +-
 .../apache/phoenix/compile/WhereOptimizer.java  | 1304 +-
 .../PhoenixTxIndexMutationGenerator.java|2 +-
 .../expression/function/FunctionExpression.java |   10 +-
 .../expression/function/InvertFunction.java |   19 +-
 .../expression/function/PrefixFunction.java |6 +-
 .../expression/function/RTrimFunction.java  |6 +-
 .../function/RoundDateExpression.java   |   22 +-
 .../function/RoundDecimalExpression.java|7 +-
 .../phoenix/iterate/BaseResultIterators.java|4 +-
 .../apache/phoenix/iterate/ExplainTable.java|   10 -
 .../java/org/apache/phoenix/query/KeyRange.java |   28 +-
 .../org/apache/phoenix/schema/RowKeySchema.java |   78 ++
 .../phoenix/compile/QueryCompilerTest.java  |2 +-
 .../phoenix/compile/QueryOptimizerTest.java |5 +-
 .../TenantSpecificViewIndexCompileTest.java |8 +-
 .../phoenix/compile/WhereOptimizerTest.java |  359 -
 .../RoundFloorCeilExpressionsTest.java  |   59 +-
 .../apache/phoenix/query/KeyRangeClipTest.java  |2 +-
 .../org/apache/phoenix/query/QueryPlanTest.java |8 +-
 .../apache/phoenix/schema/RowKeySchemaTest.java |   48 +
 23 files changed, 1567 insertions(+), 511 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b91d7b0d/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryMoreIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryMoreIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryMoreIT.java
index 9109c12..04272fa 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryMoreIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryMoreIT.java
@@ -372,9 +372,6 @@ public class QueryMoreIT extends ParallelStatsDisabledIT {
 }
 }
 
-// FIXME: this repros PHOENIX-3382, but turned up two more issues:
-// 1) PHOENIX-3383 Comparison between descending row keys used in RVC is 
reverse
-// 2) PHOENIX-3384 Optimize RVC expressions for non leading row key columns
 @Test
 public void testRVCOnDescWithLeadingPKEquality() throws Exception {
 final Connection conn = DriverManager.getConnection(getUrl());
@@ -398,14 +395,11 @@ public class QueryMoreIT extends ParallelStatsDisabledIT {
 conn.createStatement().execute("UPSERT INTO " + fullTableName + " 
VALUES ('org1',1,'02')");
 conn.commit();
 
-// FIXME: PHOENIX-3383
-// This comparison is really backwards: it should be (score, 
entity_id) < (2, '04'),
-// but because we're matching a descending key, our comparison has to 
be switched.
 try (Statement stmt = conn.createStatement()) {
 final ResultSet rs = stmt.executeQuery("SELECT entity_id, score\n" 
+ 
 "FROM " + fullTableName + "\n" + 
 "WHERE organization_id = 'org1'\n" + 
-"AND (score, entity_id) > (2, '04')\n" + 
+"AND (score, entity_id) < (2, '04')\n" + 
 "ORDER BY score DESC, entity_id DESC\n" + 
 "LIMIT 3");
 assertTrue(rs.next());
@@ -416,13 +410,11 @@ public class QueryMoreIT extends ParallelStatsDisabledIT {
 assertEquals(1.0, rs.getDouble(2), 0.001);
 assertFalse(rs.next());
 }
-// FIXME: PHOENIX-3384
-// It should not be necessary to specify organization_id in this query
 try (Statement stmt = conn.createStatement()) {
 final ResultSet rs = stmt.executeQuery("SELECT entity_id, score\n" 
+ 
 "FROM " + fullTableName + "\n" + 
 "WHERE organization_id = 'org1'\n" + 
-"AND (organization_id, score, entity_id) > ('org1', 2, 
'04')\n" + 
+"AND (organization_id, score, entity_id) < ('org1', 2, 
'04')\n" + 
 "ORDER BY score DESC, entity_id DESC\n" + 
 "LIMIT 3");
 assertTrue(rs.next());

http://git-wip-us.

[12/50] [abbrv] phoenix git commit: PHOENIX-4773 Move HTable rollback wrapper into Tephra TAL method

2018-07-25 Thread elserj
PHOENIX-4773 Move HTable rollback wrapper into Tephra TAL method


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/5aebc96b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/5aebc96b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/5aebc96b

Branch: refs/heads/master
Commit: 5aebc96b6882f2b4a050b184547e98b27f500da5
Parents: 44c1aaa
Author: James Taylor 
Authored: Mon Jun 4 20:27:36 2018 -0700
Committer: James Taylor 
Committed: Tue Jun 5 22:32:59 2018 -0700

--
 .../apache/phoenix/cache/ServerCacheClient.java |  23 +--
 .../apache/phoenix/execute/HashJoinPlan.java|   7 +-
 .../apache/phoenix/execute/MutationState.java   | 196 ++-
 .../PhoenixTxIndexMutationGenerator.java|  42 
 .../phoenix/index/IndexMetaDataCacheClient.java |  67 ++-
 .../apache/phoenix/join/HashCacheClient.java|   5 +-
 .../transaction/OmidTransactionContext.java |   3 +-
 .../transaction/PhoenixTransactionContext.java  |   5 +-
 .../transaction/TephraTransactionContext.java   |  92 -
 .../java/org/apache/phoenix/util/IndexUtil.java |   8 +
 10 files changed, 237 insertions(+), 211 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/5aebc96b/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java 
b/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
index 4ecf83b..0d2a302 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
@@ -70,7 +70,6 @@ import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.query.QueryServicesOptions;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTable.IndexType;
-import org.apache.phoenix.schema.TableRef;
 import org.apache.phoenix.util.Closeables;
 import org.apache.phoenix.util.SQLCloseable;
 import org.apache.phoenix.util.SQLCloseables;
@@ -90,7 +89,7 @@ public class ServerCacheClient {
 private static final Random RANDOM = new Random();
public static final String HASH_JOIN_SERVER_CACHE_RESEND_PER_SERVER = 
"hash.join.server.cache.resend.per.server";
 private final PhoenixConnection connection;
-private final Map cacheUsingTableRefMap = new 
ConcurrentHashMap();
+private final Map cacheUsingTableMap = new 
ConcurrentHashMap();
 
 /**
  * Construct client used to create a serialized cached snapshot of a table 
and send it to each region server
@@ -220,12 +219,12 @@ public class ServerCacheClient {
 }
 
 public ServerCache addServerCache(ScanRanges keyRanges, final 
ImmutableBytesWritable cachePtr, final byte[] txState,
-final ServerCacheFactory cacheFactory, final TableRef 
cacheUsingTableRef) throws SQLException {
-return addServerCache(keyRanges, cachePtr, txState, cacheFactory, 
cacheUsingTableRef, false);
+final ServerCacheFactory cacheFactory, final PTable 
cacheUsingTable) throws SQLException {
+return addServerCache(keyRanges, cachePtr, txState, cacheFactory, 
cacheUsingTable, false);
 }
 
 public ServerCache addServerCache(ScanRanges keyRanges, final 
ImmutableBytesWritable cachePtr, final byte[] txState,
-final ServerCacheFactory cacheFactory, final TableRef 
cacheUsingTableRef, boolean storeCacheOnClient)
+final ServerCacheFactory cacheFactory, final PTable 
cacheUsingTable, boolean storeCacheOnClient)
 throws SQLException {
 ConnectionQueryServices services = connection.getQueryServices();
 List closeables = new ArrayList();
@@ -241,7 +240,6 @@ public class ServerCacheClient {
 ExecutorService executor = services.getExecutor();
 List> futures = Collections.emptyList();
 try {
-final PTable cacheUsingTable = cacheUsingTableRef.getTable();
 List locations = 
services.getAllTableRegions(cacheUsingTable.getPhysicalName().getBytes());
 int nRegions = locations.size();
 // Size these based on worst case
@@ -257,8 +255,8 @@ public class ServerCacheClient {
 // Call RPC once per server
 servers.add(entry);
 if (LOG.isDebugEnabled()) 
{LOG.debug(addCustomAnnotations("Adding cache entry to be sent for " + entry, 
connection));}
-final byte[] key = 
getKeyInRegion(entry.getRegion().getStartKey());
-final Table htable = 
services.getTable(cacheUsingTableRef.getTable().getPhysicalName().getBytes());
+final byte[] key = 
getKeyI

[01/50] [abbrv] phoenix git commit: PHOENIX-4724 Efficient Equi-Depth histogram for streaming data

2018-07-25 Thread elserj
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.4 [created] 6b363b3a2


PHOENIX-4724 Efficient Equi-Depth histogram for streaming data


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/cb17adbb
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/cb17adbb
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/cb17adbb

Branch: refs/heads/4.x-HBase-1.4
Commit: cb17adbbde56cacd43846ead2200e6606ed64ae8
Parents: b63b8e9
Author: Vincent Poon 
Authored: Thu May 3 17:07:27 2018 -0700
Committer: Vincent Poon 
Committed: Fri May 18 10:39:10 2018 -0700

--
 .../phoenix/util/EquiDepthStreamHistogram.java  | 453 +++
 .../util/EquiDepthStreamHistogramTest.java  | 303 +
 2 files changed, 756 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/cb17adbb/phoenix-core/src/main/java/org/apache/phoenix/util/EquiDepthStreamHistogram.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/util/EquiDepthStreamHistogram.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/util/EquiDepthStreamHistogram.java
new file mode 100644
index 000..7649933
--- /dev/null
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/util/EquiDepthStreamHistogram.java
@@ -0,0 +1,453 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.util;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Pair;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+/**
+ * Equi-Depth histogram based on 
http://web.cs.ucla.edu/~zaniolo/papers/Histogram-EDBT2011-CamReady.pdf,
+ * but without the sliding window - we assume a single window over the entire 
data set.
+ *
+ * Used to generate the bucket boundaries of a histogram where each bucket has 
the same # of items.
+ * This is useful, for example, for pre-splitting an index table, by feeding 
in data from the indexed column.
+ * Works on streaming data - the histogram is dynamically updated for each new 
value.
+ *
+ * Add values by calling addValue(), then at the end computeBuckets() can be 
called to get
+ * the buckets with their bounds.
+ *
+ * Average time complexity: O(log(B x p) + (B x p)/T) = nearly constant
+ * B = number of buckets, p = expansion factor constant, T = # of values
+ *
+ * Space complexity: different from paper since here we keep the blocked bars 
but don't have expiration,
+ *  comes out to basically O(log(T))
+ */
+public class EquiDepthStreamHistogram {
+private static final Log LOG = 
LogFactory.getLog(EquiDepthStreamHistogram.class);
+
+// used in maxSize calculation for each bar
+private static final double MAX_COEF = 1.7;
+// higher expansion factor = better accuracy and worse performance
+private static final short DEFAULT_EXPANSION_FACTOR = 7;
+private int numBuckets;
+private int maxBars;
+@VisibleForTesting
+long totalCount; // number of values - i.e. count across all bars
+@VisibleForTesting
+List bars;
+
+/**
+ * Create a new histogram
+ * @param numBuckets number of buckets, which can be used to get the splits
+ */
+public EquiDepthStreamHistogram(int numBuckets) {
+this(numBuckets, DEFAULT_EXPANSION_FACTOR);
+}
+
+/**
+ * @param numBuckets number of buckets
+ * @param expansionFactor number of bars = expansionFactor * numBuckets
+ * The more bars, the better the accuracy, at the cost of worse performance
+ */
+public EquiDepthStreamHistogram(int numBuckets, int expansionFactor) {
+this.numBuckets = numBuckets;
+this.maxBars = numBuckets * expansionFactor;
+this.bars = new ArrayList<>(maxBars);
+}
+
+/**
+ * Add a new 

[02/50] [abbrv] phoenix git commit: PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck

2018-07-25 Thread elserj
PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d79c3002
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d79c3002
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d79c3002

Branch: refs/heads/master
Commit: d79c30023af47f32e2dac9d871aa75265cebc34f
Parents: b21877d
Author: Sergey Soldatov 
Authored: Thu May 31 12:07:29 2018 -0700
Committer: ss77892 
Committed: Thu May 31 12:35:38 2018 -0700

--
 .../phoenix/coprocessor/MetaDataEndpointImpl.java|  6 +++---
 .../apache/phoenix/coprocessor/MetaDataProtocol.java | 15 +++
 .../apache/phoenix/exception/SQLExceptionCode.java   |  6 +++---
 .../index/write/ParallelWriterIndexCommitter.java|  4 ++--
 .../write/TrackingParallelWriterIndexCommitter.java  |  6 +++---
 .../phoenix/index/PhoenixTransactionalIndexer.java   |  4 ++--
 .../apache/phoenix/jdbc/PhoenixDatabaseMetaData.java |  1 +
 .../phoenix/query/ConnectionQueryServicesImpl.java   |  4 ++--
 .../org/apache/phoenix/schema/MetaDataClient.java|  4 ++--
 .../main/java/org/apache/phoenix/util/ScanUtil.java  |  3 +--
 10 files changed, 34 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/d79c3002/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index a9b8267..50a1714 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -947,12 +947,12 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements RegionCopr
 .getValueArray()[indexStateKv.getValueOffset()]);
 // If client is not yet up to 4.12, then translate PENDING_ACTIVE to 
ACTIVE (as would have been
 // the value in those versions) since the client won't have this index 
state in its enum.
-if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_ACTIVE_INDEX) {
+if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
MetaDataProtocol.MIN_PENDING_ACTIVE_INDEX) {
 indexState = PIndexState.ACTIVE;
 }
 // If client is not yet up to 4.14, then translate PENDING_DISABLE to 
DISABLE
 // since the client won't have this index state in its enum.
-if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_DISABLE_INDEX) {
+if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
MetaDataProtocol.MIN_PENDING_DISABLE_INDEX) {
 // note: for older clients, we have to rely on the rebuilder to 
transition PENDING_DISABLE -> DISABLE
 indexState = PIndexState.DISABLE;
 }
@@ -3694,7 +3694,7 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements RegionCopr
 GetVersionResponse.Builder builder = GetVersionResponse.newBuilder();
 Configuration config = env.getConfiguration();
 if (isTablesMappingEnabled
-&& 
PhoenixDatabaseMetaData.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
+&& MetaDataProtocol.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
 logger.error("Old client is not compatible when" + " system tables 
are upgraded to map to namespace");
 ProtobufUtil.setControllerException(controller,
 ServerUtil.createIOException(

http://git-wip-us.apache.org/repos/asf/phoenix/blob/d79c3002/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
index 2a1c3a5..0bd1f8c 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
@@ -97,6 +97,21 @@ public abstract class MetaDataProtocol extends 
MetaDataService {
 // MIN_SYSTEM_TABLE_TIMESTAMP needs to be set to the max of all the 
MIN_SYSTEM_TABLE_TIMESTAMP_* constants
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP = 
MIN_SYSTEM_TABLE_TIMESTAMP_5_0_0;
 
+// Version below which we should disallow usage of mutable secondary 
indexing

[05/50] [abbrv] phoenix git commit: PHOENIX-4459 Region assignments are failing for the test cases with extended clocks to support SCN(Sergey Soldatov)

2018-07-25 Thread elserj
PHOENIX-4459 Region assignments are failing for the test cases with extended 
clocks to support SCN(Sergey Soldatov)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c489587e
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c489587e
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c489587e

Branch: refs/heads/master
Commit: c489587e4ee91be922e7dca2ee7f1aba47d75a4d
Parents: eeea6c6
Author: Ankit Singhal 
Authored: Fri Jun 1 14:10:15 2018 -0700
Committer: Ankit Singhal 
Committed: Fri Jun 1 14:10:15 2018 -0700

--
 .../phoenix/end2end/ConcurrentMutationsIT.java  |  76 +-
 .../end2end/ExplainPlanWithStatsEnabledIT.java  |  17 ++-
 .../end2end/index/PartialIndexRebuilderIT.java  | 105 +++
 3 files changed, 54 insertions(+), 144 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c489587e/phoenix-core/src/it/java/org/apache/phoenix/end2end/ConcurrentMutationsIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ConcurrentMutationsIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ConcurrentMutationsIT.java
index 01133f1..ffc1049 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ConcurrentMutationsIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ConcurrentMutationsIT.java
@@ -55,7 +55,6 @@ import org.junit.Test;
 import org.junit.runner.RunWith;
 
 @RunWith(RunUntilFailure.class)
-@Ignore
 public class ConcurrentMutationsIT extends ParallelStatsDisabledIT {
 private static final Random RAND = new Random(5);
 private static final String MVCC_LOCK_TEST_TABLE_PREFIX = "MVCCLOCKTEST_"; 
 
@@ -66,6 +65,7 @@ public class ConcurrentMutationsIT extends 
ParallelStatsDisabledIT {
 
 private static class MyClock extends EnvironmentEdge {
 public volatile long time;
+boolean shouldAdvance = true;
 
 public MyClock (long time) {
 this.time = time;
@@ -73,7 +73,14 @@ public class ConcurrentMutationsIT extends 
ParallelStatsDisabledIT {
 
 @Override
 public long currentTime() {
-return time;
+if (shouldAdvance) {
+return time++;
+} else {
+return time;
+}
+}
+public void setAdvance(boolean val) {
+shouldAdvance = val;
 }
 }
 
@@ -422,21 +429,15 @@ public class ConcurrentMutationsIT extends 
ParallelStatsDisabledIT {
 String tableName = generateUniqueName();
 String indexName = generateUniqueName();
 Properties props = 
PropertiesUtil.deepCopy(TestUtil.TEST_PROPERTIES);
-long ts = 1000;
-clock.time = ts;
-Connection conn = DriverManager.getConnection(getUrl(), props);
 
+Connection conn = DriverManager.getConnection(getUrl(), props);
 conn.createStatement().execute("CREATE TABLE " + tableName + "(k1 
CHAR(2) NOT NULL, k2 CHAR(2) NOT NULL, ts TIMESTAMP, V VARCHAR, V2 VARCHAR, 
CONSTRAINT pk PRIMARY KEY (k1,k2)) COLUMN_ENCODED_BYTES = 0");
 conn.close();
 
-ts = 1010;
-clock.time = ts;
 conn = DriverManager.getConnection(getUrl(), props);
 conn.createStatement().execute("CREATE INDEX " + indexName + " ON 
" + tableName + "(k2,k1,ts) INCLUDE (V, v2)");
 conn.close();
 
-ts = 1020;
-clock.time = ts;
-conn = DriverManager.getConnection(getUrl(), props);
+conn = DriverManager.getConnection(getUrl(), props);
 PreparedStatement stmt = conn.prepareStatement("UPSERT INTO " + 
tableName + " VALUES('aa','aa',?, '0')");
 stmt.setTimestamp(1, new Timestamp(1000L));
 stmt.executeUpdate();
@@ -444,8 +445,7 @@ public class ConcurrentMutationsIT extends 
ParallelStatsDisabledIT {
 conn.close();
 
 Timestamp expectedTimestamp;
-ts = 1040;
-clock.time = ts;
+clock.setAdvance(false);
 conn = DriverManager.getConnection(getUrl(), props);
 stmt = conn.prepareStatement("UPSERT INTO " + tableName + " 
VALUES('aa','aa',?, null)");
 expectedTimestamp = null;
@@ -455,10 +455,9 @@ public class ConcurrentMutationsIT extends 
ParallelStatsDisabledIT {
 stmt.setTimestamp(1, new Timestamp(3000L));
 stmt.executeUpdate();
 conn.commit();
+clock.setAdvance(true);
 conn.close();
 
-ts = 1050;
-clock.time = ts;
 conn = DriverManager.getConnection(getUrl(), props);
 
 IndexScrutiny.scrutinizeIndex(conn, tableName, index

[08/50] [abbrv] phoenix git commit: This reverts commit 3a7c9033936779f5290f4fabdd4912c27f7af64f to just fix the JIRA No.

2018-07-25 Thread elserj
This reverts commit 3a7c9033936779f5290f4fabdd4912c27f7af64f to just fix the 
JIRA No.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/3df50d8c
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/3df50d8c
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/3df50d8c

Branch: refs/heads/master
Commit: 3df50d8c2a38f53e7c978bd0d36197167256e306
Parents: 3a7c903
Author: Ankit Singhal 
Authored: Fri Jun 1 14:15:14 2018 -0700
Committer: Ankit Singhal 
Committed: Fri Jun 1 14:15:14 2018 -0700

--
 .../org/apache/phoenix/schema/stats/StatsCollectorIT.java| 8 ++--
 1 file changed, 6 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/3df50d8c/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/StatsCollectorIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/StatsCollectorIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/StatsCollectorIT.java
index c2325ae..3af0d09 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/StatsCollectorIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/schema/stats/StatsCollectorIT.java
@@ -46,6 +46,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver;
@@ -68,6 +69,7 @@ import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.TestUtil;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -403,11 +405,13 @@ public abstract class StatsCollectorIT extends 
BaseUniqueNamesOwnClusterIT {
 }
 
 @Test
+@Ignore //TODO remove this once  
https://issues.apache.org/jira/browse/TEPHRA-208 is fixed
 public void testCompactUpdatesStats() throws Exception {
 testCompactUpdatesStats(0, fullTableName);
 }
 
 @Test
+@Ignore //TODO remove this once  
https://issues.apache.org/jira/browse/TEPHRA-208 is fixed
 public void testCompactUpdatesStatsWithMinStatsUpdateFreq() throws 
Exception {
 
testCompactUpdatesStats(QueryServicesOptions.DEFAULT_STATS_UPDATE_FREQ_MS, 
fullTableName);
 }
@@ -460,7 +464,7 @@ public abstract class StatsCollectorIT extends 
BaseUniqueNamesOwnClusterIT {
 Scan scan = new Scan();
 scan.setRaw(true);
 PhoenixConnection phxConn = conn.unwrap(PhoenixConnection.class);
-try (Table htable = 
phxConn.getQueryServices().getTable(Bytes.toBytes(physicalTableName))) {
+try (Table htable = 
phxConn.getQueryServices().getTable(Bytes.toBytes(tableName))) {
 ResultScanner scanner = htable.getScanner(scan);
 Result result;
 while ((result = scanner.next())!=null) {
@@ -473,7 +477,7 @@ public abstract class StatsCollectorIT extends 
BaseUniqueNamesOwnClusterIT {
 scan = new Scan();
 scan.setRaw(true);
 phxConn = conn.unwrap(PhoenixConnection.class);
-try (Table htable = 
phxConn.getQueryServices().getTable(Bytes.toBytes(physicalTableName))) {
+try (Table htable = 
phxConn.getQueryServices().getTable(Bytes.toBytes(tableName))) {
 ResultScanner scanner = htable.getScanner(scan);
 Result result;
 while ((result = scanner.next())!=null) {



[23/50] [abbrv] phoenix git commit: PHOENIX-4772 phoenix.sequence.saltBuckets is not honoured

2018-07-25 Thread elserj
PHOENIX-4772 phoenix.sequence.saltBuckets is not honoured


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/dd317c7f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/dd317c7f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/dd317c7f

Branch: refs/heads/4.x-HBase-1.4
Commit: dd317c7f242aa59e796e36d58290cf70c5b3e0d1
Parents: 69bb8b0
Author: Ankit Singhal 
Authored: Thu Jun 7 11:09:08 2018 -0700
Committer: Ankit Singhal 
Committed: Thu Jun 7 11:09:08 2018 -0700

--
 .../org/apache/phoenix/end2end/SequenceIT.java  |  57 +-
 .../phoenix/end2end/SequencePointInTimeIT.java  | 112 +++
 .../query/ConnectionQueryServicesImpl.java  |  13 ++-
 .../query/ConnectionlessQueryServicesImpl.java  |   7 +-
 .../apache/phoenix/query/QueryConstants.java|   2 +
 .../org/apache/phoenix/schema/Sequence.java |   6 +-
 6 files changed, 139 insertions(+), 58 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/dd317c7f/phoenix-core/src/it/java/org/apache/phoenix/end2end/SequenceIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SequenceIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SequenceIT.java
index 4cc9628..b76cc4e 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SequenceIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SequenceIT.java
@@ -18,6 +18,8 @@
 
 package org.apache.phoenix.end2end;
 
+import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CATALOG_SCHEMA;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TYPE_SEQUENCE;
 import static 
org.apache.phoenix.query.QueryServicesTestImpl.DEFAULT_SEQUENCE_CACHE_SIZE;
 import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
 import static org.junit.Assert.assertEquals;
@@ -38,6 +40,7 @@ import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
 import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.query.QueryServicesTestImpl;
 import org.apache.phoenix.schema.SchemaNotFoundException;
 import org.apache.phoenix.schema.SequenceAlreadyExistsException;
 import org.apache.phoenix.schema.SequenceNotFoundException;
@@ -202,6 +205,8 @@ public class SequenceIT extends ParallelStatsDisabledIT {
 String schemaName = getSchemaName(sequenceName);
 
 conn.createStatement().execute("CREATE SEQUENCE " + sequenceName + " 
START WITH 2 INCREMENT BY 4");
+int bucketNum = PhoenixRuntime.getTableNoCache(conn, 
SYSTEM_CATALOG_SCHEMA + "." + TYPE_SEQUENCE).getBucketNum();
+assertEquals("Salt bucket for SYSTEM.SEQUENCE should be test 
default",bucketNum , QueryServicesTestImpl.DEFAULT_SEQUENCE_TABLE_SALT_BUCKETS);
 String query = "SELECT sequence_schema, sequence_name, current_value, 
increment_by FROM \"SYSTEM\".\"SEQUENCE\" WHERE sequence_name='" + 
sequenceNameWithoutSchema + "'";
 ResultSet rs = conn.prepareStatement(query).executeQuery();
 assertTrue(rs.next());
@@ -1406,56 +1411,4 @@ public class SequenceIT extends ParallelStatsDisabledIT {
return tableName.substring(tableName.indexOf(".") + 1, 
tableName.length());
 }
 
-@Test
-public void testPointInTimeSequence() throws Exception {
-String seqName = generateSequenceNameWithSchema(); 
-Properties scnProps = PropertiesUtil.deepCopy(TEST_PROPERTIES);
-scnProps.put(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(EnvironmentEdgeManager.currentTimeMillis()));
-Connection beforeSeqConn = DriverManager.getConnection(getUrl(), 
scnProps);
-
-ResultSet rs;
-Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
-Connection conn = DriverManager.getConnection(getUrl(), props);
-conn.createStatement().execute("CREATE SEQUENCE " + seqName + "");
-
-try {
-beforeSeqConn.createStatement().executeQuery("SELECT next value 
for " + seqName);
-fail();
-} catch (SequenceNotFoundException e) {
-beforeSeqConn.close();
-}
-
-scnProps.put(PhoenixRuntime.CURRENT_SCN_ATTRIB, 
Long.toString(EnvironmentEdgeManager.currentTimeMillis()));
-Connection afterSeqConn = DriverManager.getConnection(getUrl(), 
scnProps);
-
-rs = conn.createStatement().executeQuery("SELECT next value for " + 
seqName);
-assertTrue(rs.next());
-assertEquals(1, rs.getInt(1));
-rs = conn.createStatement().executeQuery("SELECT next value for " + 
seqName);
-assertTrue(rs.next());
-assertEquals(2, rs.getInt(1));
-
-conn.createStatement().ex

[06/50] [abbrv] phoenix git commit: PHOENIX-4706 Remove bundling dependencies into phoenix-core

2018-07-25 Thread elserj
PHOENIX-4706 Remove bundling dependencies into phoenix-core


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ea949519
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ea949519
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ea949519

Branch: refs/heads/4.x-HBase-1.4
Commit: ea9495192d2256b9f81a06ee327526836b30259b
Parents: 28b9de0
Author: Josh Elser 
Authored: Mon May 7 20:01:35 2018 -0700
Committer: Josh Elser 
Committed: Mon May 21 17:27:40 2018 -0400

--
 phoenix-core/pom.xml | 28 
 1 file changed, 28 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ea949519/phoenix-core/pom.xml
--
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
index 9e9a15b..a29fb01 100644
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@ -170,34 +170,6 @@
 
   
   
-
-maven-assembly-plugin
-
-  
-core
-package
-
-  single
-
-
-  false
-  phoenix
-  
-true
-
-  true
-  
true
-  
true
-
-  
-  
-src/build/phoenix-core.xml
-  
-
-  
-  
-  
-  
 org.apache.maven.plugins
 maven-failsafe-plugin
   



[48/50] [abbrv] phoenix git commit: PHOENIX-4818 Fix RAT check for missing licenses

2018-07-25 Thread elserj
PHOENIX-4818 Fix RAT check for missing licenses


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/360fb805
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/360fb805
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/360fb805

Branch: refs/heads/master
Commit: 360fb805d63268d150129a6ba24a542ed171e27b
Parents: d56fd3c
Author: Vincent Poon 
Authored: Sun Jul 22 14:19:15 2018 -0700
Committer: Vincent Poon 
Committed: Sun Jul 22 14:19:15 2018 -0700

--
 .../phoenix/end2end/SplitSystemCatalogTests.java   | 17 +
 .../end2end/StatsEnabledSplitSystemCatalogIT.java  | 17 +
 .../apache/phoenix/query/ConnectionCachingIT.java  | 17 +
 .../schema/ParentTableNotFoundException.java   | 17 +
 4 files changed, 68 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/360fb805/phoenix-core/src/it/java/org/apache/phoenix/end2end/SplitSystemCatalogTests.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SplitSystemCatalogTests.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SplitSystemCatalogTests.java
index 27fc5c6..67d3fd3 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SplitSystemCatalogTests.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SplitSystemCatalogTests.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.phoenix.end2end;
 
 import java.lang.annotation.ElementType;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/360fb805/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsEnabledSplitSystemCatalogIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsEnabledSplitSystemCatalogIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsEnabledSplitSystemCatalogIT.java
index 1226341..d6101bf 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsEnabledSplitSystemCatalogIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsEnabledSplitSystemCatalogIT.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.phoenix.end2end;
 
 import static org.apache.phoenix.util.TestUtil.analyzeTable;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/360fb805/phoenix-core/src/it/java/org/apache/phoenix/query/ConnectionCachingIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/query/ConnectionCachingIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/query/ConnectionCachingIT.java
index b2ef052..d1dda04 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/query/ConnectionCachingIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/query/ConnectionCachingIT.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 

[34/50] [abbrv] phoenix git commit: PHOENIX-4805 Move Avatica version to 1.12 for PQS

2018-07-25 Thread elserj
PHOENIX-4805 Move Avatica version to 1.12 for PQS


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/da274302
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/da274302
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/da274302

Branch: refs/heads/4.x-HBase-1.4
Commit: da2743027582de3f4b6001be8ac1eadf4f008174
Parents: 2883226
Author: Karan Mehta 
Authored: Fri Jul 6 13:35:52 2018 -0700
Committer: Karan Mehta 
Committed: Thu Jul 12 09:24:24 2018 -0700

--
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/da274302/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 13f137e..075e736 100644
--- a/pom.xml
+++ b/pom.xml
@@ -98,7 +98,7 @@
 
 1.6
 2.1.2
-1.10.0
+1.12.0
 8.1.7.v20120910
 0.14.0-incubating
 2.0.2



[01/50] [abbrv] phoenix git commit: PHOENIX-4756 Integration tests for PhoenixStorageHandler doesn't work on 5.x branch [Forced Update!]

2018-07-25 Thread elserj
Repository: phoenix
Updated Branches:
  refs/heads/master 6b363b3a2 -> 8a874cc95 (forced update)


PHOENIX-4756 Integration tests for PhoenixStorageHandler doesn't work on 5.x 
branch


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b21877d7
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b21877d7
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b21877d7

Branch: refs/heads/master
Commit: b21877d75b66cb2c738f4949b444623981f30d95
Parents: 8a955d4
Author: Sergey Soldatov 
Authored: Sun May 27 23:05:00 2018 -0700
Committer: ss77892 
Committed: Wed May 30 21:03:44 2018 -0700

--
 phoenix-hive/pom.xml| 29 
 .../apache/phoenix/hive/HivePhoenixStoreIT.java |  9 --
 .../PhoenixStorageHandlerConstants.java |  4 +--
 .../PhoenixByteObjectInspector.java |  2 +-
 .../PhoenixDoubleObjectInspector.java   |  2 +-
 5 files changed, 39 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b21877d7/phoenix-hive/pom.xml
--
diff --git a/phoenix-hive/pom.xml b/phoenix-hive/pom.xml
index 8b9b4c1..0bc582c 100644
--- a/phoenix-hive/pom.xml
+++ b/phoenix-hive/pom.xml
@@ -78,6 +78,12 @@
 
   org.apache.hadoop
   hadoop-mapreduce-client-core
+  
+
+  io.netty
+  netty
+
+  
 
 
 
@@ -105,6 +111,11 @@
   test
 
 
+  io.netty
+  netty-all
+  4.1.17.Final
+
+
   org.apache.hadoop
   hadoop-hdfs
   test-jar
@@ -156,6 +167,24 @@
 
   
 
+  
+org.eclipse.jetty
+jetty-util
+test
+9.3.8.v20160314
+  
+  
+org.eclipse.jetty
+jetty-http
+test
+9.3.8.v20160314
+  
+  
+org.eclipse.jetty
+jetty-server
+test
+9.3.8.v20160314
+  
 
   org.mockito
   mockito-all

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b21877d7/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
--
diff --git 
a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
index a62d780..ecb2003 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
@@ -315,7 +315,7 @@ public class HivePhoenixStoreIT  extends 
BaseHivePhoenixStoreIT {
 public void testTimestampPredicate() throws Exception {
 String testName = "testTimeStampPredicate";
 hbaseTestUtil.getTestFileSystem().createNewFile(new Path(hiveLogDir, 
testName + ".out"));
-createFile("10\t2013-01-02 01:01:01.123456\n", new Path(hiveOutputDir, 
testName + ".out").toString());
+createFile("10\t2013-01-02 01:01:01.123\n", new Path(hiveOutputDir, 
testName + ".out").toString());
 createFile(StringUtil.EMPTY_STRING, new Path(hiveLogDir, testName + 
".out").toString());
 
 StringBuilder sb = new StringBuilder();
@@ -330,9 +330,12 @@ public class HivePhoenixStoreIT  extends 
BaseHivePhoenixStoreIT {
 hbaseTestUtil.getZkCluster().getClientPort() + "'," + 
HiveTestUtil.CRLF +
 "   'phoenix.column.mapping' = 'id:ID, ts:TS'," + 
HiveTestUtil.CRLF +
 "   'phoenix.rowkeys'='id');" + HiveTestUtil.CRLF);
+/*
+Following query only for check that nanoseconds are correctly parsed 
with over 3 digits.
+ */
 sb.append("INSERT INTO TABLE timeStampTable VALUES (10, \"2013-01-02 
01:01:01.123456\");" + HiveTestUtil.CRLF);
-sb.append("SELECT * from timeStampTable WHERE ts between '2013-01-02 
01:01:01.123455' and " +
-" '2013-01-02 12:01:02.123457789' AND id = 10;" + 
HiveTestUtil.CRLF);
+sb.append("SELECT * from timeStampTable WHERE ts between '2012-01-02 
01:01:01.123455' and " +
+" '2015-01-02 12:01:02.123457789' AND id = 10;" + 
HiveTestUtil.CRLF);
 
 String fullPath = new Path(hbaseTestUtil.getDataTestDir(), 
testName).toString();
 createFile(sb.toString(), fullPath);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b21877d7/phoenix-hive/src/main/java/org/apache/phoenix/hive/constants/PhoenixStorageHandlerConstants.java
--
diff --git 
a/phoenix-hive/src/main/java/org/apache/phoenix/hive/constants/PhoenixStorageHandlerConstants.java
 
b/phoenix-hive/src/main/java/org/apache/phoenix/hive/constants/PhoenixStorageHandlerConstants.java
index e3c

[48/50] [abbrv] phoenix git commit: PHOENIX-4818 Fix RAT check for missing licenses

2018-07-25 Thread elserj
PHOENIX-4818 Fix RAT check for missing licenses


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/135b890e
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/135b890e
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/135b890e

Branch: refs/heads/4.x-HBase-1.4
Commit: 135b890ef6bbce3664092bcdd2ebedc367dedd68
Parents: c53d9ad
Author: Vincent Poon 
Authored: Sun Jul 22 14:19:15 2018 -0700
Committer: Vincent Poon 
Committed: Sun Jul 22 14:19:35 2018 -0700

--
 .../phoenix/end2end/SplitSystemCatalogTests.java   | 17 +
 .../end2end/StatsEnabledSplitSystemCatalogIT.java  | 17 +
 .../apache/phoenix/query/ConnectionCachingIT.java  | 17 +
 .../schema/ParentTableNotFoundException.java   | 17 +
 4 files changed, 68 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/135b890e/phoenix-core/src/it/java/org/apache/phoenix/end2end/SplitSystemCatalogTests.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SplitSystemCatalogTests.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SplitSystemCatalogTests.java
index 27fc5c6..67d3fd3 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SplitSystemCatalogTests.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SplitSystemCatalogTests.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.phoenix.end2end;
 
 import java.lang.annotation.ElementType;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/135b890e/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsEnabledSplitSystemCatalogIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsEnabledSplitSystemCatalogIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsEnabledSplitSystemCatalogIT.java
index e25415a..197263f 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsEnabledSplitSystemCatalogIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsEnabledSplitSystemCatalogIT.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.phoenix.end2end;
 
 import static org.apache.phoenix.util.TestUtil.analyzeTable;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/135b890e/phoenix-core/src/it/java/org/apache/phoenix/query/ConnectionCachingIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/query/ConnectionCachingIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/query/ConnectionCachingIT.java
index b2ef052..d1dda04 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/query/ConnectionCachingIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/query/ConnectionCachingIT.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, V

[23/50] [abbrv] phoenix git commit: PHOENIX-4788 Shade Joda libraries in phoenix-server to avoid conflict with hbase shell

2018-07-25 Thread elserj
PHOENIX-4788 Shade Joda libraries in phoenix-server to avoid conflict with 
hbase shell


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/aa2f12db
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/aa2f12db
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/aa2f12db

Branch: refs/heads/master
Commit: aa2f12db9ac7e90fbc26f7906e184131f4917782
Parents: 69b50f6
Author: Ankit Singhal 
Authored: Fri Jun 22 15:57:04 2018 -0700
Committer: Ankit Singhal 
Committed: Fri Jun 22 15:57:04 2018 -0700

--
 phoenix-server/pom.xml | 9 -
 1 file changed, 8 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/aa2f12db/phoenix-server/pom.xml
--
diff --git a/phoenix-server/pom.xml b/phoenix-server/pom.xml
index e06eba9..2dbbb2b 100644
--- a/phoenix-server/pom.xml
+++ b/phoenix-server/pom.xml
@@ -40,6 +40,7 @@
 true
 true
 ${project.basedir}/..
+org.apache.phoenix.shaded
   
 
   
@@ -124,11 +125,11 @@
   org.apache.phoenix:phoenix-core
   org.iq80.snappy:snappy
   org.antlr:antlr*
+  joda-time:joda-time
   org.apache.tephra:tephra*
   com.google.code.gson:gson
   org.jruby.joni:joni
   org.jruby.jcodings:jcodings
-  joda-time:joda-time
   org.apache.twill:twill*
   
com.google.inject.extensions:guice-assistedinject
   it.unimi.dsi:fastutil
@@ -157,6 +158,12 @@
   
 
   
+ 
+
+  org.joda
+  ${shaded.package}.org.joda
+
+ 
 
   
 



[22/50] [abbrv] phoenix git commit: PHOENIX-4787 Upgrade spark version to 2.3.0

2018-07-25 Thread elserj
PHOENIX-4787 Upgrade spark version to 2.3.0


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/69b50f6c
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/69b50f6c
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/69b50f6c

Branch: refs/heads/master
Commit: 69b50f6caecd1c994412ae887726c1347f19e730
Parents: 56318da
Author: Ankit Singhal 
Authored: Fri Jun 22 11:58:34 2018 -0700
Committer: Ankit Singhal 
Committed: Fri Jun 22 11:58:34 2018 -0700

--
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/69b50f6c/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 78aff68..054ad78 100644
--- a/pom.xml
+++ b/pom.xml
@@ -100,7 +100,7 @@
 1.11.0
 9.3.19.v20170502
 0.14.0-incubating
-2.0.2
+2.3.0
 2.11.8
 2.11
 2.9.5



[11/50] [abbrv] phoenix git commit: PHOENIX-4749 Allow impersonation when SPNEGO is disabled

2018-07-25 Thread elserj
PHOENIX-4749 Allow impersonation when SPNEGO is disabled

Client impersonation is no longer tied to SPNEGO auth.

Signed-off-by: Josh Elser 


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c35daba3
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c35daba3
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c35daba3

Branch: refs/heads/4.x-HBase-1.4
Commit: c35daba3210ae013f4091e669d4f5831c1d19a81
Parents: 0c53d60
Author: Alex Araujo 
Authored: Wed May 23 10:28:48 2018 -0500
Committer: Josh Elser 
Committed: Fri May 25 17:32:32 2018 -0400

--
 .../phoenix/queryserver/server/QueryServer.java | 90 +++-
 .../server/QueryServerConfigurationTest.java| 72 
 2 files changed, 121 insertions(+), 41 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c35daba3/phoenix-queryserver/src/main/java/org/apache/phoenix/queryserver/server/QueryServer.java
--
diff --git 
a/phoenix-queryserver/src/main/java/org/apache/phoenix/queryserver/server/QueryServer.java
 
b/phoenix-queryserver/src/main/java/org/apache/phoenix/queryserver/server/QueryServer.java
index 8436086..e3f0f52 100644
--- 
a/phoenix-queryserver/src/main/java/org/apache/phoenix/queryserver/server/QueryServer.java
+++ 
b/phoenix-queryserver/src/main/java/org/apache/phoenix/queryserver/server/QueryServer.java
@@ -54,6 +54,7 @@ import org.apache.phoenix.queryserver.register.Registry;
 import org.apache.phoenix.util.InstanceResolver;
 
 import java.io.File;
+import java.io.IOException;
 import java.lang.management.ManagementFactory;
 import java.lang.management.RuntimeMXBean;
 import java.net.InetAddress;
@@ -197,7 +198,7 @@ public final class QueryServer extends Configured 
implements Tool, Runnable {
   
QueryServicesOptions.DEFAULT_QUERY_SERVER_DISABLE_KERBEROS_LOGIN);
 
   // handle secure cluster credentials
-  if (isKerberos && !disableSpnego && !disableLogin) {
+  if (isKerberos && !disableLogin) {
 hostname = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
 getConf().get(QueryServices.QUERY_SERVER_DNS_INTERFACE_ATTRIB, 
"default"),
 getConf().get(QueryServices.QUERY_SERVER_DNS_NAMESERVER_ATTRIB, 
"default")));
@@ -230,47 +231,9 @@ public final class QueryServer extends Configured 
implements Tool, Runnable {
   final HttpServer.Builder builder = new 
HttpServer.Builder().withPort(port)
   .withHandler(service, getSerialization(getConf()));
 
-  // Enable SPNEGO and Impersonation when using Kerberos
+  // Enable client auth when using Kerberos auth for HBase
   if (isKerberos) {
-UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
-LOG.debug("Current user is " + ugi);
-if (!ugi.hasKerberosCredentials()) {
-  ugi = UserGroupInformation.getLoginUser();
-  LOG.debug("Current user does not have Kerberos credentials, using 
instead " + ugi);
-}
-
-// Make sure the proxyuser configuration is up to date
-ProxyUsers.refreshSuperUserGroupsConfiguration(getConf());
-
-String keytabPath = 
getConf().get(QueryServices.QUERY_SERVER_KEYTAB_FILENAME_ATTRIB);
-File keytab = new File(keytabPath);
-String httpKeytabPath = 
getConf().get(QueryServices.QUERY_SERVER_HTTP_KEYTAB_FILENAME_ATTRIB, null);
-String httpPrincipal = 
getConf().get(QueryServices.QUERY_SERVER_KERBEROS_HTTP_PRINCIPAL_ATTRIB, null);
-// Backwards compat for a configuration key change
-if (httpPrincipal == null) {
-  httpPrincipal = 
getConf().get(QueryServices.QUERY_SERVER_KERBEROS_HTTP_PRINCIPAL_ATTRIB_LEGACY, 
null);
-}
-File httpKeytab = null;
-if (null != httpKeytabPath)
-  httpKeytab = new File(httpKeytabPath);
-
-String realmsString = 
getConf().get(QueryServices.QUERY_SERVER_KERBEROS_ALLOWED_REALMS, null);
-String[] additionalAllowedRealms = null;
-if (null != realmsString) {
-additionalAllowedRealms = StringUtils.split(realmsString, ',');
-}
-
-// Enable SPNEGO and impersonation (through standard Hadoop 
configuration means)
-if ((null != httpKeytabPath) && (null != httpPrincipal))
-  builder.withSpnego(httpPrincipal, additionalAllowedRealms)
-  .withAutomaticLogin(httpKeytab)
-  .withImpersonation(new PhoenixDoAsCallback(ugi, getConf()));
-else
-  builder.withSpnego(ugi.getUserName(), additionalAllowedRealms)
-  .withAutomaticLogin(keytab)
-  .withImpersonation(new PhoenixDoAsCallback(ugi, getConf()));
-
-
+configureClientAuthentication(builder, disableSpnego);
   }
  

[40/50] [abbrv] phoenix git commit: PHOENIX-3534 Support multi region SYSTEM.CATALOG table (Thomas D'Silva and Rahul Gidwani)

2018-07-25 Thread elserj
http://git-wip-us.apache.org/repos/asf/phoenix/blob/d56fd3c9/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java
index 9f4cf97..56d8698 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java
@@ -30,9 +30,10 @@ import java.util.List;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparatorImpl;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.compile.ColumnProjector;
 import org.apache.phoenix.compile.ExpressionProjector;
@@ -41,7 +42,12 @@ import org.apache.phoenix.compile.StatementContext;
 import org.apache.phoenix.coprocessor.MetaDataProtocol;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.exception.SQLExceptionInfo;
+import org.apache.phoenix.expression.Expression;
+import org.apache.phoenix.expression.KeyValueColumnExpression;
+import org.apache.phoenix.expression.LikeExpression;
+import org.apache.phoenix.expression.LiteralExpression;
 import org.apache.phoenix.expression.RowKeyColumnExpression;
+import org.apache.phoenix.expression.StringBasedLikeExpression;
 import org.apache.phoenix.expression.function.ExternalSqlTypeIdFunction;
 import org.apache.phoenix.expression.function.IndexStateNameFunction;
 import org.apache.phoenix.expression.function.SQLIndexTypeFunction;
@@ -49,26 +55,34 @@ import 
org.apache.phoenix.expression.function.SQLTableTypeFunction;
 import org.apache.phoenix.expression.function.SQLViewTypeFunction;
 import org.apache.phoenix.expression.function.SqlTypeNameFunction;
 import org.apache.phoenix.expression.function.TransactionProviderNameFunction;
-import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 import org.apache.phoenix.hbase.index.util.VersionUtil;
-import org.apache.phoenix.iterate.DelegateResultIterator;
 import org.apache.phoenix.iterate.MaterializedResultIterator;
 import org.apache.phoenix.iterate.ResultIterator;
+import org.apache.phoenix.parse.LikeParseNode.LikeType;
 import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.schema.MetaDataClient;
+import org.apache.phoenix.schema.PColumn;
+import org.apache.phoenix.schema.PColumnImpl;
 import org.apache.phoenix.schema.PDatum;
 import org.apache.phoenix.schema.PName;
+import org.apache.phoenix.schema.PNameFactory;
+import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTable.LinkType;
 import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.RowKeyValueAccessor;
 import org.apache.phoenix.schema.SortOrder;
-import org.apache.phoenix.schema.tuple.ResultTuple;
+import org.apache.phoenix.schema.tuple.MultiKeyValueTuple;
 import org.apache.phoenix.schema.tuple.SingleKeyValueTuple;
 import org.apache.phoenix.schema.tuple.Tuple;
+import org.apache.phoenix.schema.types.PBoolean;
 import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.schema.types.PInteger;
+import org.apache.phoenix.schema.types.PSmallint;
+import org.apache.phoenix.schema.types.PVarbinary;
 import org.apache.phoenix.schema.types.PVarchar;
 import org.apache.phoenix.util.ByteUtil;
 import org.apache.phoenix.util.PhoenixKeyValueUtil;
+import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.StringUtil;
 
@@ -354,6 +368,11 @@ public class PhoenixDatabaseMetaData implements 
DatabaseMetaData {
 public static final byte[] COLUMN_QUALIFIER_COUNTER_BYTES = 
Bytes.toBytes(COLUMN_QUALIFIER_COUNTER);
 public static final String USE_STATS_FOR_PARALLELIZATION = 
"USE_STATS_FOR_PARALLELIZATION";
 public static final byte[] USE_STATS_FOR_PARALLELIZATION_BYTES = 
Bytes.toBytes(USE_STATS_FOR_PARALLELIZATION);
+
+public static final String SYSTEM_CHILD_LINK_TABLE = "CHILD_LINK";
+public static final String SYSTEM_CHILD_LINK_NAME = 
SchemaUtil.getTableName(SYSTEM_CATALOG_SCHEMA, SYSTEM_CHILD_LINK_TABLE);
+public static final byte[] SYSTEM_CHILD_LINK_NAME_BYTES = 
Bytes.toBytes(SYSTEM_CHILD_LINK_NAME);
+public static final TableName SYSTEM_LINK_HBASE_TABLE_NAME = 
TableName.valueOf(SYSTEM_CHILD_LINK_NAME);
 
 
 //SYSTEM:LOG
@@ -485,179 +504,353 @@ public class PhoenixDatabaseMetaData implements 
DatabaseMetaData {
 private static void appendConjunction(StringBuilder buf) {
 buf.append(buf.length() == 0 ? "" : " and ");
 }
-
+
+private static fin

[22/50] [abbrv] phoenix git commit: PHOENIX-4774 Disable doclint in 1.8+ JDKs

2018-07-25 Thread elserj
PHOENIX-4774 Disable doclint in 1.8+ JDKs


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/69bb8b07
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/69bb8b07
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/69bb8b07

Branch: refs/heads/4.x-HBase-1.4
Commit: 69bb8b0738fd93fe360ef735d462d84b6f9f1fa8
Parents: 32154df
Author: Alex Araujo 
Authored: Tue Jun 5 11:20:17 2018 -0700
Committer: Vincent Poon 
Committed: Wed Jun 6 11:18:49 2018 -0700

--
 pom.xml | 13 +
 1 file changed, 13 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/69bb8b07/pom.xml
--
diff --git a/pom.xml b/pom.xml
index b8b9c3a..13f137e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -408,6 +408,9 @@
   
   
 
+
+  ${javadoc.opts}
+
   
 
   
@@ -962,6 +965,16 @@
   
 
   
+
+
+  java8-doclint-disabled
+  
+[1.8,)
+  
+  
+-Xdoclint:none
+  
+
 
 
   release



[46/50] [abbrv] phoenix git commit: PHOENIX-3534 Support multi region SYSTEM.CATALOG table (Thomas D'Silva and Rahul Gidwani)

2018-07-25 Thread elserj
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c53d9ada/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableWithViewsIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableWithViewsIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableWithViewsIT.java
index ab3a4ab..e39d492 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableWithViewsIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableWithViewsIT.java
@@ -21,6 +21,8 @@ import static 
org.apache.phoenix.exception.SQLExceptionCode.CANNOT_MUTATE_TABLE;
 import static org.apache.phoenix.util.PhoenixRuntime.TENANT_ID_ATTRIB;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
@@ -33,37 +35,46 @@ import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
 import java.util.Arrays;
 import java.util.Collection;
-import java.util.Properties;
+import java.util.List;
 
 import org.apache.commons.lang.ArrayUtils;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.coprocessor.TephraTransactionalProcessor;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
 import org.apache.phoenix.query.QueryConstants;
+import org.apache.phoenix.schema.ColumnNotFoundException;
+import org.apache.phoenix.schema.PColumn;
 import org.apache.phoenix.schema.PName;
 import org.apache.phoenix.schema.PNameFactory;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTableKey;
 import org.apache.phoenix.schema.PTableType;
-import org.apache.phoenix.util.PropertiesUtil;
-import org.apache.phoenix.util.StringUtil;
-import org.apache.phoenix.util.TestUtil;
+import org.apache.phoenix.schema.TableNotFoundException;
+import org.apache.phoenix.util.IndexUtil;
+import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.SchemaUtil;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
 
+import com.google.common.base.Function;
+import com.google.common.collect.Lists;
+
 @RunWith(Parameterized.class)
-public class AlterTableWithViewsIT extends ParallelStatsDisabledIT {
-
+public class AlterTableWithViewsIT extends SplitSystemCatalogIT {
+
 private final boolean isMultiTenant;
 private final boolean columnEncoded;
-
-private final String TENANT_SPECIFIC_URL1 = getUrl() + ';' + 
TENANT_ID_ATTRIB + "=tenant1";
-private final String TENANT_SPECIFIC_URL2 = getUrl() + ';' + 
TENANT_ID_ATTRIB + "=tenant2";
+private final String TENANT_SPECIFIC_URL1 = getUrl() + ';' + 
TENANT_ID_ATTRIB + "=" + TENANT1;
+private final String TENANT_SPECIFIC_URL2 = getUrl() + ';' + 
TENANT_ID_ATTRIB + "=" + TENANT2;
 
 public AlterTableWithViewsIT(boolean isMultiTenant, boolean columnEncoded) 
{
 this.isMultiTenant = isMultiTenant;
@@ -77,6 +88,14 @@ public class AlterTableWithViewsIT extends 
ParallelStatsDisabledIT {
 { true, false }, { true, true } });
 }
 
+// transform PColumn to String
+private Function function = new Function(){
+@Override
+public String apply(PColumn input) {
+return input.getName().getString();
+}
+};
+
 private String generateDDL(String format) {
 return generateDDL("", format);
 }
@@ -101,8 +120,9 @@ public class AlterTableWithViewsIT extends 
ParallelStatsDisabledIT {
 public void testAddNewColumnsToBaseTableWithViews() throws Exception {
 try (Connection conn = DriverManager.getConnection(getUrl());
 Connection viewConn = isMultiTenant ? 
DriverManager.getConnection(TENANT_SPECIFIC_URL1) : conn ) {   
-String tableName = generateUniqueName();
-String viewOfTable = tableName + "_VIEW";
+String tableName = SchemaUtil.getTableName(SCHEMA1, 
generateUniqueName());
+String viewOfTable = SchemaUtil.getTableName(SCHEMA2, 
generateUniqueName());
+
 String ddlFormat = "CREATE TABLE IF NOT EXISTS " + tableName + " ("
 + " %s ID char(1) NOT NULL,"
 + " COL1 integer NOT NULL,"
@@ -113,12 +133,13 @@ public class AlterTableWithViewsIT extends 
ParallelStatsDisabledIT {

[42/50] [abbrv] phoenix git commit: PHOENIX-3534 Support multi region SYSTEM.CATALOG table (Thomas D'Silva and Rahul Gidwani)

2018-07-25 Thread elserj
http://git-wip-us.apache.org/repos/asf/phoenix/blob/d56fd3c9/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index 8a32d62..e24de29 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -17,8 +17,6 @@
  */
 package org.apache.phoenix.coprocessor;
 
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkState;
 import static org.apache.hadoop.hbase.KeyValueUtil.createFirstOnRow;
 import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.APPEND_ONLY_SCHEMA_BYTES;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.ARRAY_SIZE_BYTES;
@@ -55,7 +53,6 @@ import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.MULTI_TENANT_BYTES
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.NULLABLE_BYTES;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.NUM_ARGS_BYTES;
 import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.ORDINAL_POSITION_BYTES;
-import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.PARENT_TENANT_ID_BYTES;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.PK_NAME_BYTES;
 import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.RETURN_TYPE_BYTES;
 import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SALT_BUCKETS_BYTES;
@@ -78,9 +75,8 @@ import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.VIEW_INDEX_ID_BYTE
 import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.VIEW_STATEMENT_BYTES;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.VIEW_TYPE_BYTES;
 import static 
org.apache.phoenix.query.QueryConstants.DIVERGED_VIEW_BASE_COLUMN_COUNT;
-import static org.apache.phoenix.query.QueryConstants.SEPARATOR_BYTE_ARRAY;
 import static org.apache.phoenix.schema.PTableType.INDEX;
-import static org.apache.phoenix.util.ByteUtil.EMPTY_BYTE_ARRAY;
+import static org.apache.phoenix.schema.PTableType.TABLE;
 import static org.apache.phoenix.util.SchemaUtil.getVarCharLength;
 import static org.apache.phoenix.util.SchemaUtil.getVarChars;
 
@@ -91,14 +87,16 @@ import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
+import java.util.ListIterator;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.NavigableMap;
+import java.util.Properties;
 import java.util.Set;
 
 import org.apache.hadoop.conf.Configuration;
@@ -116,19 +114,14 @@ import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorException;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
-import org.apache.hadoop.hbase.filter.PageFilter;
-import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.ipc.RpcCall;
 import org.apache.hadoop.hbase.ipc.RpcUtil;
@@ -141,6 +134,7 @@ import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.util.VersionInfo;
 import org.apache.phoenix.cache.GlobalCache;
 import org.apache.phoenix.cache.GlobalCache.FunctionBytesPtr;
+import org.apache.phoenix.compile.ColumnNameTrackingExpressionCompiler;
 import org.apache.phoenix.compile.ColumnResolver;
 import org.apache.phoenix.compile.FromCompiler;
 import org.apache.phoenix.compile.QueryPlan;
@@ -184,6 +178,7 @@ import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
 import org.apache.phoenix.jdbc.PhoenixResultSet;
 import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.metrics.Metrics;
+import org.apache.phoenix.parse.DropTableStatement;
 import org.apache.phoenix.parse.LiteralParseNode;
 import org.apache.phoenix.parse.PFunction;
 import org.apache.phoenix.parse.PFunction.FunctionArgu

[10/50] [abbrv] phoenix git commit: PHOENIX-1567 Publish Phoenix-Client & Phoenix-Server jars into Maven Repo

2018-07-25 Thread elserj
PHOENIX-1567 Publish Phoenix-Client & Phoenix-Server jars into Maven Repo


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/0c53d601
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/0c53d601
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/0c53d601

Branch: refs/heads/4.x-HBase-1.4
Commit: 0c53d601a4851cceac284d500fa21ec9c2730ea6
Parents: 9335972
Author: Ankit Singhal 
Authored: Fri May 25 11:31:37 2018 -0700
Committer: Ankit Singhal 
Committed: Fri May 25 11:31:37 2018 -0700

--
 phoenix-client/pom.xml | 1 +
 phoenix-server/pom.xml | 1 +
 2 files changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/0c53d601/phoenix-client/pom.xml
--
diff --git a/phoenix-client/pom.xml b/phoenix-client/pom.xml
index 8d78d6b..d65da69 100644
--- a/phoenix-client/pom.xml
+++ b/phoenix-client/pom.xml
@@ -81,6 +81,7 @@
 
 
   ${basedir}/target/phoenix-${project.version}-client.jar
+ ${basedir}/pom.xml
 
   
   

http://git-wip-us.apache.org/repos/asf/phoenix/blob/0c53d601/phoenix-server/pom.xml
--
diff --git a/phoenix-server/pom.xml b/phoenix-server/pom.xml
index ba05953..de9d95b 100644
--- a/phoenix-server/pom.xml
+++ b/phoenix-server/pom.xml
@@ -85,6 +85,7 @@
 
 
   ${basedir}/target/phoenix-${project.version}-server.jar
+ ${basedir}/pom.xml
 
   
 



[31/50] [abbrv] phoenix git commit: PHOENIX-3383 Comparison between descending row keys used in RVC is reverse

2018-07-25 Thread elserj
http://git-wip-us.apache.org/repos/asf/phoenix/blob/b91d7b0d/phoenix-core/src/main/java/org/apache/phoenix/execute/PhoenixTxIndexMutationGenerator.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/execute/PhoenixTxIndexMutationGenerator.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/execute/PhoenixTxIndexMutationGenerator.java
index f330208..c209b1f 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/execute/PhoenixTxIndexMutationGenerator.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/execute/PhoenixTxIndexMutationGenerator.java
@@ -176,7 +176,7 @@ public class PhoenixTxIndexMutationGenerator {
 
 // Project empty key value column
 scan.addColumn(indexMaintainers.get(0).getDataEmptyKeyValueCF(), 
emptyKeyValueQualifier);
-ScanRanges scanRanges = 
ScanRanges.create(SchemaUtil.VAR_BINARY_SCHEMA, 
Collections.singletonList(keys), ScanUtil.SINGLE_COLUMN_SLOT_SPAN, 
KeyRange.EVERYTHING_RANGE, null, true, -1);
+ScanRanges scanRanges = 
ScanRanges.create(SchemaUtil.VAR_BINARY_SCHEMA, 
Collections.singletonList(keys), ScanUtil.SINGLE_COLUMN_SLOT_SPAN, null, true, 
-1);
 scanRanges.initializeScan(scan);
 Table txTable = 
indexMetaData.getTransactionContext().getTransactionalTable(htable, 
isImmutable);
 // For rollback, we need to see all versions, including

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b91d7b0d/phoenix-core/src/main/java/org/apache/phoenix/expression/function/FunctionExpression.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/FunctionExpression.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/FunctionExpression.java
index b45706a..bc9fa9f 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/FunctionExpression.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/FunctionExpression.java
@@ -30,7 +30,15 @@ import org.apache.phoenix.expression.Expression;
  * @since 0.1
  */
 public abstract class FunctionExpression extends BaseCompoundExpression {
-public enum OrderPreserving {NO, YES_IF_LAST, YES};
+public enum OrderPreserving {NO, YES_IF_LAST, YES;
+
+public OrderPreserving combine(OrderPreserving that) {
+if (that == null) {
+return this;
+}
+return OrderPreserving.values()[Math.min(this.ordinal(), 
that.ordinal())];
+}};
+
 public FunctionExpression() {
 }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b91d7b0d/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InvertFunction.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InvertFunction.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InvertFunction.java
index 3615cbe..8ef5914 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InvertFunction.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/InvertFunction.java
@@ -96,7 +96,24 @@ public class InvertFunction extends ScalarFunction {
 @Override
 public KeyRange getKeyRange(CompareOp op, Expression rhs) {
 KeyRange range = childPart.getKeyRange(op, rhs);
-return range.invert();
+byte[] lower = range.getLowerRange();
+if (!range.lowerUnbound()) {
+lower = SortOrder.invert(lower, 0, lower.length);
+}
+byte[] upper;
+if (range.isSingleKey()) {
+upper = lower;
+} else {
+upper = range.getUpperRange();
+if (!range.upperUnbound()) {
+upper = SortOrder.invert(upper, 0, upper.length);
+}
+}
+range = KeyRange.getKeyRange(lower, range.isLowerInclusive(), 
upper, range.isUpperInclusive());
+if (getColumn().getSortOrder() == SortOrder.DESC) {
+range = range.invert();
+}
+return range;
 }
 
 @Override

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b91d7b0d/phoenix-core/src/main/java/org/apache/phoenix/expression/function/PrefixFunction.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/PrefixFunction.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/function/PrefixFunction.java
index cb98e28..ff3e74d 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/function/PrefixFunction.java
+++ 
b/phoenix-core/src/ma

[10/50] [abbrv] phoenix git commit: PHOENIX-4770 Re-enable pending tests which were ignored during 5.0.0 alpha

2018-07-25 Thread elserj
PHOENIX-4770 Re-enable pending tests which were ignored during 5.0.0 alpha


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/3b1c0d31
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/3b1c0d31
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/3b1c0d31

Branch: refs/heads/master
Commit: 3b1c0d31743bc9d9a4b99d6a0d276c00ddbd837c
Parents: 09c017e
Author: Ankit Singhal 
Authored: Fri Jun 1 14:20:40 2018 -0700
Committer: Ankit Singhal 
Committed: Fri Jun 1 14:20:40 2018 -0700

--
 .../phoenix/end2end/IndexScrutinyToolIT.java|  8 +--
 .../phoenix/end2end/index/LocalIndexIT.java |  2 -
 .../schema/types/PDataTypeForArraysTest.java| 54 ++--
 3 files changed, 8 insertions(+), 56 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/3b1c0d31/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
index d5c50a1..692a98c 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexScrutinyToolIT.java
@@ -40,7 +40,6 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
 
-import com.google.common.collect.Sets;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -71,18 +70,18 @@ import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import com.google.common.collect.Sets;
 
 /**
  * Tests for the {@link IndexScrutinyTool}
  */
 @Category(NeedsOwnMiniClusterTest.class)
 @RunWith(Parameterized.class)
-@Ignore
 public class IndexScrutinyToolIT extends BaseTest {
 
 private String dataTableDdl;
@@ -192,6 +191,7 @@ public class IndexScrutinyToolIT extends BaseTest {
  * Since CURRENT_SCN is set, the scrutiny shouldn't report any issue.
  */
 @Test
+@Ignore("PHOENIX-4378 Unable to set KEEP_DELETED_CELLS to true on RS 
scanner")
 public void testScrutinyWhileTakingWrites() throws Exception {
 int id = 0;
 while (id < 1000) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3b1c0d31/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
index 15d938c..41616f2 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/LocalIndexIT.java
@@ -71,7 +71,6 @@ import org.apache.phoenix.schema.TableNotFoundException;
 import org.apache.phoenix.util.QueryUtil;
 import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.TestUtil;
-import org.junit.Ignore;
 import org.junit.Test;
 
 import com.google.common.collect.Lists;
@@ -587,7 +586,6 @@ public class LocalIndexIT extends BaseLocalIndexIT {
 }
 
 @Test
-@Ignore
 public void testLocalIndexAutomaticRepair() throws Exception {
 if (isNamespaceMapped) { return; }
 PhoenixConnection conn = 
DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/3b1c0d31/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeForArraysTest.java
--
diff --git 
a/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeForArraysTest.java
 
b/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeForArraysTest.java
index 290c80f..792ec6b 100644
--- 
a/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeForArraysTest.java
+++ 
b/phoenix-core/src/test/java/org/apache/phoenix/schema/types/PDataTypeForArraysTest.java
@@ -17,6 +17,10 @@
  */
 package org.apache.phoenix.schema.types;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
 import java.math.BigDecimal;
 import java.sql.Array;
 import java.sql.Date;
@@ -31,57 +

[27/50] [abbrv] phoenix git commit: Updating version to 5.0.0-HBase-2.0

2018-07-25 Thread elserj
Updating version to 5.0.0-HBase-2.0


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/479fab07
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/479fab07
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/479fab07

Branch: refs/heads/master
Commit: 479fab07f5e5798bfafcc7aebbe93669d804b72b
Parents: 2d44700
Author: Rajeshbabu Chintaguntla 
Authored: Tue Jun 26 10:33:24 2018 -0700
Committer: Rajeshbabu Chintaguntla 
Committed: Tue Jun 26 10:33:24 2018 -0700

--
 phoenix-assembly/pom.xml   | 2 +-
 phoenix-client/pom.xml | 2 +-
 phoenix-core/pom.xml   | 2 +-
 phoenix-flume/pom.xml  | 2 +-
 phoenix-hive/pom.xml   | 2 +-
 phoenix-kafka/pom.xml  | 2 +-
 phoenix-load-balancer/pom.xml  | 2 +-
 phoenix-pherf/pom.xml  | 2 +-
 phoenix-pig/pom.xml| 2 +-
 phoenix-queryserver-client/pom.xml | 2 +-
 phoenix-queryserver/pom.xml| 2 +-
 phoenix-server/pom.xml | 2 +-
 phoenix-spark/pom.xml  | 2 +-
 phoenix-tracing-webapp/pom.xml | 2 +-
 pom.xml| 2 +-
 15 files changed, 15 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/479fab07/phoenix-assembly/pom.xml
--
diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml
index 7528ef9..77b1b83 100644
--- a/phoenix-assembly/pom.xml
+++ b/phoenix-assembly/pom.xml
@@ -27,7 +27,7 @@
   
 org.apache.phoenix
 phoenix
-5.0.0-SNAPSHOT
+5.0.0-HBase-2.0
   
   phoenix-assembly
   Phoenix Assembly

http://git-wip-us.apache.org/repos/asf/phoenix/blob/479fab07/phoenix-client/pom.xml
--
diff --git a/phoenix-client/pom.xml b/phoenix-client/pom.xml
index ca95e62..7b53483 100644
--- a/phoenix-client/pom.xml
+++ b/phoenix-client/pom.xml
@@ -27,7 +27,7 @@
   
 org.apache.phoenix
 phoenix
-5.0.0-SNAPSHOT
+5.0.0-HBase-2.0
   
   phoenix-client
   Phoenix Client

http://git-wip-us.apache.org/repos/asf/phoenix/blob/479fab07/phoenix-core/pom.xml
--
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
index 24e34f3..96610fd 100644
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@ -4,7 +4,7 @@
   
 org.apache.phoenix
 phoenix
-5.0.0-SNAPSHOT
+5.0.0-HBase-2.0
   
   phoenix-core
   Phoenix Core

http://git-wip-us.apache.org/repos/asf/phoenix/blob/479fab07/phoenix-flume/pom.xml
--
diff --git a/phoenix-flume/pom.xml b/phoenix-flume/pom.xml
index 0c0386a..affd5ce 100644
--- a/phoenix-flume/pom.xml
+++ b/phoenix-flume/pom.xml
@@ -26,7 +26,7 @@
   
 org.apache.phoenix
 phoenix
-5.0.0-SNAPSHOT
+5.0.0-HBase-2.0
   
   phoenix-flume
   Phoenix - Flume

http://git-wip-us.apache.org/repos/asf/phoenix/blob/479fab07/phoenix-hive/pom.xml
--
diff --git a/phoenix-hive/pom.xml b/phoenix-hive/pom.xml
index 0bc582c..d2c995a 100644
--- a/phoenix-hive/pom.xml
+++ b/phoenix-hive/pom.xml
@@ -27,7 +27,7 @@
   
 org.apache.phoenix
 phoenix
-5.0.0-SNAPSHOT
+5.0.0-HBase-2.0
   
   phoenix-hive
   Phoenix - Hive

http://git-wip-us.apache.org/repos/asf/phoenix/blob/479fab07/phoenix-kafka/pom.xml
--
diff --git a/phoenix-kafka/pom.xml b/phoenix-kafka/pom.xml
index 98bb858..8abf6fe 100644
--- a/phoenix-kafka/pom.xml
+++ b/phoenix-kafka/pom.xml
@@ -26,7 +26,7 @@

org.apache.phoenix
phoenix
-   5.0.0-SNAPSHOT
+   5.0.0-HBase-2.0

phoenix-kafka
Phoenix - Kafka

http://git-wip-us.apache.org/repos/asf/phoenix/blob/479fab07/phoenix-load-balancer/pom.xml
--
diff --git a/phoenix-load-balancer/pom.xml b/phoenix-load-balancer/pom.xml
index 169833d..53a370e 100644
--- a/phoenix-load-balancer/pom.xml
+++ b/phoenix-load-balancer/pom.xml
@@ -27,7 +27,7 @@
   
 org.apache.phoenix
 phoenix
-5.0.0-SNAPSHOT
+5.0.0-HBase-2.0
   
   phoenix-load-balancer
   Phoenix Load Balancer

http://git-wip-us.apache.org/repos/asf/phoenix/blob/479fab07/phoenix-pherf/pom.xml
--
diff --git a/phoenix-pherf/pom.xml b/phoenix-pherf/pom.xml
index 754a99f..b5d5631 100644
--- a/phoenix-pherf/pom.xml
+++ b/phoenix-pherf/pom.xml
@@ -15,7 +15,7 @@

org.apache.phoenix
phoenix
-   5.

[07/50] [abbrv] phoenix git commit: PHOENIX-4745 Update Tephra version to 0.14.0-incubating

2018-07-25 Thread elserj
PHOENIX-4745 Update Tephra version to 0.14.0-incubating


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f0430611
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f0430611
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f0430611

Branch: refs/heads/4.x-HBase-1.4
Commit: f04306112aaee4e7da2d492df70826bb9f806464
Parents: ea94951
Author: James Taylor 
Authored: Thu May 24 16:10:52 2018 -0700
Committer: James Taylor 
Committed: Thu May 24 16:10:52 2018 -0700

--
 phoenix-core/pom.xml| 2 +-
 .../java/org/apache/phoenix/query/QueryServicesOptions.java | 5 -
 pom.xml | 4 ++--
 3 files changed, 7 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f0430611/phoenix-core/pom.xml
--
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
index a29fb01..2bb7293 100644
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@ -221,7 +221,7 @@
 
 
   org.apache.tephra
-  tephra-hbase-compat-1.3
+  tephra-hbase-compat-1.4
 
   
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f0430611/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java 
b/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java
index d708785..307c5dd 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java
@@ -117,6 +117,7 @@ import org.apache.phoenix.trace.util.Tracing;
 import org.apache.phoenix.transaction.TransactionFactory;
 import org.apache.phoenix.util.DateUtil;
 import org.apache.phoenix.util.ReadOnlyProps;
+import org.apache.tephra.TxConstants;
 
 
 
@@ -440,7 +441,9 @@ public class QueryServicesOptions {
 .setIfUnset(UPLOAD_BINARY_DATA_TYPE_ENCODING, 
DEFAULT_UPLOAD_BINARY_DATA_TYPE_ENCODING)
 .setIfUnset(PHOENIX_ACLS_ENABLED,  DEFAULT_PHOENIX_ACLS_ENABLED)
 .setIfUnset(LOG_LEVEL,  DEFAULT_LOGGING_LEVEL)
-.setIfUnset(LOG_SAMPLE_RATE,  DEFAULT_LOG_SAMPLE_RATE);
+.setIfUnset(LOG_SAMPLE_RATE,  DEFAULT_LOG_SAMPLE_RATE)
+.setIfUnset(TxConstants.TX_PRE_014_CHANGESET_KEY, 
Boolean.FALSE.toString())
+;
 // HBase sets this to 1, so we reset it to something more appropriate.
 // Hopefully HBase will change this, because we can't know if a user 
set
 // it to 1, so we'll change it.

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f0430611/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 38c1c41..8ba338c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -100,7 +100,7 @@
 2.1.2
 1.10.0
 8.1.7.v20120910
-0.13.0-incubating
+0.14.0-incubating
 2.0.2
 2.11.8
 2.11
@@ -787,7 +787,7 @@
   
   
 org.apache.tephra
-tephra-hbase-compat-1.3
+tephra-hbase-compat-1.4
 ${tephra.version}
   
 



[16/50] [abbrv] phoenix git commit: PHOENIX-4776 Remove creation of .md5 files from dev/make_rc.sh

2018-07-25 Thread elserj
PHOENIX-4776 Remove creation of .md5 files from dev/make_rc.sh


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/16fa7f66
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/16fa7f66
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/16fa7f66

Branch: refs/heads/master
Commit: 16fa7f661496485cdf49d168747ca98777f5a58e
Parents: 7ecf474
Author: Pedro Boado 
Authored: Mon Jun 11 23:35:56 2018 +0100
Committer: Pedro Boado 
Committed: Mon Jun 11 23:49:20 2018 +0100

--
 dev/make_rc.sh | 2 --
 1 file changed, 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/16fa7f66/dev/make_rc.sh
--
diff --git a/dev/make_rc.sh b/dev/make_rc.sh
index 47439d3..f6cd06c 100755
--- a/dev/make_rc.sh
+++ b/dev/make_rc.sh
@@ -106,13 +106,11 @@ function_sign() {
   # if on MAC OS
   if [[ "$OSTYPE" == "darwin"* ]]; then
 gpg2 --armor --output $phoenix_tar.asc --detach-sig $phoenix_tar;
-openssl md5 $phoenix_tar > $phoenix_tar.md5;
 openssl dgst -sha512 $phoenix_tar > $phoenix_tar.sha512;
 openssl dgst -sha256 $phoenix_tar >> $phoenix_tar.sha256;
   # all other OS
   else
 gpg --armor --output $phoenix_tar.asc --detach-sig $phoenix_tar;
-md5sum -b $phoenix_tar > $phoenix_tar.md5;
 sha512sum -b $phoenix_tar > $phoenix_tar.sha512;
 sha256sum -b $phoenix_tar >> $phoenix_tar.sha256;
   fi



[15/50] [abbrv] phoenix git commit: PHOENIX-4778 Fix rat:check failure on 5.x branch(Rajeshbabu)

2018-07-25 Thread elserj
PHOENIX-4778 Fix rat:check failure on 5.x branch(Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/7ecf4744
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/7ecf4744
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/7ecf4744

Branch: refs/heads/master
Commit: 7ecf47443b5dcc4c6993463e20f9a13ea8564cb0
Parents: abcf0d1
Author: Rajeshbabu Chintaguntla 
Authored: Mon Jun 11 12:00:37 2018 +0530
Committer: Rajeshbabu Chintaguntla 
Committed: Mon Jun 11 12:00:37 2018 +0530

--
 .../apache/phoenix/end2end/MutationStateIT.java| 17 +
 .../end2end/index/MutableIndexRebuilderIT.java | 17 +
 2 files changed, 34 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/7ecf4744/phoenix-core/src/it/java/org/apache/phoenix/end2end/MutationStateIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/MutationStateIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/MutationStateIT.java
index 2d5f360..36782c1 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/MutationStateIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/MutationStateIT.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.phoenix.end2end;
 
 import static org.junit.Assert.assertEquals;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/7ecf4744/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexRebuilderIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexRebuilderIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexRebuilderIT.java
index a29766f..e1c8f81 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexRebuilderIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexRebuilderIT.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.phoenix.end2end.index;
 
 import static org.junit.Assert.assertEquals;



[43/50] [abbrv] phoenix git commit: PHOENIX-3534 Support multi region SYSTEM.CATALOG table (Thomas D'Silva and Rahul Gidwani)

2018-07-25 Thread elserj
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c53d9ada/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java
index cfaed72..4433e12 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java
@@ -35,8 +35,6 @@ import java.util.Collection;
 import java.util.List;
 import java.util.Properties;
 
-import jline.internal.Log;
-
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.TableName;
@@ -75,25 +73,27 @@ import org.junit.runners.Parameterized.Parameters;
 
 import com.google.common.primitives.Doubles;
 
+import jline.internal.Log;
+
 @RunWith(Parameterized.class)
 public class MutableIndexIT extends ParallelStatsDisabledIT {
 
 protected final boolean localIndex;
 private final String tableDDLOptions;
-   
+
 public MutableIndexIT(Boolean localIndex, String txProvider, Boolean 
columnEncoded) {
-   this.localIndex = localIndex;
-   StringBuilder optionBuilder = new StringBuilder();
-   if (txProvider != null) {
-   optionBuilder.append("TRANSACTIONAL=true," + 
PhoenixDatabaseMetaData.TRANSACTION_PROVIDER + "='" + txProvider + "'");
-   }
-   if (!columnEncoded) {
+this.localIndex = localIndex;
+StringBuilder optionBuilder = new StringBuilder();
+if (txProvider != null) {
+optionBuilder.append("TRANSACTIONAL=true," + 
PhoenixDatabaseMetaData.TRANSACTION_PROVIDER + "='" + txProvider + "'");
+}
+if (!columnEncoded) {
 if (optionBuilder.length()!=0)
 optionBuilder.append(",");
 optionBuilder.append("COLUMN_ENCODED_BYTES=0");
 }
-   this.tableDDLOptions = optionBuilder.toString();
-   }
+this.tableDDLOptions = optionBuilder.toString();
+}
 
 private static Connection getConnection(Properties props) throws 
SQLException {
 
props.setProperty(QueryServices.INDEX_MUTATE_BATCH_SIZE_THRESHOLD_ATTRIB, 
Integer.toString(1));
@@ -106,7 +106,7 @@ public class MutableIndexIT extends ParallelStatsDisabledIT 
{
 return getConnection(props);
 }
 
-   
@Parameters(name="MutableIndexIT_localIndex={0},transactional={1},columnEncoded={2}")
 // name is used by failsafe as file name in reports
+
@Parameters(name="MutableIndexIT_localIndex={0},transactional={1},columnEncoded={2}")
 // name is used by failsafe as file name in reports
 public static Collection data() {
 return Arrays.asList(new Object[][] { 
 { false, null, false }, { false, null, true },
@@ -121,16 +121,16 @@ public class MutableIndexIT extends 
ParallelStatsDisabledIT {
 @Test
 public void testCoveredColumnUpdates() throws Exception {
 try (Connection conn = getConnection()) {
-   conn.setAutoCommit(false);
-   String tableName = "TBL_" + generateUniqueName();
-   String indexName = "IDX_" + generateUniqueName();
-   String fullTableName = 
SchemaUtil.getTableName(TestUtil.DEFAULT_SCHEMA_NAME, tableName);
-   String fullIndexName = 
SchemaUtil.getTableName(TestUtil.DEFAULT_SCHEMA_NAME, indexName);
+conn.setAutoCommit(false);
+String tableName = "TBL_" + generateUniqueName();
+String indexName = "IDX_" + generateUniqueName();
+String fullTableName = 
SchemaUtil.getTableName(TestUtil.DEFAULT_SCHEMA_NAME, tableName);
+String fullIndexName = 
SchemaUtil.getTableName(TestUtil.DEFAULT_SCHEMA_NAME, indexName);
 
-   TestUtil.createMultiCFTestTable(conn, fullTableName, 
tableDDLOptions);
+TestUtil.createMultiCFTestTable(conn, fullTableName, 
tableDDLOptions);
 populateMultiCFTestTable(fullTableName);
 conn.createStatement().execute("CREATE " + (localIndex ? " LOCAL " 
: "") + " INDEX " + indexName + " ON " + fullTableName 
-   + " (char_col1 ASC, int_col1 ASC) INCLUDE (long_col1, 
long_col2)");
++ " (char_col1 ASC, int_col1 ASC) INCLUDE (long_col1, 
long_col2)");
 
 String query = "SELECT char_col1, int_col1, long_col2 from " + 
fullTableName;
 ResultSet rs = conn.createStatement().executeQuery("EXPLAIN " + 
query);
@@ -203,7 +203,7 @@ public class MutableIndexIT extends ParallelStatsDisabledIT 
{
 query = "SELECT b.* from " + fullTableName + " where int_col1 
= 4";
 rs = conn.createStatement()

[03/50] [abbrv] phoenix git commit: PHOENIX-4744 Reduce parallelism in integration test runs

2018-07-25 Thread elserj
PHOENIX-4744 Reduce parallelism in integration test runs


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/58415e2f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/58415e2f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/58415e2f

Branch: refs/heads/4.x-HBase-1.4
Commit: 58415e2f31617ec543cb01e8bc27ce44c4efbe0d
Parents: 48b6f99
Author: James Taylor 
Authored: Fri May 18 08:50:38 2018 -0700
Committer: James Taylor 
Committed: Fri May 18 17:01:47 2018 -0700

--
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/58415e2f/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 87c2db6..38c1c41 100644
--- a/pom.xml
+++ b/pom.xml
@@ -122,7 +122,7 @@
 
 
 8
-8
+4
 false
 false
 



[13/50] [abbrv] phoenix git commit: PHOENIX-4544 Update statistics inconsistent behavior

2018-07-25 Thread elserj
PHOENIX-4544 Update statistics inconsistent behavior


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/1b18d347
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/1b18d347
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/1b18d347

Branch: refs/heads/master
Commit: 1b18d3474d2e3deca429374dac60062b48fe1592
Parents: 5aebc96
Author: Ankit Singhal 
Authored: Thu Jun 7 11:01:14 2018 -0700
Committer: Ankit Singhal 
Committed: Thu Jun 7 11:01:14 2018 -0700

--
 .../StatisticsCollectionRunTrackerIT.java   | 32 +-
 .../UngroupedAggregateRegionObserver.java   |  7 +--
 .../apache/phoenix/schema/MetaDataClient.java   |  8 ++--
 .../stats/StatisticsCollectionRunTracker.java   | 46 +---
 .../java/org/apache/phoenix/util/ByteUtil.java  | 16 ++-
 5 files changed, 83 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/1b18d347/phoenix-core/src/it/java/org/apache/phoenix/coprocessor/StatisticsCollectionRunTrackerIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/coprocessor/StatisticsCollectionRunTrackerIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/coprocessor/StatisticsCollectionRunTrackerIT.java
index 71c9e01..cdf1fde 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/coprocessor/StatisticsCollectionRunTrackerIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/coprocessor/StatisticsCollectionRunTrackerIT.java
@@ -25,12 +25,15 @@ import static org.junit.Assert.assertTrue;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.PreparedStatement;
+import java.util.Arrays;
+import java.util.HashSet;
 import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.end2end.ParallelStatsEnabledIT;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.schema.stats.StatisticsCollectionRunTracker;
@@ -60,12 +63,15 @@ public class StatisticsCollectionRunTrackerIT extends 
ParallelStatsEnabledIT {
 StatisticsCollectionRunTracker tracker =
 StatisticsCollectionRunTracker.getInstance(new 
Configuration());
 // assert that the region wasn't added to the tracker
-assertTrue(tracker.addUpdateStatsCommandRegion(regionInfo));
+assertTrue(tracker.addUpdateStatsCommandRegion(regionInfo, new 
HashSet(Arrays.asList(Bytes.toBytes("0");
+assertTrue(tracker.addUpdateStatsCommandRegion(regionInfo, new 
HashSet(Arrays.asList(Bytes.toBytes("L#0");
 // assert that removing the region from the tracker works
-assertTrue(tracker.removeUpdateStatsCommandRegion(regionInfo));
+assertTrue(tracker.removeUpdateStatsCommandRegion(regionInfo, new 
HashSet(Arrays.asList(Bytes.toBytes("0");
+assertTrue(tracker.removeUpdateStatsCommandRegion(regionInfo, new 
HashSet(Arrays.asList(Bytes.toBytes("L#0");
 runUpdateStats(tableName);
 // assert that after update stats is complete, tracker isn't tracking 
the region any more
-assertFalse(tracker.removeUpdateStatsCommandRegion(regionInfo));
+assertFalse(tracker.removeUpdateStatsCommandRegion(regionInfo, new 
HashSet(Arrays.asList(Bytes.toBytes("0");
+assertFalse(tracker.removeUpdateStatsCommandRegion(regionInfo, new 
HashSet(Arrays.asList(Bytes.toBytes("L#0");;
 }
 
 @Test
@@ -102,25 +108,28 @@ public class StatisticsCollectionRunTrackerIT extends 
ParallelStatsEnabledIT {
 RegionInfo regionInfo = createTableAndGetRegion(tableName);
 // simulate stats collection via major compaction by marking the 
region as compacting in the tracker
 markRegionAsCompacting(regionInfo);
-Assert.assertEquals("Row count didn't match", 
COMPACTION_UPDATE_STATS_ROW_COUNT, runUpdateStats(tableName));
+//there will be no update for local index and a table, so checking 2 
* COMPACTION_UPDATE_STATS_ROW_COUNT
+Assert.assertEquals("Row count didn't match", 
COMPACTION_UPDATE_STATS_ROW_COUNT * 2, runUpdateStats(tableName));
 StatisticsCollectionRunTracker tracker =
 StatisticsCollectionRunTracker.getInstance(new 
Configuration());
 // assert that the tracker state was cleared.
-assertFalse(tracker.removeUpdateStatsCommandRegion(regionInfo));
+HashSet familyMap = new 
HashSet(Arrays.asList(Bytes.toBytes("0")));
+
assertFalse(tracker.removeUpdateStatsCommandRegion(regionInfo,familyMap));
 }
 
 @Test
 public

[21/50] [abbrv] phoenix git commit: PHOENIX-4773 Move HTable rollback wrapper into Tephra TAL method

2018-07-25 Thread elserj
PHOENIX-4773 Move HTable rollback wrapper into Tephra TAL method


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/32154dfe
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/32154dfe
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/32154dfe

Branch: refs/heads/4.x-HBase-1.4
Commit: 32154dfe01e769b5bc84e85c8de481abf9c1a2cc
Parents: d900771
Author: James Taylor 
Authored: Mon Jun 4 20:27:36 2018 -0700
Committer: James Taylor 
Committed: Tue Jun 5 22:03:56 2018 -0700

--
 .../apache/phoenix/cache/ServerCacheClient.java |  21 +-
 .../apache/phoenix/execute/HashJoinPlan.java|   7 +-
 .../apache/phoenix/execute/MutationState.java   | 190 ++-
 .../PhoenixTxIndexMutationGenerator.java|  42 
 .../phoenix/index/IndexMetaDataCacheClient.java |  67 ++-
 .../apache/phoenix/join/HashCacheClient.java|   5 +-
 .../transaction/OmidTransactionContext.java |   3 +-
 .../transaction/PhoenixTransactionContext.java  |   5 +-
 .../transaction/TephraTransactionContext.java   |  91 -
 .../java/org/apache/phoenix/util/IndexUtil.java |   8 +
 10 files changed, 230 insertions(+), 209 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/32154dfe/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java 
b/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
index 68de747..5e284bd 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/cache/ServerCacheClient.java
@@ -70,7 +70,6 @@ import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.query.QueryServicesOptions;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTable.IndexType;
-import org.apache.phoenix.schema.TableRef;
 import org.apache.phoenix.util.Closeables;
 import org.apache.phoenix.util.SQLCloseable;
 import org.apache.phoenix.util.SQLCloseables;
@@ -90,7 +89,7 @@ public class ServerCacheClient {
 private static final Random RANDOM = new Random();
public static final String HASH_JOIN_SERVER_CACHE_RESEND_PER_SERVER = 
"hash.join.server.cache.resend.per.server";
 private final PhoenixConnection connection;
-private final Map cacheUsingTableRefMap = new 
ConcurrentHashMap();
+private final Map cacheUsingTableMap = new 
ConcurrentHashMap();
 
 /**
  * Construct client used to create a serialized cached snapshot of a table 
and send it to each region server
@@ -220,12 +219,12 @@ public class ServerCacheClient {
 }
 
 public ServerCache addServerCache(ScanRanges keyRanges, final 
ImmutableBytesWritable cachePtr, final byte[] txState,
-final ServerCacheFactory cacheFactory, final TableRef 
cacheUsingTableRef) throws SQLException {
-return addServerCache(keyRanges, cachePtr, txState, cacheFactory, 
cacheUsingTableRef, false);
+final ServerCacheFactory cacheFactory, final PTable 
cacheUsingTable) throws SQLException {
+return addServerCache(keyRanges, cachePtr, txState, cacheFactory, 
cacheUsingTable, false);
 }
 
 public ServerCache addServerCache(ScanRanges keyRanges, final 
ImmutableBytesWritable cachePtr, final byte[] txState,
-final ServerCacheFactory cacheFactory, final TableRef 
cacheUsingTableRef, boolean storeCacheOnClient)
+final ServerCacheFactory cacheFactory, final PTable 
cacheUsingTable, boolean storeCacheOnClient)
 throws SQLException {
 ConnectionQueryServices services = connection.getQueryServices();
 List closeables = new ArrayList();
@@ -241,7 +240,6 @@ public class ServerCacheClient {
 ExecutorService executor = services.getExecutor();
 List> futures = Collections.emptyList();
 try {
-final PTable cacheUsingTable = cacheUsingTableRef.getTable();
 List locations = 
services.getAllTableRegions(cacheUsingTable.getPhysicalName().getBytes());
 int nRegions = locations.size();
 // Size these based on worst case
@@ -258,7 +256,7 @@ public class ServerCacheClient {
 servers.add(entry);
 if (LOG.isDebugEnabled()) 
{LOG.debug(addCustomAnnotations("Adding cache entry to be sent for " + entry, 
connection));}
 final byte[] key = 
getKeyInRegion(entry.getRegionInfo().getStartKey());
-final HTableInterface htable = 
services.getTable(cacheUsingTableRef.getTable().getPhysicalName().getBytes());
+final HTableInterface htable = 
services.getTable(cacheU

[33/50] [abbrv] phoenix git commit: PHOENIX-4790 Addendum to check that query is a point lookup for delete not to run query

2018-07-25 Thread elserj
PHOENIX-4790 Addendum to check that query is a point lookup for delete not to 
run query


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/28832267
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/28832267
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/28832267

Branch: refs/heads/4.x-HBase-1.4
Commit: 288322678de7e443f959f55eaab946716ca517d8
Parents: aee568b
Author: James Taylor 
Authored: Wed Jul 11 22:01:14 2018 -0700
Committer: James Taylor 
Committed: Wed Jul 11 22:01:14 2018 -0700

--
 .../src/main/java/org/apache/phoenix/compile/DeleteCompiler.java  | 3 +++
 1 file changed, 3 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/28832267/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
index 78b2db9..a214c24 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
@@ -541,6 +541,9 @@ public class DeleteCompiler {
 Iterator iterator = queryPlans.iterator();
 while (iterator.hasNext()) {
 QueryPlan plan = iterator.next();
+// Must be a point lookup in order to not run a query since
+// we have to have the full key be enumerated.
+noQueryReqd &= plan.getContext().getScanRanges().isPointLookup();
 if (plan.getTableRef().getTable().getIndexType() == 
IndexType.LOCAL) {
 if (!plan.getContext().getDataColumns().isEmpty()) {
 iterator.remove();



[38/50] [abbrv] phoenix git commit: PHOENIX-3534 Support multi region SYSTEM.CATALOG table (Thomas D'Silva and Rahul Gidwani)

2018-07-25 Thread elserj
http://git-wip-us.apache.org/repos/asf/phoenix/blob/d56fd3c9/phoenix-core/src/main/java/org/apache/phoenix/util/UpgradeUtil.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/util/UpgradeUtil.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/UpgradeUtil.java
index 1634159..aa3e1a6 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/UpgradeUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/UpgradeUtil.java
@@ -99,6 +99,9 @@ import org.apache.phoenix.coprocessor.MetaDataEndpointImpl;
 import org.apache.phoenix.coprocessor.MetaDataProtocol;
 import org.apache.phoenix.coprocessor.MetaDataProtocol.MetaDataMutationResult;
 import org.apache.phoenix.coprocessor.MetaDataProtocol.MutationCode;
+import org.apache.phoenix.coprocessor.TableInfo;
+import org.apache.phoenix.coprocessor.TableViewFinderResult;
+import org.apache.phoenix.coprocessor.ViewFinder;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
 import org.apache.phoenix.query.QueryConstants;
@@ -178,11 +181,6 @@ public class UpgradeUtil {
 private static final String DELETE_LINK = "DELETE FROM " + 
SYSTEM_CATALOG_SCHEMA + "." + SYSTEM_CATALOG_TABLE
 + " WHERE (" + TABLE_SCHEM + "=? OR (" + TABLE_SCHEM + " IS NULL 
AND ? IS NULL)) AND " + TABLE_NAME + "=? AND " + COLUMN_FAMILY + "=? AND " + 
LINK_TYPE + " = " + LinkType.PHYSICAL_TABLE.getSerializedValue();
 
-private static final String GET_VIEWS_QUERY = "SELECT " + TENANT_ID + "," 
+ TABLE_SCHEM + "," + TABLE_NAME
-+ " FROM " + SYSTEM_CATALOG_SCHEMA + "." + SYSTEM_CATALOG_TABLE + 
" WHERE " + COLUMN_FAMILY + " = ? AND "
-+ LINK_TYPE + " = " + LinkType.PHYSICAL_TABLE.getSerializedValue() 
+ " AND ( " + TABLE_TYPE + "=" + "'"
-+ PTableType.VIEW.getSerializedValue() + "' OR " + TABLE_TYPE + " 
IS NULL) ORDER BY "+TENANT_ID;
-
 private UpgradeUtil() {
 }
 
@@ -1153,6 +1151,78 @@ public class UpgradeUtil {
 }
 }
 
+/**
+ * Move child links from SYSTEM.CATALOG to SYSTEM.CHILD_LINK
+ * @param oldMetaConnection caller should take care of closing the passed 
connection appropriately
+ * @throws SQLException
+ */
+public static void moveChildLinks(PhoenixConnection oldMetaConnection) 
throws SQLException {
+PhoenixConnection metaConnection = null;
+try {
+// Need to use own connection with max time stamp to be able to 
read all data from SYSTEM.CATALOG 
+metaConnection = new PhoenixConnection(oldMetaConnection, 
HConstants.LATEST_TIMESTAMP);
+logger.info("Upgrading metadata to add parent to child links for 
views");
+metaConnection.commit();
+String createChildLink = "UPSERT INTO SYSTEM.CHILD_LINK(TENANT_ID, 
TABLE_SCHEM, TABLE_NAME, COLUMN_NAME, COLUMN_FAMILY, LINK_TYPE) " +
+"SELECT TENANT_ID, TABLE_SCHEM, 
TABLE_NAME, COLUMN_NAME, COLUMN_FAMILY, LINK_TYPE " + 
+"FROM SYSTEM.CATALOG " + 
+"WHERE LINK_TYPE = 4";
+metaConnection.createStatement().execute(createChildLink);
+metaConnection.commit();
+String deleteChildLink = "DELETE FROM SYSTEM.CATALOG WHERE 
LINK_TYPE = 4 ";
+metaConnection.createStatement().execute(deleteChildLink);
+metaConnection.commit();
+metaConnection.getQueryServices().clearCache();
+} finally {
+if (metaConnection != null) {
+metaConnection.close();
+}
+}
+}
+
+public static void addViewIndexToParentLinks(PhoenixConnection 
oldMetaConnection) throws SQLException {
+   // Need to use own connection with max time stamp to be able to read 
all data from SYSTEM.CATALOG 
+try (PhoenixConnection queryConn = new 
PhoenixConnection(oldMetaConnection, HConstants.LATEST_TIMESTAMP);
+   PhoenixConnection upsertConn = new 
PhoenixConnection(oldMetaConnection, HConstants.LATEST_TIMESTAMP)) {
+logger.info("Upgrading metadata to add parent links for indexes on 
views");
+   String indexQuery = "SELECT TENANT_ID, TABLE_SCHEM, 
TABLE_NAME, COLUMN_FAMILY FROM SYSTEM.CATALOG WHERE LINK_TYPE = "
+   + 
LinkType.INDEX_TABLE.getSerializedValue();
+   String createViewIndexLink = "UPSERT INTO 
SYSTEM.CATALOG (TENANT_ID, TABLE_SCHEM, TABLE_NAME, COLUMN_FAMILY, LINK_TYPE) 
VALUES (?,?,?,?,?) ";
+ResultSet rs = 
queryConn.createStatement().executeQuery(indexQuery);
+String prevTenantId = null;
+PhoenixConnection metaConn = queryConn;
+Properties props = new Properties(queryConn.getClientInfo());
+   props.setProperty

[08/50] [abbrv] phoenix git commit: PHOENIX-2715 Query Log (addendum 2)

2018-07-25 Thread elserj
PHOENIX-2715 Query Log (addendum 2)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8d80d5a9
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8d80d5a9
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8d80d5a9

Branch: refs/heads/4.x-HBase-1.4
Commit: 8d80d5a987376dfde822ea60be93618682a45d9c
Parents: f043061
Author: Ankit Singhal 
Authored: Thu May 24 22:36:03 2018 -0700
Committer: Ankit Singhal 
Committed: Thu May 24 22:36:03 2018 -0700

--
 bin/hbase-site.xml | 4 
 1 file changed, 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/8d80d5a9/bin/hbase-site.xml
--
diff --git a/bin/hbase-site.xml b/bin/hbase-site.xml
index 2f360e2..0ab9fd8 100644
--- a/bin/hbase-site.xml
+++ b/bin/hbase-site.xml
@@ -24,8 +24,4 @@
 hbase.regionserver.wal.codec
 org.apache.hadoop.hbase.regionserver.wal.IndexedWALEditCodec
   
-  
-phoenix.log.level
-DEBUG
-  
 



[39/50] [abbrv] phoenix git commit: PHOENIX-3534 Support multi region SYSTEM.CATALOG table (Thomas D'Silva and Rahul Gidwani)

2018-07-25 Thread elserj
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c53d9ada/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumnImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumnImpl.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumnImpl.java
index 45aca98..a267629 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumnImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumnImpl.java
@@ -17,6 +17,7 @@
  */
 package org.apache.phoenix.schema;
 
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.phoenix.coprocessor.generated.PTableProtos;
 import org.apache.phoenix.query.QueryConstants;
@@ -42,36 +43,63 @@ public class PColumnImpl implements PColumn {
 private boolean isRowTimestamp;
 private boolean isDynamic;
 private byte[] columnQualifierBytes;
-
+private boolean derived;
+private long timestamp;
+
 public PColumnImpl() {
 }
 
-public PColumnImpl(PName name,
-   PName familyName,
-   PDataType dataType,
-   Integer maxLength,
-   Integer scale,
-   boolean nullable,
-   int position,
-   SortOrder sortOrder, Integer arrSize, byte[] 
viewConstant, boolean isViewReferenced, String expressionStr, boolean 
isRowTimestamp, boolean isDynamic, byte[] columnQualifierBytes) {
-init(name, familyName, dataType, maxLength, scale, nullable, position, 
sortOrder, arrSize, viewConstant, isViewReferenced, expressionStr, 
isRowTimestamp, isDynamic, columnQualifierBytes);
+public PColumnImpl(PColumn column, int position) {
+this(column, column.isDerived(), position);
 }
 
-public PColumnImpl(PColumn column, int position) {
+public PColumnImpl(PColumn column, byte[] viewConstant, boolean 
isViewReferenced) {
+this(column.getName(), column.getFamilyName(), column.getDataType(), 
column.getMaxLength(),
+column.getScale(), column.isNullable(), column.getPosition(), 
column.getSortOrder(), column.getArraySize(), viewConstant, isViewReferenced, 
column.getExpressionStr(), column.isRowTimestamp(), column.isDynamic(), 
column.getColumnQualifierBytes(),
+column.getTimestamp(), column.isDerived());
+}
+
+public PColumnImpl(PColumn column, boolean derivedColumn, int position) {
+this(column, derivedColumn, position, column.getViewConstant());
+}
+
+public PColumnImpl(PColumn column, boolean derivedColumn, int position, 
byte[] viewConstant) {
 this(column.getName(), column.getFamilyName(), column.getDataType(), 
column.getMaxLength(),
-column.getScale(), column.isNullable(), position, 
column.getSortOrder(), column.getArraySize(), column.getViewConstant(), 
column.isViewReferenced(), column.getExpressionStr(), column.isRowTimestamp(), 
column.isDynamic(), column.getColumnQualifierBytes());
+column.getScale(), column.isNullable(), position, 
column.getSortOrder(), column.getArraySize(), viewConstant, 
column.isViewReferenced(), column.getExpressionStr(), column.isRowTimestamp(), 
column.isDynamic(), column.getColumnQualifierBytes(),
+column.getTimestamp(), derivedColumn);
+}
+
+public PColumnImpl(PName name, PName familyName, PDataType dataType, 
Integer maxLength, Integer scale, boolean nullable,
+int position, SortOrder sortOrder, Integer arrSize, byte[] 
viewConstant, boolean isViewReferenced, String expressionStr, boolean 
isRowTimestamp, boolean isDynamic,
+byte[] columnQualifierBytes, long timestamp) {
+this(name, familyName, dataType, maxLength, scale, nullable, position, 
sortOrder, arrSize, viewConstant, isViewReferenced, expressionStr, 
isRowTimestamp, isDynamic, columnQualifierBytes, timestamp, false);
+}
+
+public PColumnImpl(PName name, PName familyName, PDataType dataType, 
Integer maxLength, Integer scale, boolean nullable,
+int position, SortOrder sortOrder, Integer arrSize, byte[] 
viewConstant, boolean isViewReferenced, String expressionStr, boolean 
isRowTimestamp, boolean isDynamic,
+byte[] columnQualifierBytes, long timestamp, boolean derived) {
+init(name, familyName, dataType, maxLength, scale, nullable, position, 
sortOrder, arrSize, viewConstant, isViewReferenced, expressionStr, 
isRowTimestamp, isDynamic, columnQualifierBytes, timestamp, derived);
+}
+
+private PColumnImpl(PName familyName, PName columnName, Long timestamp) {
+this.familyName = familyName;
+this.name = columnName;
+this.derived = true;
+if (timestamp!=null) {
+this.timestamp = timestamp;
+}
 }
 
-private void init(PName name,
-PName familyName,
-PDataType da

[02/50] [abbrv] phoenix git commit: PHOENIX-4704 Presplit index tables when building asynchronously

2018-07-25 Thread elserj
PHOENIX-4704 Presplit index tables when building asynchronously


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6ab9b372
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6ab9b372
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6ab9b372

Branch: refs/heads/4.x-HBase-1.4
Commit: 6ab9b372f16f37b11e657b6803c6a60007815824
Parents: cb17adb
Author: Vincent Poon 
Authored: Fri May 18 11:22:26 2018 -0700
Committer: Vincent Poon 
Committed: Fri May 18 16:42:53 2018 -0700

--
 .../org/apache/phoenix/end2end/IndexToolIT.java | 106 +-
 .../phoenix/mapreduce/index/IndexTool.java  | 142 ++-
 2 files changed, 242 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6ab9b372/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java
index afb6d72..a120aaa 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/IndexToolIT.java
@@ -21,12 +21,15 @@ import static 
org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
@@ -34,8 +37,16 @@ import java.util.Properties;
 import java.util.UUID;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.RegionScanner;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.mapreduce.index.IndexTool;
-import org.apache.phoenix.query.BaseTest;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.query.QueryServicesOptions;
 import org.apache.phoenix.util.PropertiesUtil;
@@ -54,7 +65,7 @@ import com.google.common.collect.Maps;
 
 @RunWith(Parameterized.class)
 @Category(NeedsOwnMiniClusterTest.class)
-public class IndexToolIT extends BaseTest {
+public class IndexToolIT extends ParallelStatsEnabledIT {
 
 private final boolean localIndex;
 private final boolean transactional;
@@ -85,7 +96,7 @@ public class IndexToolIT extends BaseTest {
 }
 
 @BeforeClass
-public static void doSetup() throws Exception {
+public static void setup() throws Exception {
 Map serverProps = Maps.newHashMapWithExpectedSize(2);
 serverProps.put(QueryServices.EXTRA_JDBC_ARGUMENTS_ATTRIB,
 QueryServicesOptions.DEFAULT_EXTRA_JDBC_ARGUMENTS);
@@ -249,6 +260,86 @@ public class IndexToolIT extends BaseTest {
 }
 }
 
+/**
+ * Test presplitting an index table
+ */
+@Test
+public void testSplitIndex() throws Exception {
+if (localIndex) return; // can't split local indexes
+String schemaName = generateUniqueName();
+String dataTableName = generateUniqueName();
+String dataTableFullName = SchemaUtil.getTableName(schemaName, 
dataTableName);
+final TableName dataTN = TableName.valueOf(dataTableFullName);
+String indexTableName = generateUniqueName();
+String indexTableFullName = SchemaUtil.getTableName(schemaName, 
indexTableName);
+TableName indexTN = TableName.valueOf(indexTableFullName);
+try (Connection conn =
+DriverManager.getConnection(getUrl(), 
PropertiesUtil.deepCopy(TEST_PROPERTIES));
+HBaseAdmin admin = 
conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+String dataDDL =
+"CREATE TABLE " + dataTableFullName + "(\n"
++ "ID VARCHAR NOT NULL PRIMARY KEY,\n"
++ "\"info\".CAR_NUM VARCHAR(18) NULL,\n"
++ "\"test\".CAR_NUM VARCHAR(18) NULL,\n"
++ "\"info\".CAP_DATE VARCHAR NULL,\n" + 
"\"info\".ORG_ID BIGINT NULL,\n"
++ "\"info\".ORG_NAME VARCHAR(255) NULL\n" + ") 
COLUMN_ENCODED_BYTES = 0";
+

[05/50] [abbrv] phoenix git commit: PHOENIX-4692 ArrayIndexOutOfBoundsException in ScanRanges.intersectScan

2018-07-25 Thread elserj
PHOENIX-4692 ArrayIndexOutOfBoundsException in ScanRanges.intersectScan


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/28b9de0d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/28b9de0d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/28b9de0d

Branch: refs/heads/4.x-HBase-1.4
Commit: 28b9de0da01b61e61c749ed433ddb995596b3e45
Parents: 58415e2
Author: maryannxue 
Authored: Fri May 18 19:46:29 2018 -0700
Committer: maryannxue 
Committed: Fri May 18 19:46:29 2018 -0700

--
 .../apache/phoenix/end2end/SkipScanQueryIT.java | 21 
 .../apache/phoenix/compile/WhereCompiler.java   | 12 +--
 .../apache/phoenix/execute/BaseQueryPlan.java   |  2 +-
 .../apache/phoenix/execute/HashJoinPlan.java|  5 -
 4 files changed, 32 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/28b9de0d/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanQueryIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanQueryIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanQueryIT.java
index d98bbe2..fb0b568 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanQueryIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanQueryIT.java
@@ -563,4 +563,25 @@ public class SkipScanQueryIT extends 
ParallelStatsDisabledIT {
 assertFalse(rs.next());
 }
 }
+
+@Test
+public void testSkipScanJoinOptimization() throws Exception {
+try (Connection conn = DriverManager.getConnection(getUrl())) {
+String tableName = generateUniqueName();
+String viewName = generateUniqueName();
+String idxName = "IDX_" + tableName;
+conn.setAutoCommit(true);
+conn.createStatement().execute(
+"create table " + tableName + " (PK1 INTEGER NOT NULL, PK2 
INTEGER NOT NULL, " +
+" ID1 INTEGER, ID2 INTEGER CONSTRAINT PK PRIMARY 
KEY(PK1 , PK2))SALT_BUCKETS = 4");
+conn.createStatement().execute("upsert into " + tableName + " 
values (1,1,1,1)");
+conn.createStatement().execute("upsert into " + tableName + " 
values (2,2,2,2)");
+conn.createStatement().execute("upsert into " + tableName + " 
values (2,3,1,2)");
+conn.createStatement().execute("create view " + viewName + " as 
select * from " +
+tableName + " where PK1 in (1,2)");
+conn.createStatement().execute("create index " + idxName + " on " 
+ viewName + " (ID1)");
+ResultSet rs = conn.createStatement().executeQuery("select /*+ 
INDEX(" + viewName + " " + idxName + ") */ * from " + viewName + " where ID1 = 
1 ");
+assertTrue(rs.next());
+}
+}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/28b9de0d/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java
index 2cf5857..832b1f0 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java
@@ -105,9 +105,9 @@ public class WhereCompiler {
  * @throws AmbiguousColumnException if an unaliased column name is 
ambiguous across multiple tables
  */
 public static Expression compile(StatementContext context, 
FilterableStatement statement, ParseNode viewWhere, Set 
subqueryNodes) throws SQLException {
-return compile(context, statement, viewWhere, 
Collections.emptyList(), false, subqueryNodes);
+return compile(context, statement, viewWhere, 
Collections.emptyList(), subqueryNodes);
 }
-
+
 /**
  * Optimize scan ranges by applying dynamically generated filter 
expressions.
  * @param context the shared context during query compilation
@@ -118,7 +118,7 @@ public class WhereCompiler {
  * @throws ColumnNotFoundException if column name could not be resolved
  * @throws AmbiguousColumnException if an unaliased column name is 
ambiguous across multiple tables
  */
-public static Expression compile(StatementContext context, 
FilterableStatement statement, ParseNode viewWhere, List 
dynamicFilters, boolean hashJoinOptimization, Set 
subqueryNodes) throws SQLException {
+public static Expression compile(StatementContext context, 
FilterableStatement statement, ParseNode viewWhere, List 
dynamicFilters, Set subqueryNodes) throws SQLException {
 ParseN

[12/50] [abbrv] phoenix git commit: Set version to 4.14.0-HBase-1.4 for release

2018-07-25 Thread elserj
Set version to 4.14.0-HBase-1.4 for release


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d6012ca1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d6012ca1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d6012ca1

Branch: refs/heads/4.x-HBase-1.4
Commit: d6012ca19fee70ed20269d6cca0cd43cdc39b612
Parents: c35daba
Author: Mujtaba 
Authored: Fri May 25 16:40:04 2018 -0700
Committer: Mujtaba 
Committed: Fri May 25 16:40:04 2018 -0700

--
 phoenix-assembly/pom.xml   | 2 +-
 phoenix-client/pom.xml | 2 +-
 phoenix-core/pom.xml   | 2 +-
 phoenix-flume/pom.xml  | 2 +-
 phoenix-hive/pom.xml   | 2 +-
 phoenix-kafka/pom.xml  | 2 +-
 phoenix-load-balancer/pom.xml  | 2 +-
 phoenix-pherf/pom.xml  | 2 +-
 phoenix-pig/pom.xml| 2 +-
 phoenix-queryserver-client/pom.xml | 2 +-
 phoenix-queryserver/pom.xml| 2 +-
 phoenix-server/pom.xml | 2 +-
 phoenix-spark/pom.xml  | 2 +-
 phoenix-tracing-webapp/pom.xml | 2 +-
 pom.xml| 2 +-
 15 files changed, 15 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/d6012ca1/phoenix-assembly/pom.xml
--
diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml
index b7fb04c..9850420 100644
--- a/phoenix-assembly/pom.xml
+++ b/phoenix-assembly/pom.xml
@@ -27,7 +27,7 @@
   
 org.apache.phoenix
 phoenix
-4.14.0-HBase-1.4-SNAPSHOT
+4.14.0-HBase-1.4
   
   phoenix-assembly
   Phoenix Assembly

http://git-wip-us.apache.org/repos/asf/phoenix/blob/d6012ca1/phoenix-client/pom.xml
--
diff --git a/phoenix-client/pom.xml b/phoenix-client/pom.xml
index d65da69..56cdfbf 100644
--- a/phoenix-client/pom.xml
+++ b/phoenix-client/pom.xml
@@ -27,7 +27,7 @@
   
 org.apache.phoenix
 phoenix
-4.14.0-HBase-1.4-SNAPSHOT
+4.14.0-HBase-1.4
   
   phoenix-client
   Phoenix Client

http://git-wip-us.apache.org/repos/asf/phoenix/blob/d6012ca1/phoenix-core/pom.xml
--
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
index 2bb7293..9e9fed9 100644
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@ -4,7 +4,7 @@
   
 org.apache.phoenix
 phoenix
-4.14.0-HBase-1.4-SNAPSHOT
+4.14.0-HBase-1.4
   
   phoenix-core
   Phoenix Core

http://git-wip-us.apache.org/repos/asf/phoenix/blob/d6012ca1/phoenix-flume/pom.xml
--
diff --git a/phoenix-flume/pom.xml b/phoenix-flume/pom.xml
index 3780d8d..6da777a 100644
--- a/phoenix-flume/pom.xml
+++ b/phoenix-flume/pom.xml
@@ -26,7 +26,7 @@
   
 org.apache.phoenix
 phoenix
-4.14.0-HBase-1.4-SNAPSHOT
+4.14.0-HBase-1.4
   
   phoenix-flume
   Phoenix - Flume

http://git-wip-us.apache.org/repos/asf/phoenix/blob/d6012ca1/phoenix-hive/pom.xml
--
diff --git a/phoenix-hive/pom.xml b/phoenix-hive/pom.xml
index 8fbc447..6bb4f02 100644
--- a/phoenix-hive/pom.xml
+++ b/phoenix-hive/pom.xml
@@ -27,7 +27,7 @@
   
 org.apache.phoenix
 phoenix
-4.14.0-HBase-1.4-SNAPSHOT
+4.14.0-HBase-1.4
   
   phoenix-hive
   Phoenix - Hive

http://git-wip-us.apache.org/repos/asf/phoenix/blob/d6012ca1/phoenix-kafka/pom.xml
--
diff --git a/phoenix-kafka/pom.xml b/phoenix-kafka/pom.xml
index 09547c5..b104cfb 100644
--- a/phoenix-kafka/pom.xml
+++ b/phoenix-kafka/pom.xml
@@ -26,7 +26,7 @@

org.apache.phoenix
phoenix
-   4.14.0-HBase-1.4-SNAPSHOT
+   4.14.0-HBase-1.4

phoenix-kafka
Phoenix - Kafka

http://git-wip-us.apache.org/repos/asf/phoenix/blob/d6012ca1/phoenix-load-balancer/pom.xml
--
diff --git a/phoenix-load-balancer/pom.xml b/phoenix-load-balancer/pom.xml
index ec4a1ad..388f66f 100644
--- a/phoenix-load-balancer/pom.xml
+++ b/phoenix-load-balancer/pom.xml
@@ -27,7 +27,7 @@
   
 org.apache.phoenix
 phoenix
-4.14.0-HBase-1.4-SNAPSHOT
+4.14.0-HBase-1.4
   
   phoenix-load-balancer
   Phoenix Load Balancer

http://git-wip-us.apache.org/repos/asf/phoenix/blob/d6012ca1/phoenix-pherf/pom.xml
--
diff --git a/phoenix-pherf/pom.xml b/phoenix-pherf/pom.xml
index 7f4815c..81fe4bd 100644
--- a/phoenix-pherf/pom.xml
+++ b/phoenix-pherf/pom.xml
@@ -15,7 +15,7 @@

  

[25/50] [abbrv] phoenix git commit: PHOENIX-4776 Remove creation of .md5 files from dev/make_rc.sh

2018-07-25 Thread elserj
PHOENIX-4776 Remove creation of .md5 files from dev/make_rc.sh


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c233c15c
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c233c15c
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c233c15c

Branch: refs/heads/4.x-HBase-1.4
Commit: c233c15c1c8b5af1d96d143d486f0afcad786518
Parents: 406eb70
Author: Pedro Boado 
Authored: Mon Jun 11 23:35:56 2018 +0100
Committer: Pedro Boado 
Committed: Mon Jun 11 23:35:56 2018 +0100

--
 dev/make_rc.sh | 2 --
 1 file changed, 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c233c15c/dev/make_rc.sh
--
diff --git a/dev/make_rc.sh b/dev/make_rc.sh
index 47439d3..f6cd06c 100755
--- a/dev/make_rc.sh
+++ b/dev/make_rc.sh
@@ -106,13 +106,11 @@ function_sign() {
   # if on MAC OS
   if [[ "$OSTYPE" == "darwin"* ]]; then
 gpg2 --armor --output $phoenix_tar.asc --detach-sig $phoenix_tar;
-openssl md5 $phoenix_tar > $phoenix_tar.md5;
 openssl dgst -sha512 $phoenix_tar > $phoenix_tar.sha512;
 openssl dgst -sha256 $phoenix_tar >> $phoenix_tar.sha256;
   # all other OS
   else
 gpg --armor --output $phoenix_tar.asc --detach-sig $phoenix_tar;
-md5sum -b $phoenix_tar > $phoenix_tar.md5;
 sha512sum -b $phoenix_tar > $phoenix_tar.sha512;
 sha256sum -b $phoenix_tar >> $phoenix_tar.sha256;
   fi



[27/50] [abbrv] phoenix git commit: PHOENIX-4789 Exception when setting TTL on Tephra transactional table

2018-07-25 Thread elserj
PHOENIX-4789 Exception when setting TTL on Tephra transactional table


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9dbe20ac
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9dbe20ac
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9dbe20ac

Branch: refs/heads/4.x-HBase-1.4
Commit: 9dbe20ac7c90710e4ce37eb84be31b9e5e94bc87
Parents: 6acdae0
Author: James Taylor 
Authored: Mon Jun 18 15:00:02 2018 +0200
Committer: James Taylor 
Committed: Mon Jun 18 15:00:02 2018 +0200

--
 .../org/apache/phoenix/tx/TransactionIT.java| 30 
 .../query/ConnectionQueryServicesImpl.java  |  3 ++
 2 files changed, 33 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/9dbe20ac/phoenix-core/src/it/java/org/apache/phoenix/tx/TransactionIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/tx/TransactionIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/tx/TransactionIT.java
index c0ec6b8..12c3b7a 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/tx/TransactionIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/tx/TransactionIT.java
@@ -25,6 +25,7 @@ import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
+import java.io.IOException;
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
 import java.sql.DriverManager;
@@ -38,6 +39,9 @@ import java.util.Properties;
 
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.TableNotFoundException;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.end2end.ParallelStatsDisabledIT;
 import org.apache.phoenix.exception.SQLExceptionCode;
@@ -54,6 +58,7 @@ import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.StringUtil;
 import org.apache.phoenix.util.TestUtil;
+import org.apache.tephra.TxConstants;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -370,4 +375,29 @@ public class TransactionIT  extends 
ParallelStatsDisabledIT {
 conn.close();
 }
 }
+
+private static void assertTTL(Admin admin, String tableName, int ttl) 
throws TableNotFoundException, IOException {
+HTableDescriptor tableDesc = 
admin.getTableDescriptor(TableName.valueOf(tableName));
+for (HColumnDescriptor colDesc : tableDesc.getFamilies()) {
+
assertEquals(ttl,Integer.parseInt(colDesc.getValue(TxConstants.PROPERTY_TTL)));
+
assertEquals(HColumnDescriptor.DEFAULT_TTL,colDesc.getTimeToLive());
+}
+}
+
+@Test
+public void testSetTTL() throws Exception {
+Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+TransactionFactory.Provider txProvider = 
TransactionFactory.Provider.valueOf(this.txProvider);
+try (Connection conn = DriverManager.getConnection(getUrl(), props); 
Admin admin = 
conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
+String tableName = generateUniqueName();
+conn.createStatement().execute("CREATE TABLE " + tableName + 
+"(K VARCHAR PRIMARY KEY) 
TRANSACTIONAL=true,TRANSACTION_PROVIDER='" + txProvider + "',TTL=100");
+assertTTL(admin, tableName, 100);
+tableName = generateUniqueName();
+conn.createStatement().execute("CREATE TABLE " + tableName + 
+"(K VARCHAR PRIMARY KEY) 
TRANSACTIONAL=true,TRANSACTION_PROVIDER='" + txProvider + "'");
+conn.createStatement().execute("ALTER TABLE " + tableName + " SET 
TTL=" + 200);
+assertTTL(admin, tableName, 200);
+}
+}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/9dbe20ac/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index 63a598f..71d4b3d 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@ -2234,6 +2234,8 @@ public class ConnectionQueryServicesImpl extends 
DelegateQueryServices implement
 Map props = entry.getValue();
 if (props == null) {
   

[49/50] [abbrv] phoenix git commit: PHOENIX-4797 file not found or file exist exception when create global index use -snapshot option

2018-07-25 Thread elserj
PHOENIX-4797 file not found or file exist exception when create global index 
use -snapshot option


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f7927153
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f7927153
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f7927153

Branch: refs/heads/4.x-HBase-1.4
Commit: f7927153af2ce22f5b586306b5d6abf0d9231345
Parents: 135b890
Author: 492066199 <492066...@qq.com>
Authored: Fri Jul 6 10:45:38 2018 +0800
Committer: Karan Mehta 
Committed: Tue Jul 24 21:38:07 2018 -0700

--
 .../org/apache/phoenix/iterate/TableSnapshotResultIterator.java  | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f7927153/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableSnapshotResultIterator.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableSnapshotResultIterator.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableSnapshotResultIterator.java
index df60339..016d3be 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableSnapshotResultIterator.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableSnapshotResultIterator.java
@@ -39,6 +39,7 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
+import java.util.UUID;
 
 public class TableSnapshotResultIterator implements ResultIterator {
 
@@ -65,7 +66,8 @@ public class TableSnapshotResultIterator implements 
ResultIterator {
 this.scan = scan;
 this.scanMetricsHolder = scanMetricsHolder;
 this.scanIterator = UNINITIALIZED_SCANNER;
-this.restoreDir = new 
Path(configuration.get(PhoenixConfigurationUtil.RESTORE_DIR_KEY));
+this.restoreDir = new 
Path(configuration.get(PhoenixConfigurationUtil.RESTORE_DIR_KEY),
+UUID.randomUUID().toString());
 this.snapshotName = configuration.get(
 PhoenixConfigurationUtil.SNAPSHOT_NAME_KEY);
 this.rootDir = FSUtils.getRootDir(configuration);



[40/50] [abbrv] phoenix git commit: PHOENIX-3534 Support multi region SYSTEM.CATALOG table (Thomas D'Silva and Rahul Gidwani)

2018-07-25 Thread elserj
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c53d9ada/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java
index 8dd4a88..dab1048 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java
@@ -29,9 +29,10 @@ import java.util.Collections;
 import java.util.List;
 
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.compile.ColumnProjector;
 import org.apache.phoenix.compile.ExpressionProjector;
@@ -40,7 +41,12 @@ import org.apache.phoenix.compile.StatementContext;
 import org.apache.phoenix.coprocessor.MetaDataProtocol;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.exception.SQLExceptionInfo;
+import org.apache.phoenix.expression.Expression;
+import org.apache.phoenix.expression.KeyValueColumnExpression;
+import org.apache.phoenix.expression.LikeExpression;
+import org.apache.phoenix.expression.LiteralExpression;
 import org.apache.phoenix.expression.RowKeyColumnExpression;
+import org.apache.phoenix.expression.StringBasedLikeExpression;
 import org.apache.phoenix.expression.function.ExternalSqlTypeIdFunction;
 import org.apache.phoenix.expression.function.IndexStateNameFunction;
 import org.apache.phoenix.expression.function.SQLIndexTypeFunction;
@@ -48,25 +54,33 @@ import 
org.apache.phoenix.expression.function.SQLTableTypeFunction;
 import org.apache.phoenix.expression.function.SQLViewTypeFunction;
 import org.apache.phoenix.expression.function.SqlTypeNameFunction;
 import org.apache.phoenix.expression.function.TransactionProviderNameFunction;
-import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
-import org.apache.phoenix.iterate.DelegateResultIterator;
 import org.apache.phoenix.iterate.MaterializedResultIterator;
 import org.apache.phoenix.iterate.ResultIterator;
+import org.apache.phoenix.parse.LikeParseNode.LikeType;
 import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.schema.MetaDataClient;
+import org.apache.phoenix.schema.PColumn;
+import org.apache.phoenix.schema.PColumnImpl;
 import org.apache.phoenix.schema.PDatum;
 import org.apache.phoenix.schema.PName;
+import org.apache.phoenix.schema.PNameFactory;
+import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTable.LinkType;
 import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.RowKeyValueAccessor;
 import org.apache.phoenix.schema.SortOrder;
-import org.apache.phoenix.schema.tuple.ResultTuple;
+import org.apache.phoenix.schema.tuple.MultiKeyValueTuple;
 import org.apache.phoenix.schema.tuple.SingleKeyValueTuple;
 import org.apache.phoenix.schema.tuple.Tuple;
+import org.apache.phoenix.schema.types.PBoolean;
 import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.schema.types.PInteger;
+import org.apache.phoenix.schema.types.PSmallint;
+import org.apache.phoenix.schema.types.PVarbinary;
 import org.apache.phoenix.schema.types.PVarchar;
 import org.apache.phoenix.util.ByteUtil;
 import org.apache.phoenix.util.KeyValueUtil;
+import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.StringUtil;
 
@@ -336,6 +350,11 @@ public class PhoenixDatabaseMetaData implements 
DatabaseMetaData {
 public static final byte[] COLUMN_QUALIFIER_COUNTER_BYTES = 
Bytes.toBytes(COLUMN_QUALIFIER_COUNTER);
 public static final String USE_STATS_FOR_PARALLELIZATION = 
"USE_STATS_FOR_PARALLELIZATION";
 public static final byte[] USE_STATS_FOR_PARALLELIZATION_BYTES = 
Bytes.toBytes(USE_STATS_FOR_PARALLELIZATION);
+
+public static final String SYSTEM_CHILD_LINK_TABLE = "CHILD_LINK";
+public static final String SYSTEM_CHILD_LINK_NAME = 
SchemaUtil.getTableName(SYSTEM_CATALOG_SCHEMA, SYSTEM_CHILD_LINK_TABLE);
+public static final byte[] SYSTEM_CHILD_LINK_NAME_BYTES = 
Bytes.toBytes(SYSTEM_CHILD_LINK_NAME);
+public static final TableName SYSTEM_LINK_HBASE_TABLE_NAME = 
TableName.valueOf(SYSTEM_CHILD_LINK_NAME);
 
 
 //SYSTEM:LOG
@@ -467,179 +486,352 @@ public class PhoenixDatabaseMetaData implements 
DatabaseMetaData {
 private static void appendConjunction(StringBuilder buf) {
 buf.append(buf.length() == 0 ? "" : " and ");
 }
-
+
+private static final PColumnImpl TENANT_ID_COLUMN = new

[04/50] [abbrv] phoenix git commit: PHOENIX-4742 DistinctPrefixFilter potentially seeks to lesser key when descending or null value

2018-07-25 Thread elserj
PHOENIX-4742 DistinctPrefixFilter potentially seeks to lesser key when 
descending or null value


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/48b6f99a
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/48b6f99a
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/48b6f99a

Branch: refs/heads/4.x-HBase-1.4
Commit: 48b6f99acdeb91e3167e7beeed49747f7b7dcc6c
Parents: 6ab9b37
Author: James Taylor 
Authored: Fri May 18 08:46:38 2018 -0700
Committer: James Taylor 
Committed: Fri May 18 17:01:47 2018 -0700

--
 .../org/apache/phoenix/end2end/OrderByIT.java   | 45 +---
 .../GroupedAggregateRegionObserver.java |  4 +-
 .../phoenix/filter/DistinctPrefixFilter.java| 31 ++
 .../apache/phoenix/filter/SkipScanFilter.java   |  4 +-
 .../org/apache/phoenix/schema/RowKeySchema.java | 20 +
 5 files changed, 61 insertions(+), 43 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/48b6f99a/phoenix-core/src/it/java/org/apache/phoenix/end2end/OrderByIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/OrderByIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/OrderByIT.java
index 9d6a450..578a3af 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/OrderByIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/OrderByIT.java
@@ -27,10 +27,10 @@ import static org.apache.phoenix.util.TestUtil.ROW7;
 import static org.apache.phoenix.util.TestUtil.ROW8;
 import static org.apache.phoenix.util.TestUtil.ROW9;
 import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.apache.phoenix.util.TestUtil.assertResultSet;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
-import static org.apache.phoenix.util.TestUtil.assertResultSet;
 
 import java.sql.Connection;
 import java.sql.Date;
@@ -663,7 +663,6 @@ public class OrderByIT extends ParallelStatsDisabledIT {
 conn = DriverManager.getConnection(getUrl(), props);
 
 String tableName=generateUniqueName();
-conn.createStatement().execute("DROP TABLE if exists "+tableName);
 String sql="CREATE TABLE "+tableName+" ( "+
 "ORGANIZATION_ID VARCHAR,"+
 "CONTAINER_ID VARCHAR,"+
@@ -871,26 +870,25 @@ public class OrderByIT extends ParallelStatsDisabledIT {
 }
 
 @Test
-public void testOrderByReverseOptimizationBug3491() throws Exception {
+public void testOrderByReverseOptimization() throws Exception {
 for(boolean salted: new boolean[]{true,false}) {
-doTestOrderByReverseOptimizationBug3491(salted,true,true,true);
-doTestOrderByReverseOptimizationBug3491(salted,true,true,false);
-doTestOrderByReverseOptimizationBug3491(salted,true,false,true);
-doTestOrderByReverseOptimizationBug3491(salted,true,false,false);
-doTestOrderByReverseOptimizationBug3491(salted,false,true,true);
-doTestOrderByReverseOptimizationBug3491(salted,false,true,false);
-doTestOrderByReverseOptimizationBug3491(salted,false,false,true);
-doTestOrderByReverseOptimizationBug3491(salted,false,false,false);
+doTestOrderByReverseOptimization(salted,true,true,true);
+doTestOrderByReverseOptimization(salted,true,true,false);
+doTestOrderByReverseOptimization(salted,true,false,true);
+doTestOrderByReverseOptimization(salted,true,false,false);
+doTestOrderByReverseOptimization(salted,false,true,true);
+doTestOrderByReverseOptimization(salted,false,true,false);
+doTestOrderByReverseOptimization(salted,false,false,true);
+doTestOrderByReverseOptimization(salted,false,false,false);
 }
 }
 
-private void doTestOrderByReverseOptimizationBug3491(boolean 
salted,boolean desc1,boolean desc2,boolean desc3) throws Exception {
+private void doTestOrderByReverseOptimization(boolean salted,boolean 
desc1,boolean desc2,boolean desc3) throws Exception {
 Connection conn = null;
 try {
 Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
 conn = DriverManager.getConnection(getUrl(), props);
 String tableName=generateUniqueName();
-conn.createStatement().execute("DROP TABLE if exists "+tableName);
 String sql="CREATE TABLE "+tableName+" ( "+
 "ORGANIZATION_ID INTEGER NOT NULL,"+
 "CONTAINER_ID INTEGER NOT NULL,"+
@@ -965,26 +963,25 @@ public class OrderByIT extends ParallelStatsDisabledIT {
 }
 
 @Tes

[09/50] [abbrv] phoenix git commit: PHOENIX-4728 ARRAY_APPEND and ARRAY_REMOVE should work with null column value (Xavier Jodoin)

2018-07-25 Thread elserj
PHOENIX-4728 ARRAY_APPEND and ARRAY_REMOVE should work with null column value 
(Xavier Jodoin)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9335972f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9335972f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9335972f

Branch: refs/heads/4.x-HBase-1.4
Commit: 9335972fb1d231f9ff1cce8b55ca5753149cba2c
Parents: 8d80d5a
Author: James Taylor 
Authored: Fri May 25 09:36:41 2018 -0700
Committer: James Taylor 
Committed: Fri May 25 09:39:35 2018 -0700

--
 .../phoenix/end2end/ArrayAppendFunctionIT.java  | 38 
 .../apache/phoenix/compile/UpsertCompiler.java  |  2 +-
 2 files changed, 39 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/9335972f/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayAppendFunctionIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayAppendFunctionIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayAppendFunctionIT.java
index caa17fe..7962a7a 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayAppendFunctionIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayAppendFunctionIT.java
@@ -132,6 +132,24 @@ public class ArrayAppendFunctionIT extends 
ParallelStatsDisabledIT {
 }
 
 @Test
+public void testUpsertEmptyArrayModification() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+String tableName = initTables(conn);
+
+ResultSet rs;
+String[] strings = new String[]{"34567"};
+Array array = conn.createArrayOf("VARCHAR", strings);
+
+conn.createStatement().execute("UPSERT INTO " + tableName + " 
(region_name,nullVarChar) SELECT region_name,ARRAY_APPEND(nullVarChar,'34567') 
FROM " + tableName);
+conn.commit();
+
+rs = conn.createStatement().executeQuery("SELECT nullVarChar FROM " + 
tableName + " LIMIT 1");
+assertTrue(rs.next());
+assertEquals(array, rs.getArray(1));
+assertFalse(rs.next());
+}
+
+@Test
 public void testArrayAppendFunctionVarchar() throws Exception {
 Connection conn = DriverManager.getConnection(getUrl());
 String tableName = initTables(conn);
@@ -147,6 +165,26 @@ public class ArrayAppendFunctionIT extends 
ParallelStatsDisabledIT {
 assertEquals(array, rs.getArray(1));
 assertFalse(rs.next());
 }
+
+@Test
+public void testUpsertArrayAppendFunctionVarchar() throws Exception {
+Connection conn = DriverManager.getConnection(getUrl());
+String tableName = initTables(conn);
+
+conn.createStatement().execute("UPSERT INTO " + tableName + " 
(region_name,varchars) SELECT region_name,ARRAY_APPEND(varchars,'34567') as 
varchars FROM " + tableName+ " WHERE region_name = 'SF Bay Area'");
+conn.commit();
+
+ResultSet rs;
+rs = conn.createStatement().executeQuery("SELECT varchars FROM " + 
tableName + " WHERE region_name = 'SF Bay Area'");
+assertTrue(rs.next());
+
+String[] strings = new String[]{"2345", "46345", "23234", "34567"};
+
+Array array = conn.createArrayOf("VARCHAR", strings);
+
+assertEquals(array, rs.getArray(1));
+assertFalse(rs.next());
+}
 
 @Test
 public void testArrayAppendFunctionInteger() throws Exception {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/9335972f/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
index 30f0c18..c3cfa10 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
@@ -549,7 +549,7 @@ public class UpsertCompiler {
 select = SelectStatement.create(select, hint);
 // Pass scan through if same table in upsert and select so that 
projection is computed correctly
 // Use optimizer to choose the best plan
-QueryCompiler compiler = new QueryCompiler(statement, select, 
selectResolver, targetColumns, parallelIteratorFactoryToBe, new 
SequenceManager(statement), false, false, null);
+QueryCompiler compiler = new QueryCompiler(statement, select, 
selectResolver, targetColumns, parallelIteratorFactoryToBe, new 
SequenceManager(statement), true, false, null);
 queryPlanToBe = compiler.compile();
 // This is pos

[32/50] [abbrv] phoenix git commit: PHOENIX-4790 Simplify check for client side delete

2018-07-25 Thread elserj
PHOENIX-4790 Simplify check for client side delete


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d35a7519
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d35a7519
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d35a7519

Branch: refs/heads/4.x-HBase-1.4
Commit: d35a7519e65174978657b4c6254d595fac1b0009
Parents: 6195f8e
Author: James Taylor 
Authored: Tue Jun 19 16:33:21 2018 +0200
Committer: James Taylor 
Committed: Wed Jul 11 07:28:47 2018 -0700

--
 .../apache/phoenix/compile/DeleteCompiler.java  | 24 
 1 file changed, 5 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/d35a7519/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
index 5f9c76c..78b2db9 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
@@ -46,7 +46,6 @@ import org.apache.phoenix.execute.AggregatePlan;
 import org.apache.phoenix.execute.MutationState;
 import org.apache.phoenix.execute.MutationState.MultiRowMutationState;
 import org.apache.phoenix.execute.MutationState.RowMutationState;
-import org.apache.phoenix.filter.SkipScanFilter;
 import org.apache.phoenix.hbase.index.ValueGetter;
 import org.apache.phoenix.hbase.index.covered.update.ColumnReference;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
@@ -481,6 +480,7 @@ public class DeleteCompiler {
 projectedColumns.add(column);
 aliasedNodes.add(FACTORY.aliasedNode(null, FACTORY.column(null, 
'"' + column.getName().getString() + '"', null)));
 }
+boolean noQueryReqd = true;
 // Project all non PK indexed columns so that we can do the proper 
index maintenance
 for (PTable index : table.getIndexes()) {
 IndexMaintainer maintainer = index.getIndexMaintainer(table, 
connection);
@@ -492,6 +492,8 @@ public class DeleteCompiler {
 boolean hasNoColumnFamilies = 
table.getColumnFamilies().isEmpty();
 PColumn column = hasNoColumnFamilies ? 
table.getColumnForColumnName(columnName) : 
table.getColumnFamily(familyName).getPColumnForColumnName(columnName);
 if(!projectedColumns.contains(column)) {
+// We must run a query if any index contains a non pk 
column
+noQueryReqd = false;
 projectedColumns.add(column);
 aliasedNodes.add(FACTORY.aliasedNode(null, 
FACTORY.column(hasNoColumnFamilies ? null : TableName.create(null, familyName), 
'"' + columnName + '"', null)));
 }
@@ -511,7 +513,7 @@ public class DeleteCompiler {
 select = StatementNormalizer.normalize(transformedSelect, 
resolverToBe);
 }
 final boolean hasPreOrPostProcessing = hasPreProcessing || 
hasPostProcessing;
-boolean noQueryReqd = !hasPreOrPostProcessing;
+noQueryReqd &= !hasPreOrPostProcessing;
 // No limit and no sub queries, joins, etc in where clause
 // Can't run on same server for transactional data, as we need the row 
keys for the data
 // that is being upserted for conflict detection purposes.
@@ -550,24 +552,8 @@ public class DeleteCompiler {
 }
 
 runOnServer &= queryPlans.get(0).getTableRef().getTable().getType() != 
PTableType.INDEX;
-
-// We need to have all indexed columns available in all immutable 
indexes in order
-// to generate the delete markers from the query. We also cannot have 
any filters
-// except for our SkipScanFilter for point lookups.
-// A simple check of the non existence of a where clause in the parse 
node is not sufficient, as the where clause
-// may have been optimized out. Instead, we check that there's a 
single SkipScanFilter
-// If we can generate a plan for every index, that means all the 
required columns are available in every index,
-// hence we can drive the delete from any of the plans.
-noQueryReqd &= queryPlans.size() == 1 + clientSideIndexes.size();
-int queryPlanIndex = 0;
-while (noQueryReqd && queryPlanIndex < queryPlans.size()) {
-QueryPlan plan = queryPlans.get(queryPlanIndex++);
-StatementContext context = plan.getContext();
-noQueryReqd &= (!context.getScan().hasFilter()
-|| context.getScan().getFilter() instanceof SkipScanFilter)

[41/50] [abbrv] phoenix git commit: PHOENIX-3534 Support multi region SYSTEM.CATALOG table (Thomas D'Silva and Rahul Gidwani)

2018-07-25 Thread elserj
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c53d9ada/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
index 883f96d..29cf2a3 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
@@ -91,8 +91,9 @@ public abstract class MetaDataProtocol extends 
MetaDataService {
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP_4_12_0 = 
MIN_SYSTEM_TABLE_TIMESTAMP_4_11_0;
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP_4_13_0 = 
MIN_SYSTEM_TABLE_TIMESTAMP_4_11_0;
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP_4_14_0 = 
MIN_TABLE_TIMESTAMP + 28;
+public static final long MIN_SYSTEM_TABLE_TIMESTAMP_4_15_0 = 
MIN_TABLE_TIMESTAMP + 29;
 // MIN_SYSTEM_TABLE_TIMESTAMP needs to be set to the max of all the 
MIN_SYSTEM_TABLE_TIMESTAMP_* constants
-public static final long MIN_SYSTEM_TABLE_TIMESTAMP = 
MIN_SYSTEM_TABLE_TIMESTAMP_4_14_0;
+public static final long MIN_SYSTEM_TABLE_TIMESTAMP = 
MIN_SYSTEM_TABLE_TIMESTAMP_4_15_0;
 // Version below which we should disallow usage of mutable secondary 
indexing.
 public static final int MUTABLE_SI_VERSION_THRESHOLD = 
VersionUtil.encodeVersion("0", "94", "10");
 public static final int MAX_LOCAL_SI_VERSION_DISALLOW = 
VersionUtil.encodeVersion("0", "98", "8");

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c53d9ada/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/TableInfo.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/TableInfo.java 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/TableInfo.java
new file mode 100644
index 000..b1c5f65
--- /dev/null
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/TableInfo.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.coprocessor;
+
+import java.util.Arrays;
+
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.phoenix.util.SchemaUtil;
+
+public class TableInfo {
+
+private final byte[] tenantId;
+private final byte[] schema;
+private final byte[] name;
+
+public TableInfo(byte[] tenantId, byte[] schema, byte[] name) {
+this.tenantId = tenantId;
+this.schema = schema;
+this.name = name;
+}
+
+public byte[] getRowKeyPrefix() {
+return SchemaUtil.getTableKey(tenantId, schema, name);
+}
+
+@Override
+public String toString() {
+return Bytes.toStringBinary(getRowKeyPrefix());
+}
+
+public byte[] getTenantId() {
+return tenantId;
+}
+
+public byte[] getSchemaName() {
+return schema;
+}
+
+public byte[] getTableName() {
+return name;
+}
+
+@Override
+public int hashCode() {
+final int prime = 31;
+int result = 1;
+result = prime * result + Arrays.hashCode(name);
+result = prime * result + Arrays.hashCode(schema);
+result = prime * result + Arrays.hashCode(tenantId);
+return result;
+}
+
+@Override
+public boolean equals(Object obj) {
+if (this == obj) return true;
+if (obj == null) return false;
+if (getClass() != obj.getClass()) return false;
+TableInfo other = (TableInfo) obj;
+if (!Arrays.equals(name, other.name)) return false;
+if (!Arrays.equals(schema, other.schema)) return false;
+if (!Arrays.equals(tenantId, other.tenantId)) return false;
+return true;
+}
+}

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c53d9ada/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/TableViewFinderResult.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/TableViewFinderRe

Apache-Phoenix | Master | Build Successful

2018-07-25 Thread Apache Jenkins Server
Master branch build status Successful
Source repository https://git-wip-us.apache.org/repos/asf?p=phoenix.git;a=shortlog;h=refs/heads/master

Last Successful Compiled Artifacts https://builds.apache.org/job/Phoenix-master/lastSuccessfulBuild/artifact/

Last Complete Test Report https://builds.apache.org/job/Phoenix-master/lastCompletedBuild/testReport/

Changes
[elserj] PHOENIX-4817 Fixed Phoenix Tracing Web Application (fixed check null,



Build times for last couple of runsLatest build time is the right most | Legend blue: normal, red: test failure, gray: timeout


[29/50] [abbrv] phoenix git commit: Changed version to 5.1.0-HBase-2.0-SNAPSHOT

2018-07-25 Thread elserj
Changed version to 5.1.0-HBase-2.0-SNAPSHOT


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ae234304
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ae234304
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ae234304

Branch: refs/heads/master
Commit: ae2343042e022e24a69cf7754c63844af49085d3
Parents: 8a819c6
Author: Rajeshbabu Chintaguntla 
Authored: Wed Jul 4 07:50:39 2018 +0530
Committer: Rajeshbabu Chintaguntla 
Committed: Wed Jul 4 07:50:39 2018 +0530

--
 phoenix-assembly/pom.xml   | 2 +-
 phoenix-client/pom.xml | 2 +-
 phoenix-core/pom.xml   | 2 +-
 phoenix-flume/pom.xml  | 2 +-
 phoenix-hive/pom.xml   | 2 +-
 phoenix-kafka/pom.xml  | 2 +-
 phoenix-load-balancer/pom.xml  | 2 +-
 phoenix-pherf/pom.xml  | 2 +-
 phoenix-pig/pom.xml| 2 +-
 phoenix-queryserver-client/pom.xml | 2 +-
 phoenix-queryserver/pom.xml| 2 +-
 phoenix-server/pom.xml | 2 +-
 phoenix-spark/pom.xml  | 2 +-
 phoenix-tracing-webapp/pom.xml | 2 +-
 pom.xml| 2 +-
 15 files changed, 15 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ae234304/phoenix-assembly/pom.xml
--
diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml
index 77b1b83..5cbe55f 100644
--- a/phoenix-assembly/pom.xml
+++ b/phoenix-assembly/pom.xml
@@ -27,7 +27,7 @@
   
 org.apache.phoenix
 phoenix
-5.0.0-HBase-2.0
+5.1.0-HBase-2.0-SNAPSHOT
   
   phoenix-assembly
   Phoenix Assembly

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ae234304/phoenix-client/pom.xml
--
diff --git a/phoenix-client/pom.xml b/phoenix-client/pom.xml
index 7b53483..6189bba 100644
--- a/phoenix-client/pom.xml
+++ b/phoenix-client/pom.xml
@@ -27,7 +27,7 @@
   
 org.apache.phoenix
 phoenix
-5.0.0-HBase-2.0
+5.1.0-HBase-2.0-SNAPSHOT
   
   phoenix-client
   Phoenix Client

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ae234304/phoenix-core/pom.xml
--
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
index 96610fd..4065564 100644
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@ -4,7 +4,7 @@
   
 org.apache.phoenix
 phoenix
-5.0.0-HBase-2.0
+5.1.0-HBase-2.0-SNAPSHOT
   
   phoenix-core
   Phoenix Core

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ae234304/phoenix-flume/pom.xml
--
diff --git a/phoenix-flume/pom.xml b/phoenix-flume/pom.xml
index affd5ce..1d66c90 100644
--- a/phoenix-flume/pom.xml
+++ b/phoenix-flume/pom.xml
@@ -26,7 +26,7 @@
   
 org.apache.phoenix
 phoenix
-5.0.0-HBase-2.0
+5.1.0-HBase-2.0-SNAPSHOT
   
   phoenix-flume
   Phoenix - Flume

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ae234304/phoenix-hive/pom.xml
--
diff --git a/phoenix-hive/pom.xml b/phoenix-hive/pom.xml
index d2c995a..08ad855 100644
--- a/phoenix-hive/pom.xml
+++ b/phoenix-hive/pom.xml
@@ -27,7 +27,7 @@
   
 org.apache.phoenix
 phoenix
-5.0.0-HBase-2.0
+5.1.0-HBase-2.0-SNAPSHOT
   
   phoenix-hive
   Phoenix - Hive

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ae234304/phoenix-kafka/pom.xml
--
diff --git a/phoenix-kafka/pom.xml b/phoenix-kafka/pom.xml
index 8abf6fe..cde8b8b 100644
--- a/phoenix-kafka/pom.xml
+++ b/phoenix-kafka/pom.xml
@@ -26,7 +26,7 @@

org.apache.phoenix
phoenix
-   5.0.0-HBase-2.0
+   5.1.0-HBase-2.0-SNAPSHOT

phoenix-kafka
Phoenix - Kafka

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ae234304/phoenix-load-balancer/pom.xml
--
diff --git a/phoenix-load-balancer/pom.xml b/phoenix-load-balancer/pom.xml
index 53a370e..4789c14 100644
--- a/phoenix-load-balancer/pom.xml
+++ b/phoenix-load-balancer/pom.xml
@@ -27,7 +27,7 @@
   
 org.apache.phoenix
 phoenix
-5.0.0-HBase-2.0
+5.1.0-HBase-2.0-SNAPSHOT
   
   phoenix-load-balancer
   Phoenix Load Balancer

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ae234304/phoenix-pherf/pom.xml
--
diff --git a/phoenix-pherf/pom.xml b/phoenix-pherf/pom.xml
index b5d5631..b5880f1 100644
--- a/phoenix-pherf/pom.xml
+++ b/phoenix-pherf/pom.xml
@@ -15,7 +15,7 @@

 

[34/50] [abbrv] phoenix git commit: PHOENIX-4790 Addendum to check that query is a point lookup for delete not to run query

2018-07-25 Thread elserj
PHOENIX-4790 Addendum to check that query is a point lookup for delete not to 
run query


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ab930f49
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ab930f49
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ab930f49

Branch: refs/heads/master
Commit: ab930f49762e5ccf3f4ac108a2402552753f0d6a
Parents: b91d7b0
Author: James Taylor 
Authored: Wed Jul 11 22:01:14 2018 -0700
Committer: James Taylor 
Committed: Wed Jul 11 22:06:13 2018 -0700

--
 .../src/main/java/org/apache/phoenix/compile/DeleteCompiler.java  | 3 +++
 1 file changed, 3 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ab930f49/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
index f4e8896..34c1590 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
@@ -542,6 +542,9 @@ public class DeleteCompiler {
 Iterator iterator = queryPlans.iterator();
 while (iterator.hasNext()) {
 QueryPlan plan = iterator.next();
+// Must be a point lookup in order to not run a query since
+// we have to have the full key be enumerated.
+noQueryReqd &= plan.getContext().getScanRanges().isPointLookup();
 if (plan.getTableRef().getTable().getIndexType() == 
IndexType.LOCAL) {
 if (!plan.getContext().getDataColumns().isEmpty()) {
 iterator.remove();



[28/50] [abbrv] phoenix git commit: Updating KEYS for rajeshb...@apache.org

2018-07-25 Thread elserj
Updating KEYS for rajeshb...@apache.org


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8a819c6c
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8a819c6c
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8a819c6c

Branch: refs/heads/master
Commit: 8a819c6c3b4befce190c6ac759f744df511de61d
Parents: 479fab0
Author: Rajeshbabu Chintaguntla 
Authored: Tue Jun 26 10:34:08 2018 -0700
Committer: Rajeshbabu Chintaguntla 
Committed: Tue Jun 26 10:34:08 2018 -0700

--
 KEYS | 58 ++
 1 file changed, 58 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/8a819c6c/KEYS
--
diff --git a/KEYS b/KEYS
index dca2b38..ce3fcae 100644
--- a/KEYS
+++ b/KEYS
@@ -129,3 +129,61 @@ 
b4Ki2TbCKOPwRYX6+b2vmmOUlZ28yjeAhzHKXS9lh7nY2T+cv5cwkPZ1uw2AKG55
 pcl2PvSVaZeoTjguW8BqFjkEzA==
 =p/IB
 -END PGP PUBLIC KEY BLOCK-
+pub   4096R/AAEDBD7B 2018-06-11
+uid  Rajeshbabu Chintaguntla (CODE SIGNING KEY) 

+sig 3AAEDBD7B 2018-06-11  Rajeshbabu Chintaguntla (CODE SIGNING KEY) 

+sub   4096R/2A1817DB 2018-06-11
+sig  AAEDBD7B 2018-06-11  Rajeshbabu Chintaguntla (CODE SIGNING KEY) 

+
+-BEGIN PGP PUBLIC KEY BLOCK-
+Version: GnuPG v1
+
+mQINBFseLj8BEADgUf2qoeYAE3MzcxEDysTm0fo/qtHJXTvZexVi+w/Xg86yvSuw
+WzChpYY8Z04cY+ruXRmwfvsfH//YlquJr02uHavojeqKsOW2kV03czh16ktNWwtS
+v7OIU3RzCFt67D2wtMJzbiA9l1WJyzvOx7xnV+uovUNsURYc42YEnwgMJPUQUz0D
+4x8SBKBmL31DELZ72PdgsMSNs7xBrvsXqybDlWkFFkTZWQ6ZwGE6L22ddmOT4N8e
+0FMxIJRrCPn7xMKF2xUEE05sKw+lxLbCO38gyc42AeEVhP+qCvG3E1ZhvqNe/l1y
+LHm4vBNxmR9bgXHf2BvmSBKdGeI2oHM7BQzZ071nH3PHnwr3ksTvNpqW9FkgzjEp
+02fv/n9iANp4ZYCphOdm1Ea9iGKvn0M44seoktT2Gh0Eof3hNOsHJcOCmU494w4I
+77zeCFHbLEVpCMNDLzHccmBBD5FNoOQRQouTuy8aWXsZeRUQrzLw3fF5KcxEtTaI
+SzYKmbGJayeYDpLDvZcU7KZyKm3X+bHWWXlU4sG37hCDfEcQjSwJBXyDmWMpqw7R
+FC6pFHCjIXofoNFdY6nMOeQ89B8hSCzgBb5Dh8hxwsGIjSyNF63reJ64EAKAi69s
+EAmWx1f3ivi54ZSHhb2mQ/vfdy84xUk8RwYF6WBNmR3I+DRQNKpKDvUxSwARAQAB
+tEJSYWplc2hiYWJ1IENoaW50YWd1bnRsYSAoQ09ERSBTSUdOSU5HIEtFWSkgPHJh
+amVzaGJhYnVAYXBhY2hlLm9yZz6JAjgEEwECACIFAlseLj8CGwMGCwkIBwMCBhUI
+AgkKCwQWAgMBAh4BAheAAAoJEDGP2Guq7b1774QQAN2//IV3qQNGoGU2Ihvo3F3n
+6YkW8y9qy3+Gz17mNhcBgtxZitf8xdch5JqDh/TU/LpV4z9guxnM3dNuwvZ3tbIw
+qog0UTDGoeCe4YokRI0kl19a1rn575PQyYu6TEEJrgTKA/yzz8Ck2UbEJ6gJMkxj
+DW5EXFm0QN5Eg+NMrsxcDuf+9hFJ1LagFXZdhPfI9+XzuDDXgZaTXxhd8QQu0d/d
+dcrEc1Pnk65CdDHHezdtMvm0WTw+staTO6/bF6QonM+qgpsZaPxtnU3/rMamGT5k
+rRG+STpLDdz4EUkk5+ZMSJei7glxv4Nx+16yi2YoAtRgoxyjPKBG8v6GCSWXu2K+
+LYoUZmQsnUGgvH2E8V/7XAh68H4/YSTnqhn8TEq5771aya9PzhhvHipSHkWxIkR2
+UjmnJjTkBCXkM4ju4hc/QHFa2J/tvzcAx0WRCUqioE6i/JI72C+nmgRZ4TUYy/nS
+tpAH4MSlWCwEWHEKDs85rKIDTL8qAi4X4HAreSsZp8+igYsr0zkhLjAhaJB6qMpB
+oek0Ke7O1L4m5AEzMayJzMdqdSKK4X3rzW8Qu7Wc1e+oN3BXEWfdGOYB3Mrssfhp
+ZvJEHrTwuMBtb2bAdoiCG5Q/i6mozfzfYaGHd5i7J2ujDdC9Qie/lRyQlwryy1gb
+hxGRAScz2xfMa/XNCawCuQINBFseLj8BEAChyrxmBZsR/gKi0Wj6TaLGENMf2mT7
+hiVTnKYU0yAgA1LsDGMVyF+dzwNgLL600LLqrqFMTVqrOiEF3hKLAQ3cjxSE7rwg
+0X02ZCOX69Y3+7/xOAMUT93Aqk1WZYUN28uCGO+6Y7Q4oT/V8OZbixSzaIAJ3jyI
+jGElDaw+VMYydMXU7Z9c4rhIjjEE3AkKtlB5KGAKHTUYi8GiJhqrmy02jhtHSXT/
+AwAIIxT8gZ5Rgx4MErIjGn7fp+fvbMYK0FbcbreO7flyls6dHwWAGcI5VbxWCCZu
+5YZlpo8z/iYkGLB2QV/gtzp7Wm4lZkMX61j+PTpzNO7I6rRQckhyeFF1ZcPK73ey
+EmjwWBmo7K/iMPWCYFrU7/ybdXf+TDzuyUYosJUSqG8cIERHBIQc5E4TsdNd3hJR
+TPNF9YzXv/iJK9PDO962zLS3cP373/QCiU+Q8rAhqfrX9Mygobsvc1PopPVn66hc
+BzwUUuLUY/5du2/hh8BC99BIa3BJ/wxCN7kc1UGdyyffoBB5gcnOC14r7SbJSVNU
+ymw1y8UXC2XQQyKFixUCsJfY94ZwqO7cmPxsWBc+DUtL7AP69ZkzYFmPlIwh0f+G
+l8/m4ai+tWnxIUhMfyvwyo2E4AJ+AGCfZwUdxB+uCD7AbTy/Sef7vv0zXGTk7brb
+am8myY/u0xaF8QARAQABiQIfBBgBAgAJBQJbHi4/AhsMAAoJEDGP2Guq7b17EZMQ
+AJAWU7hFSZP1tlwDziRN2FSSd+jHV59oBxzC1MONhT9c0VneodzER/NaK+6N2H7+
+CpQRV6ePzG3iUPj2Wtz8U/eX42Ia8OwnmLM0voJH529ZLPv5tVxqMV+UgbssDMWu
+nTeLw/pg4Vy8bec0HHPMwsmooQJQXu+e9/5DoRrepaGMFXu+BpKOQDN8Gvlq0BEA
+v9ojEbhRlxflV11VBq57bj1d+5F+JOKexqJGsbZvnY86j7hAY9BIKUKaA4HmhKX2
+vRjM6izxVA0ivJM5X/3qHMakjARrUFnbQkfh+dWQo5NZfQMp5m6uhQwwfhGIYhII
+iqk0+zhbgix9DfK7muLdEvE4TxAQO4sc8s/EtEmfHFWVtZA0mf+6dqU9WrigMEq3
+yA1KmQYUjXR+/iax60nd5SacdFKyehhFSAE7dC8Sqp/wT+fkgcYzUEzkiMVqx53n
+PXfiDPR3VzC0FwBLgXyJLf3B9lGuICia1wcNc4oKiA68kxc3EPbcYxLtVMKRTvBf
+dxN6Siuv6yJzuGJaj/C1fPGVwzbt/x0L7CQw8W5k3XzZlp7bCYyd7AXNZZOvAEQ9
+mE6f5bevE4Bock2Ee72bg391F7sf/bFXAouOiUgyoVHmW2ZSMfLWx6fP8Dq2AmAM
+hP9muXjqvjO7SxxLReObjt/gs3HtjFUnJzTX/TlAaMq9
+=rPAq
+-END PGP PUBLIC KEY BLOCK-



[32/50] [abbrv] phoenix git commit: PHOENIX-3383 Comparison between descending row keys used in RVC is reverse

2018-07-25 Thread elserj
http://git-wip-us.apache.org/repos/asf/phoenix/blob/b91d7b0d/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java
index 87f00e4..a5287cb 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java
@@ -17,8 +17,6 @@
  */
 package org.apache.phoenix.compile;
 
-import static java.util.Collections.singletonList;
-
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -27,10 +25,14 @@ import java.util.Comparator;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Map;
+import java.util.NoSuchElementException;
 import java.util.Set;
 
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Pair;
 import org.apache.phoenix.expression.AndExpression;
 import org.apache.phoenix.expression.BaseExpression;
 import 
org.apache.phoenix.expression.BaseExpression.ExpressionComparabilityWrapper;
@@ -61,7 +63,6 @@ import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.RowKeySchema;
 import org.apache.phoenix.schema.SaltingUtil;
 import org.apache.phoenix.schema.SortOrder;
-import org.apache.phoenix.schema.ValueSchema.Field;
 import org.apache.phoenix.schema.tuple.Tuple;
 import org.apache.phoenix.schema.types.PChar;
 import org.apache.phoenix.schema.types.PDataType;
@@ -74,8 +75,11 @@ import org.apache.phoenix.util.SchemaUtil;
 
 import com.google.common.collect.Iterators;
 import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 
+import edu.umd.cs.findbugs.annotations.NonNull;
+
 
 /**
  *
@@ -115,6 +119,7 @@ public class WhereOptimizer {
RowKeySchema schema = table.getRowKeySchema();
boolean isMultiTenant = tenantId != null && table.isMultiTenant();
boolean isSharedIndex = table.getViewIndexId() != null;
+   ImmutableBytesWritable ptr = context.getTempPtr();

if (isMultiTenant) {
 tenantIdBytes = ScanUtil.getTenantIdBytes(schema, isSalted, 
tenantId, isSharedIndex);
@@ -158,31 +163,13 @@ public class WhereOptimizer {
 
 int pkPos = 0;
 int nPKColumns = table.getPKColumns().size();
-int[] slotSpan = new int[nPKColumns];
+int[] slotSpanArray = new int[nPKColumns];
 List> cnf = 
Lists.newArrayListWithExpectedSize(schema.getMaxFields());
-KeyRange minMaxRange = keySlots.getMinMaxRange();
-if (minMaxRange == null) {
-minMaxRange = KeyRange.EVERYTHING_RANGE;
-}
-boolean hasMinMaxRange = (minMaxRange != KeyRange.EVERYTHING_RANGE);
-int minMaxRangeOffset = 0;
-byte[] minMaxRangePrefix = null;
 boolean hasViewIndex = table.getViewIndexId() != null;
-if (hasMinMaxRange) {
-int minMaxRangeSize = (isSalted ? SaltingUtil.NUM_SALTING_BYTES : 
0)
-+ (isMultiTenant ? tenantIdBytes.length + 1 : 0)
-+ (hasViewIndex ? 
MetaDataUtil.getViewIndexIdDataType().getByteSize() : 0);
-minMaxRangePrefix = new byte[minMaxRangeSize];
-}
-
-Iterator iterator = keySlots.iterator();
+Iterator iterator = 
keySlots.getSlots().iterator();
 // Add placeholder for salt byte ranges
 if (isSalted) {
 cnf.add(SALT_PLACEHOLDER);
-if (hasMinMaxRange) {
-   System.arraycopy(SALT_PLACEHOLDER.get(0).getLowerRange(), 
0, minMaxRangePrefix, minMaxRangeOffset, SaltingUtil.NUM_SALTING_BYTES);
-   minMaxRangeOffset += SaltingUtil.NUM_SALTING_BYTES;
-}
 // Increment the pkPos, as the salt column is in the row schema
 // Do not increment the iterator, though, as there will never be
 // an expression in the keySlots for the salt column
@@ -194,35 +181,17 @@ public class WhereOptimizer {
 if (hasViewIndex) {
 byte[] viewIndexBytes = 
MetaDataUtil.getViewIndexIdDataType().toBytes(table.getViewIndexId());
 KeyRange indexIdKeyRange = KeyRange.getKeyRange(viewIndexBytes);
-cnf.add(singletonList(indexIdKeyRange));
-if (hasMinMaxRange) {
-System.arraycopy(viewIndexBytes, 0, minMaxRangePrefix, 
minMaxRangeOffset, viewIndexBytes.length);
-minMaxRangeOffset += viewIndexBytes.length;
-}
+cnf.add(Collections.singletonList(indexIdKeyRange));
 pkPos++;
 }
 
 // Add tenant data isolation for tenant

[47/50] [abbrv] phoenix git commit: PHOENIX-3534 Support multi region SYSTEM.CATALOG table (Thomas D'Silva and Rahul Gidwani)

2018-07-25 Thread elserj
PHOENIX-3534 Support multi region SYSTEM.CATALOG table (Thomas D'Silva and 
Rahul Gidwani)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d56fd3c9
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d56fd3c9
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d56fd3c9

Branch: refs/heads/master
Commit: d56fd3c991b0295aaa91ab916a0fdaed9c044402
Parents: 0af8b1e
Author: Thomas D'Silva 
Authored: Sat Jul 14 11:34:47 2018 -0700
Committer: Thomas D'Silva 
Committed: Wed Jul 18 11:23:26 2018 -0700

--
 .../coprocessor/MetaDataEndpointImplIT.java |  294 ++
 .../StatisticsCollectionRunTrackerIT.java   |2 +-
 .../AlterMultiTenantTableWithViewsIT.java   |  284 +-
 .../apache/phoenix/end2end/AlterTableIT.java|   45 +-
 .../phoenix/end2end/AlterTableWithViewsIT.java  |  541 ++--
 .../phoenix/end2end/AppendOnlySchemaIT.java |4 +-
 .../end2end/BaseTenantSpecificViewIndexIT.java  |   38 +-
 .../end2end/ExplainPlanWithStatsEnabledIT.java  |   69 +-
 .../MigrateSystemTablesToSystemNamespaceIT.java |   38 +-
 .../apache/phoenix/end2end/PhoenixDriverIT.java |   37 +-
 .../end2end/QueryDatabaseMetaDataIT.java|9 +-
 .../apache/phoenix/end2end/SaltedViewIT.java|   45 -
 .../phoenix/end2end/SplitSystemCatalogIT.java   |   80 +
 .../end2end/SplitSystemCatalogTests.java|   11 +
 .../StatsEnabledSplitSystemCatalogIT.java   |  244 ++
 .../SystemCatalogCreationOnConnectionIT.java|   34 +-
 .../apache/phoenix/end2end/SystemCatalogIT.java |   50 +-
 .../end2end/TenantSpecificTablesDDLIT.java  |   13 +-
 .../end2end/TenantSpecificViewIndexIT.java  |   68 +-
 .../org/apache/phoenix/end2end/UpgradeIT.java   |  322 +--
 .../java/org/apache/phoenix/end2end/ViewIT.java |  844 --
 .../phoenix/end2end/index/BaseIndexIT.java  |   43 +-
 .../index/ChildViewsUseParentViewIndexIT.java   |7 +-
 .../phoenix/end2end/index/DropColumnIT.java |  117 -
 .../phoenix/end2end/index/IndexMetadataIT.java  |4 +-
 .../phoenix/end2end/index/MutableIndexIT.java   |  876 +++---
 .../phoenix/end2end/index/ViewIndexIT.java  |   68 +-
 .../apache/phoenix/execute/PartialCommitIT.java |4 +-
 .../SystemCatalogWALEntryFilterIT.java  |   78 +-
 .../org/apache/phoenix/rpc/UpdateCacheIT.java   |9 +-
 .../ColumnNameTrackingExpressionCompiler.java   |   46 +
 .../phoenix/compile/CreateTableCompiler.java|2 +-
 .../apache/phoenix/compile/FromCompiler.java|   15 +-
 .../phoenix/compile/ListJarsQueryPlan.java  |2 +-
 .../apache/phoenix/compile/TraceQueryPlan.java  |2 +-
 .../apache/phoenix/compile/UnionCompiler.java   |2 +-
 .../apache/phoenix/compile/UpsertCompiler.java  |2 +-
 .../coprocessor/MetaDataEndpointImpl.java   | 2672 +-
 .../phoenix/coprocessor/MetaDataProtocol.java   |4 +-
 .../apache/phoenix/coprocessor/TableInfo.java   |   79 +
 .../coprocessor/TableViewFinderResult.java  |   48 +
 .../apache/phoenix/coprocessor/ViewFinder.java  |  144 +
 .../coprocessor/WhereConstantParser.java|  106 +
 .../coprocessor/generated/MetaDataProtos.java   |  626 +++-
 .../coprocessor/generated/PTableProtos.java |  323 ++-
 .../phoenix/expression/LikeExpression.java  |2 +-
 .../apache/phoenix/jdbc/PhoenixConnection.java  |8 +-
 .../phoenix/jdbc/PhoenixDatabaseMetaData.java   |  531 ++--
 .../apache/phoenix/jdbc/PhoenixStatement.java   |8 +-
 .../phoenix/parse/DropTableStatement.java   |8 +-
 .../apache/phoenix/parse/ParseNodeFactory.java  |2 +-
 .../phoenix/query/ConnectionQueryServices.java  |   17 +-
 .../query/ConnectionQueryServicesImpl.java  |   43 +-
 .../query/ConnectionlessQueryServicesImpl.java  |   13 +-
 .../query/DelegateConnectionQueryServices.java  |8 +-
 .../apache/phoenix/query/QueryConstants.java|   15 +-
 .../org/apache/phoenix/query/QueryServices.java |2 +
 .../phoenix/query/QueryServicesOptions.java |2 +
 .../SystemCatalogWALEntryFilter.java|   47 +-
 .../apache/phoenix/schema/DelegateColumn.java   |   15 +
 .../apache/phoenix/schema/MetaDataClient.java   |   57 +-
 .../phoenix/schema/MetaDataSplitPolicy.java |   26 +-
 .../java/org/apache/phoenix/schema/PColumn.java |   12 +
 .../org/apache/phoenix/schema/PColumnImpl.java  |  113 +-
 .../apache/phoenix/schema/PMetaDataImpl.java|3 +-
 .../java/org/apache/phoenix/schema/PTable.java  |   17 +-
 .../org/apache/phoenix/schema/PTableImpl.java   |  279 +-
 .../org/apache/phoenix/schema/PTableKey.java|4 +-
 .../schema/ParentTableNotFoundException.java|   30 +
 .../org/apache/phoenix/schema/SaltingUtil.java  |4 +-
 .../SplitOnLeadingVarCharColumnsPolicy.java |3 +
 .../apache/phoenix/schema/TableProperty.java|   22 +-
 .../java/org/apache/phoenix/util/IndexUtil.java |   16 +-
 .../org/apache/p

[24/50] [abbrv] phoenix git commit: PHOENIX-4399 Remove explicit abort on RegionServerServices (addendum)

2018-07-25 Thread elserj
PHOENIX-4399 Remove explicit abort on RegionServerServices (addendum)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b78f45c0
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b78f45c0
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b78f45c0

Branch: refs/heads/master
Commit: b78f45c03ed014297fe848b106da86e58f4aa3e0
Parents: aa2f12d
Author: Ankit Singhal 
Authored: Fri Jun 22 15:58:09 2018 -0700
Committer: Ankit Singhal 
Committed: Fri Jun 22 15:58:09 2018 -0700

--
 .../org/apache/phoenix/hbase/index/util/IndexManagementUtil.java   | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b78f45c0/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/util/IndexManagementUtil.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/util/IndexManagementUtil.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/util/IndexManagementUtil.java
index 2d65747..6c7966f 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/util/IndexManagementUtil.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/util/IndexManagementUtil.java
@@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;
 import org.apache.phoenix.hbase.index.ValueGetter;
+import 
org.apache.phoenix.hbase.index.builder.FatalIndexBuildingFailureException;
 import org.apache.phoenix.hbase.index.builder.IndexBuildingFailureException;
 import org.apache.phoenix.hbase.index.covered.Batch;
 import org.apache.phoenix.hbase.index.covered.data.LazyValueGetter;
@@ -200,6 +201,7 @@ public class IndexManagementUtil {
 LOG.info("Rethrowing " + e);
 throw e1;
 } catch (Throwable e1) {
+if (e1 instanceof FatalIndexBuildingFailureException) { throw 
(FatalIndexBuildingFailureException)e1; }
 LOG.info("Rethrowing " + e1 + " as a " + 
IndexBuildingFailureException.class.getSimpleName());
 throw new IndexBuildingFailureException("Failed to build index for 
unexpected reason!", e1);
 }



[13/50] [abbrv] phoenix git commit: Fix Apache RAT warnings

2018-07-25 Thread elserj
Fix Apache RAT warnings


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b19fde2c
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b19fde2c
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b19fde2c

Branch: refs/heads/4.x-HBase-1.4
Commit: b19fde2c5f2b4df957b9d880a1a7a7036f45453a
Parents: d6012ca
Author: Mujtaba 
Authored: Fri May 25 16:43:51 2018 -0700
Committer: Mujtaba 
Committed: Fri May 25 16:43:51 2018 -0700

--
 .../end2end/index/MutableIndexRebuilderIT.java| 18 ++
 1 file changed, 18 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b19fde2c/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexRebuilderIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexRebuilderIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexRebuilderIT.java
index 8420f16..a3af20d 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexRebuilderIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexRebuilderIT.java
@@ -1,3 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.phoenix.end2end.index;
 
 import static org.junit.Assert.assertEquals;



[35/50] [abbrv] phoenix git commit: PHOENIX-4809 Only cache PhoenixConnections when lease renewal is on

2018-07-25 Thread elserj
PHOENIX-4809 Only cache PhoenixConnections when lease renewal is on

Lease renewal is the only mechanism under which connections are removed
from the connectionQueue. Calling close() on a connection doesn't proactively
remove it from the instance of ConnectionQueryServicesImpl.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f46e8bbc
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f46e8bbc
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f46e8bbc

Branch: refs/heads/master
Commit: f46e8bbcd48674f7de217feeac2679a593b042be
Parents: ab930f4
Author: Josh Elser 
Authored: Wed Jul 11 17:02:46 2018 -0400
Committer: Josh Elser 
Committed: Thu Jul 12 13:21:02 2018 -0400

--
 .../phoenix/query/ConnectionCachingIT.java  | 87 
 .../query/ConnectionQueryServicesImpl.java  | 11 ++-
 2 files changed, 97 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f46e8bbc/phoenix-core/src/it/java/org/apache/phoenix/query/ConnectionCachingIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/query/ConnectionCachingIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/query/ConnectionCachingIT.java
new file mode 100644
index 000..b2ef052
--- /dev/null
+++ b/phoenix-core/src/it/java/org/apache/phoenix/query/ConnectionCachingIT.java
@@ -0,0 +1,87 @@
+package org.apache.phoenix.query;
+
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.assertTrue;
+
+import java.lang.ref.WeakReference;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.util.Arrays;
+import java.util.Properties;
+import java.util.concurrent.LinkedBlockingQueue;
+
+import org.apache.phoenix.end2end.ParallelStatsEnabledIT;
+import org.apache.phoenix.jdbc.PhoenixConnection;
+import org.apache.phoenix.query.ConnectionQueryServices;
+import org.apache.phoenix.query.ConnectionQueryServicesImpl;
+import org.apache.phoenix.query.DelegateConnectionQueryServices;
+import org.apache.phoenix.query.QueryServicesOptions;
+import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@RunWith(Parameterized.class)
+public class ConnectionCachingIT extends ParallelStatsEnabledIT {
+  private static final Logger LOG = 
LoggerFactory.getLogger(ConnectionCachingIT.class);
+
+  @Parameters(name= "phoenix.scanner.lease.renew.enabled={0}")
+  public static Iterable data() {
+return Arrays.asList("true", "false");
+  }
+
+  private String leaseRenewal;
+
+  public ConnectionCachingIT(String leaseRenewalValue) {
+this.leaseRenewal = leaseRenewalValue;
+  }
+
+  @Test
+  public void test() throws Exception {
+Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+props.put("phoenix.scanner.lease.renew.enabled", leaseRenewal);
+
+// The test driver works correctly, the real one doesn't.
+String url = getUrl();
+url = url.replace(";" + PhoenixRuntime.PHOENIX_TEST_DRIVER_URL_PARAM, "");
+LOG.info("URL to use is: {}", url);
+
+Connection conn = DriverManager.getConnection(url, props);
+long before = getNumCachedConnections(conn);
+for (int i = 0; i < 10_000; i++) {
+  Connection c = DriverManager.getConnection(url, props);
+  c.close();
+}
+
Thread.sleep(QueryServicesOptions.DEFAULT_RUN_RENEW_LEASE_FREQUENCY_INTERVAL_MILLISECONDS
 / 2);
+long after = getNumCachedConnections(conn);
+for (int i = 0; i < 6; i++) {
+  LOG.info("Found {} connections cached", after);
+  if (after <= before) {
+break;
+  }
+  
Thread.sleep(QueryServicesOptions.DEFAULT_RUN_RENEW_LEASE_FREQUENCY_INTERVAL_MILLISECONDS
 / 2);
+  after = getNumCachedConnections(conn);
+}
+assertTrue("Saw " + before + " connections, but ended with " + after, 
after <= before);
+  }
+
+  long getNumCachedConnections(Connection conn) throws Exception {
+PhoenixConnection pConn = conn.unwrap(PhoenixConnection.class);
+ConnectionQueryServices cqs = pConn.getQueryServices();
+// For whatever reason, we sometimes get a delegate here, and sometimes 
the real thing.
+if (cqs instanceof DelegateConnectionQueryServices) {
+  cqs = ((DelegateConnectionQueryServices) cqs).getDelegate();
+}
+assertTrue("ConnectionQueryServices was a " + cqs.getClass(), cqs 
instanceof ConnectionQueryServicesImpl);
+ConnectionQueryServicesImpl cqsi = (ConnectionQueryServicesImpl) cqs;
+long cachedConnections = 0L;
+for (LinkedBlockingQueue> q

[14/50] [abbrv] phoenix git commit: PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck

2018-07-25 Thread elserj
PHOENIX-4759 During restart RS that hosts SYSTEM.CATALOG table may get stuck


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/614c57d9
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/614c57d9
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/614c57d9

Branch: refs/heads/4.x-HBase-1.4
Commit: 614c57d91ba11bd8109dac769860e0186c8752bf
Parents: b19fde2
Author: Sergey Soldatov 
Authored: Thu May 31 12:07:29 2018 -0700
Committer: ss77892 
Committed: Thu May 31 12:36:29 2018 -0700

--
 .../phoenix/coprocessor/MetaDataEndpointImpl.java  |  6 +++---
 .../phoenix/coprocessor/MetaDataProtocol.java  | 16 +++-
 .../apache/phoenix/exception/SQLExceptionCode.java |  6 +++---
 .../index/write/ParallelWriterIndexCommitter.java  |  4 ++--
 .../TrackingParallelWriterIndexCommitter.java  |  4 ++--
 .../phoenix/index/PhoenixTransactionalIndexer.java |  4 ++--
 .../phoenix/jdbc/PhoenixDatabaseMetaData.java  | 17 +
 .../phoenix/query/ConnectionQueryServicesImpl.java |  4 ++--
 .../org/apache/phoenix/schema/MetaDataClient.java  |  4 ++--
 .../java/org/apache/phoenix/util/ScanUtil.java |  3 +--
 10 files changed, 33 insertions(+), 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/614c57d9/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index ff62c92..5e2e4df 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -946,12 +946,12 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 .getValueArray()[indexStateKv.getValueOffset()]);
 // If client is not yet up to 4.12, then translate PENDING_ACTIVE to 
ACTIVE (as would have been
 // the value in those versions) since the client won't have this index 
state in its enum.
-if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_ACTIVE_INDEX) {
+if (indexState == PIndexState.PENDING_ACTIVE && clientVersion < 
MetaDataProtocol.MIN_PENDING_ACTIVE_INDEX) {
 indexState = PIndexState.ACTIVE;
 }
 // If client is not yet up to 4.14, then translate PENDING_DISABLE to 
DISABLE
 // since the client won't have this index state in its enum.
-if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
PhoenixDatabaseMetaData.MIN_PENDING_DISABLE_INDEX) {
+if (indexState == PIndexState.PENDING_DISABLE && clientVersion < 
MetaDataProtocol.MIN_PENDING_DISABLE_INDEX) {
 // note: for older clients, we have to rely on the rebuilder to 
transition PENDING_DISABLE -> DISABLE
 indexState = PIndexState.DISABLE;
 }
@@ -3687,7 +3687,7 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 GetVersionResponse.Builder builder = GetVersionResponse.newBuilder();
 Configuration config = env.getConfiguration();
 if (isTablesMappingEnabled
-&& 
PhoenixDatabaseMetaData.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
+&& MetaDataProtocol.MIN_NAMESPACE_MAPPED_PHOENIX_VERSION > 
request.getClientVersion()) {
 logger.error("Old client is not compatible when" + " system tables 
are upgraded to map to namespace");
 ProtobufUtil.setControllerException(controller,
 ServerUtil.createIOException(

http://git-wip-us.apache.org/repos/asf/phoenix/blob/614c57d9/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
index 62b701d..883f96d 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataProtocol.java
@@ -93,7 +93,21 @@ public abstract class MetaDataProtocol extends 
MetaDataService {
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP_4_14_0 = 
MIN_TABLE_TIMESTAMP + 28;
 // MIN_SYSTEM_TABLE_TIMESTAMP needs to be set to the max of all the 
MIN_SYSTEM_TABLE_TIMESTAMP_* constants
 public static final long MIN_SYSTEM_TABLE_TIMESTAMP = 
MIN_SYSTEM_TABLE_TIMESTAMP

[16/50] [abbrv] phoenix git commit: PHOENIX-4762 Performance regression with transactional immutable indexes

2018-07-25 Thread elserj
PHOENIX-4762 Performance regression with transactional immutable indexes


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/14baca9d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/14baca9d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/14baca9d

Branch: refs/heads/4.x-HBase-1.4
Commit: 14baca9ded1d68a97c1f559806fa34dfe8c9415e
Parents: 614c57d
Author: James Taylor 
Authored: Fri Jun 1 09:03:21 2018 -0700
Committer: James Taylor 
Committed: Fri Jun 1 10:18:49 2018 -0700

--
 .../apache/phoenix/execute/MutationState.java   | 701 ++-
 .../PhoenixTxIndexMutationGenerator.java|   2 +-
 .../transaction/OmidTransactionContext.java |   6 +
 .../transaction/PhoenixTransactionContext.java  |   6 +
 .../transaction/TephraTransactionContext.java   |  16 +
 .../java/org/apache/phoenix/util/IndexUtil.java |  28 -
 6 files changed, 385 insertions(+), 374 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/14baca9d/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java 
b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
index 52e490e..2e795b1 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
@@ -38,10 +38,9 @@ import javax.annotation.Nonnull;
 import javax.annotation.concurrent.Immutable;
 
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.phoenix.util.KeyValueUtil;
 import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
@@ -83,7 +82,6 @@ import org.apache.phoenix.schema.PMetaData;
 import org.apache.phoenix.schema.PName;
 import org.apache.phoenix.schema.PRow;
 import org.apache.phoenix.schema.PTable;
-import org.apache.phoenix.schema.PTable.IndexType;
 import org.apache.phoenix.schema.PTableRef;
 import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.RowKeySchema;
@@ -98,6 +96,7 @@ import org.apache.phoenix.transaction.TransactionFactory;
 import org.apache.phoenix.transaction.TransactionFactory.Provider;
 import org.apache.phoenix.util.ByteUtil;
 import org.apache.phoenix.util.IndexUtil;
+import org.apache.phoenix.util.KeyValueUtil;
 import org.apache.phoenix.util.LogUtil;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.SQLCloseable;
@@ -116,9 +115,7 @@ import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 
 /**
- * 
  * Tracks the uncommitted state
- *
  */
 public class MutationState implements SQLCloseable {
 private static final Logger logger = 
LoggerFactory.getLogger(MutationState.class);
@@ -150,30 +147,34 @@ public class MutationState implements SQLCloseable {
 this(maxSize, maxSizeBytes, connection, false, null);
 }
 
-public MutationState(long maxSize, long maxSizeBytes, PhoenixConnection 
connection, PhoenixTransactionContext txContext) {
+public MutationState(long maxSize, long maxSizeBytes, PhoenixConnection 
connection,
+PhoenixTransactionContext txContext) {
 this(maxSize, maxSizeBytes, connection, false, txContext);
 }
 
 public MutationState(MutationState mutationState) {
-this(mutationState.maxSize,  mutationState.maxSizeBytes, 
mutationState.connection, true, mutationState.getPhoenixTransactionContext());
+this(mutationState.maxSize, mutationState.maxSizeBytes, 
mutationState.connection, true, mutationState
+.getPhoenixTransactionContext());
 }
 
 public MutationState(long maxSize, long maxSizeBytes, PhoenixConnection 
connection, long sizeOffset) {
 this(maxSize, maxSizeBytes, connection, false, null, sizeOffset);
 }
 
-private MutationState(long maxSize, long maxSizeBytes, PhoenixConnection 
connection, boolean subTask, PhoenixTransactionContext txContext) {
+private MutationState(long maxSize, long maxSizeBytes, PhoenixConnection 
connection, boolean subTask,
+PhoenixTransactionContext txContext) {
 this(maxSize, maxSizeBytes, connection, subTask, txContext, 0);
 }
 
-private MutationState(long maxSize, long maxSizeBytes, PhoenixConnection 
connection, boolean subTask, PhoenixTransactionContext txContext, long 
sizeOffset) {
-this(maxSize, maxSizeBytes, connection, Maps.newHashMapWithExpectedS

[37/50] [abbrv] phoenix git commit: Revert "PHOENIX-4790 Simplify check for client side delete"

2018-07-25 Thread elserj
Revert "PHOENIX-4790 Simplify check for client side delete"

This reverts commit d35a7519e65174978657b4c6254d595fac1b0009.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/bc4ca79e
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/bc4ca79e
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/bc4ca79e

Branch: refs/heads/4.x-HBase-1.4
Commit: bc4ca79ee0f7c1074f390c8ad8cd7b0bc51169f3
Parents: 164b2da
Author: James Taylor 
Authored: Thu Jul 12 19:53:04 2018 -0700
Committer: James Taylor 
Committed: Thu Jul 12 19:53:22 2018 -0700

--
 .../apache/phoenix/compile/DeleteCompiler.java  | 24 
 1 file changed, 19 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/bc4ca79e/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
index 78b2db9..5f9c76c 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
@@ -46,6 +46,7 @@ import org.apache.phoenix.execute.AggregatePlan;
 import org.apache.phoenix.execute.MutationState;
 import org.apache.phoenix.execute.MutationState.MultiRowMutationState;
 import org.apache.phoenix.execute.MutationState.RowMutationState;
+import org.apache.phoenix.filter.SkipScanFilter;
 import org.apache.phoenix.hbase.index.ValueGetter;
 import org.apache.phoenix.hbase.index.covered.update.ColumnReference;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
@@ -480,7 +481,6 @@ public class DeleteCompiler {
 projectedColumns.add(column);
 aliasedNodes.add(FACTORY.aliasedNode(null, FACTORY.column(null, 
'"' + column.getName().getString() + '"', null)));
 }
-boolean noQueryReqd = true;
 // Project all non PK indexed columns so that we can do the proper 
index maintenance
 for (PTable index : table.getIndexes()) {
 IndexMaintainer maintainer = index.getIndexMaintainer(table, 
connection);
@@ -492,8 +492,6 @@ public class DeleteCompiler {
 boolean hasNoColumnFamilies = 
table.getColumnFamilies().isEmpty();
 PColumn column = hasNoColumnFamilies ? 
table.getColumnForColumnName(columnName) : 
table.getColumnFamily(familyName).getPColumnForColumnName(columnName);
 if(!projectedColumns.contains(column)) {
-// We must run a query if any index contains a non pk 
column
-noQueryReqd = false;
 projectedColumns.add(column);
 aliasedNodes.add(FACTORY.aliasedNode(null, 
FACTORY.column(hasNoColumnFamilies ? null : TableName.create(null, familyName), 
'"' + columnName + '"', null)));
 }
@@ -513,7 +511,7 @@ public class DeleteCompiler {
 select = StatementNormalizer.normalize(transformedSelect, 
resolverToBe);
 }
 final boolean hasPreOrPostProcessing = hasPreProcessing || 
hasPostProcessing;
-noQueryReqd &= !hasPreOrPostProcessing;
+boolean noQueryReqd = !hasPreOrPostProcessing;
 // No limit and no sub queries, joins, etc in where clause
 // Can't run on same server for transactional data, as we need the row 
keys for the data
 // that is being upserted for conflict detection purposes.
@@ -552,8 +550,24 @@ public class DeleteCompiler {
 }
 
 runOnServer &= queryPlans.get(0).getTableRef().getTable().getType() != 
PTableType.INDEX;
-
+
+// We need to have all indexed columns available in all immutable 
indexes in order
+// to generate the delete markers from the query. We also cannot have 
any filters
+// except for our SkipScanFilter for point lookups.
+// A simple check of the non existence of a where clause in the parse 
node is not sufficient, as the where clause
+// may have been optimized out. Instead, we check that there's a 
single SkipScanFilter
+// If we can generate a plan for every index, that means all the 
required columns are available in every index,
+// hence we can drive the delete from any of the plans.
 noQueryReqd &= queryPlans.size() == 1 + clientSideIndexes.size();
+int queryPlanIndex = 0;
+while (noQueryReqd && queryPlanIndex < queryPlans.size()) {
+QueryPlan plan = queryPlans.get(queryPlanIndex++);
+StatementContext context = plan.getContext();
+noQueryReqd &= (!context.getScan().hasFilter()
+  

[39/50] [abbrv] phoenix git commit: PHOENIX-3534 Support multi region SYSTEM.CATALOG table (Thomas D'Silva and Rahul Gidwani)

2018-07-25 Thread elserj
http://git-wip-us.apache.org/repos/asf/phoenix/blob/d56fd3c9/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumnImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumnImpl.java 
b/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumnImpl.java
index 45aca98..a267629 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumnImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumnImpl.java
@@ -17,6 +17,7 @@
  */
 package org.apache.phoenix.schema;
 
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.phoenix.coprocessor.generated.PTableProtos;
 import org.apache.phoenix.query.QueryConstants;
@@ -42,36 +43,63 @@ public class PColumnImpl implements PColumn {
 private boolean isRowTimestamp;
 private boolean isDynamic;
 private byte[] columnQualifierBytes;
-
+private boolean derived;
+private long timestamp;
+
 public PColumnImpl() {
 }
 
-public PColumnImpl(PName name,
-   PName familyName,
-   PDataType dataType,
-   Integer maxLength,
-   Integer scale,
-   boolean nullable,
-   int position,
-   SortOrder sortOrder, Integer arrSize, byte[] 
viewConstant, boolean isViewReferenced, String expressionStr, boolean 
isRowTimestamp, boolean isDynamic, byte[] columnQualifierBytes) {
-init(name, familyName, dataType, maxLength, scale, nullable, position, 
sortOrder, arrSize, viewConstant, isViewReferenced, expressionStr, 
isRowTimestamp, isDynamic, columnQualifierBytes);
+public PColumnImpl(PColumn column, int position) {
+this(column, column.isDerived(), position);
 }
 
-public PColumnImpl(PColumn column, int position) {
+public PColumnImpl(PColumn column, byte[] viewConstant, boolean 
isViewReferenced) {
+this(column.getName(), column.getFamilyName(), column.getDataType(), 
column.getMaxLength(),
+column.getScale(), column.isNullable(), column.getPosition(), 
column.getSortOrder(), column.getArraySize(), viewConstant, isViewReferenced, 
column.getExpressionStr(), column.isRowTimestamp(), column.isDynamic(), 
column.getColumnQualifierBytes(),
+column.getTimestamp(), column.isDerived());
+}
+
+public PColumnImpl(PColumn column, boolean derivedColumn, int position) {
+this(column, derivedColumn, position, column.getViewConstant());
+}
+
+public PColumnImpl(PColumn column, boolean derivedColumn, int position, 
byte[] viewConstant) {
 this(column.getName(), column.getFamilyName(), column.getDataType(), 
column.getMaxLength(),
-column.getScale(), column.isNullable(), position, 
column.getSortOrder(), column.getArraySize(), column.getViewConstant(), 
column.isViewReferenced(), column.getExpressionStr(), column.isRowTimestamp(), 
column.isDynamic(), column.getColumnQualifierBytes());
+column.getScale(), column.isNullable(), position, 
column.getSortOrder(), column.getArraySize(), viewConstant, 
column.isViewReferenced(), column.getExpressionStr(), column.isRowTimestamp(), 
column.isDynamic(), column.getColumnQualifierBytes(),
+column.getTimestamp(), derivedColumn);
+}
+
+public PColumnImpl(PName name, PName familyName, PDataType dataType, 
Integer maxLength, Integer scale, boolean nullable,
+int position, SortOrder sortOrder, Integer arrSize, byte[] 
viewConstant, boolean isViewReferenced, String expressionStr, boolean 
isRowTimestamp, boolean isDynamic,
+byte[] columnQualifierBytes, long timestamp) {
+this(name, familyName, dataType, maxLength, scale, nullable, position, 
sortOrder, arrSize, viewConstant, isViewReferenced, expressionStr, 
isRowTimestamp, isDynamic, columnQualifierBytes, timestamp, false);
+}
+
+public PColumnImpl(PName name, PName familyName, PDataType dataType, 
Integer maxLength, Integer scale, boolean nullable,
+int position, SortOrder sortOrder, Integer arrSize, byte[] 
viewConstant, boolean isViewReferenced, String expressionStr, boolean 
isRowTimestamp, boolean isDynamic,
+byte[] columnQualifierBytes, long timestamp, boolean derived) {
+init(name, familyName, dataType, maxLength, scale, nullable, position, 
sortOrder, arrSize, viewConstant, isViewReferenced, expressionStr, 
isRowTimestamp, isDynamic, columnQualifierBytes, timestamp, derived);
+}
+
+private PColumnImpl(PName familyName, PName columnName, Long timestamp) {
+this.familyName = familyName;
+this.name = columnName;
+this.derived = true;
+if (timestamp!=null) {
+this.timestamp = timestamp;
+}
 }
 
-private void init(PName name,
-PName familyName,
-PDataType da

[26/50] [abbrv] phoenix git commit: PHOENIX-4786 Reduce log level to debug when logging new aggregate row key found and added results for scan ordered queries (Rajeshbabu)

2018-07-25 Thread elserj
PHOENIX-4786 Reduce log level to debug when logging new aggregate row key found 
and added results for scan ordered queries (Rajeshbabu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6acdae0f
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6acdae0f
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6acdae0f

Branch: refs/heads/4.x-HBase-1.4
Commit: 6acdae0ff1a63980f40fe1b794d40ab949cc423d
Parents: c233c15
Author: Rajeshbabu Chintaguntla 
Authored: Fri Jun 15 15:38:44 2018 -0700
Committer: Rajeshbabu Chintaguntla 
Committed: Fri Jun 15 15:38:44 2018 -0700

--
 .../phoenix/coprocessor/GroupedAggregateRegionObserver.java  | 8 
 1 file changed, 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6acdae0f/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/GroupedAggregateRegionObserver.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/GroupedAggregateRegionObserver.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/GroupedAggregateRegionObserver.java
index 86ab275..aefe916 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/GroupedAggregateRegionObserver.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/GroupedAggregateRegionObserver.java
@@ -534,14 +534,6 @@ public class GroupedAggregateRegionObserver extends 
BaseScannerRegionObserver {
 currentKey.getLength(), SINGLE_COLUMN_FAMILY, 
SINGLE_COLUMN,
 AGG_TIMESTAMP, value, 0, value.length);
 results.add(keyValue);
-if (logger.isInfoEnabled()) {
-logger.info(LogUtil.addCustomAnnotations("Adding new 
aggregate row: "
-+ keyValue
-+ ",for current key "
-+ Bytes.toStringBinary(currentKey.get(), 
currentKey.getOffset(),
-currentKey.getLength()) + ", aggregated 
values: "
-+ Arrays.asList(rowAggregators), 
ScanUtil.getCustomAnnotations(scan)));
-}
 // If we're at an aggregation boundary, reset the
 // aggregators and
 // aggregate with the current result (which is not a part 
of



[37/50] [abbrv] phoenix git commit: Revert "PHOENIX-4790 Simplify check for client side delete"

2018-07-25 Thread elserj
Revert "PHOENIX-4790 Simplify check for client side delete"

This reverts commit 35366b37106833b43f69ed712e0e3fd1635842cb.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/0af8b1e3
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/0af8b1e3
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/0af8b1e3

Branch: refs/heads/master
Commit: 0af8b1e32c940fe14a66f23240013b4d702d8ec6
Parents: 2b43bea
Author: James Taylor 
Authored: Thu Jul 12 20:10:10 2018 -0700
Committer: James Taylor 
Committed: Thu Jul 12 20:10:10 2018 -0700

--
 .../apache/phoenix/compile/DeleteCompiler.java  | 24 
 1 file changed, 19 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/0af8b1e3/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java 
b/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
index f4e8896..5ed4130 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
@@ -47,6 +47,7 @@ import org.apache.phoenix.execute.AggregatePlan;
 import org.apache.phoenix.execute.MutationState;
 import org.apache.phoenix.execute.MutationState.MultiRowMutationState;
 import org.apache.phoenix.execute.MutationState.RowMutationState;
+import org.apache.phoenix.filter.SkipScanFilter;
 import org.apache.phoenix.hbase.index.ValueGetter;
 import org.apache.phoenix.hbase.index.covered.update.ColumnReference;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
@@ -481,7 +482,6 @@ public class DeleteCompiler {
 projectedColumns.add(column);
 aliasedNodes.add(FACTORY.aliasedNode(null, FACTORY.column(null, 
'"' + column.getName().getString() + '"', null)));
 }
-boolean noQueryReqd = true;
 // Project all non PK indexed columns so that we can do the proper 
index maintenance
 for (PTable index : table.getIndexes()) {
 IndexMaintainer maintainer = index.getIndexMaintainer(table, 
connection);
@@ -493,8 +493,6 @@ public class DeleteCompiler {
 boolean hasNoColumnFamilies = 
table.getColumnFamilies().isEmpty();
 PColumn column = hasNoColumnFamilies ? 
table.getColumnForColumnName(columnName) : 
table.getColumnFamily(familyName).getPColumnForColumnName(columnName);
 if(!projectedColumns.contains(column)) {
-// We must run a query if any index contains a non pk 
column
-noQueryReqd = false;
 projectedColumns.add(column);
 aliasedNodes.add(FACTORY.aliasedNode(null, 
FACTORY.column(hasNoColumnFamilies ? null : TableName.create(null, familyName), 
'"' + columnName + '"', null)));
 }
@@ -514,7 +512,7 @@ public class DeleteCompiler {
 select = StatementNormalizer.normalize(transformedSelect, 
resolverToBe);
 }
 final boolean hasPreOrPostProcessing = hasPreProcessing || 
hasPostProcessing;
-noQueryReqd &= !hasPreOrPostProcessing;
+boolean noQueryReqd = !hasPreOrPostProcessing;
 // No limit and no sub queries, joins, etc in where clause
 // Can't run on same server for transactional data, as we need the row 
keys for the data
 // that is being upserted for conflict detection purposes.
@@ -553,8 +551,24 @@ public class DeleteCompiler {
 }
 
 runOnServer &= queryPlans.get(0).getTableRef().getTable().getType() != 
PTableType.INDEX;
-
+
+// We need to have all indexed columns available in all immutable 
indexes in order
+// to generate the delete markers from the query. We also cannot have 
any filters
+// except for our SkipScanFilter for point lookups.
+// A simple check of the non existence of a where clause in the parse 
node is not sufficient, as the where clause
+// may have been optimized out. Instead, we check that there's a 
single SkipScanFilter
+// If we can generate a plan for every index, that means all the 
required columns are available in every index,
+// hence we can drive the delete from any of the plans.
 noQueryReqd &= queryPlans.size() == 1 + clientSideIndexes.size();
+int queryPlanIndex = 0;
+while (noQueryReqd && queryPlanIndex < queryPlans.size()) {
+QueryPlan plan = queryPlans.get(queryPlanIndex++);
+StatementContext context = plan.getContext();
+noQueryReqd &= (!context.getScan().hasFilter()
+ 

[20/50] [abbrv] phoenix git commit: PHOENIX-4758 Validate that HADOOP_CONF_DIR is not set for HiveMRIT

2018-07-25 Thread elserj
PHOENIX-4758 Validate that HADOOP_CONF_DIR is not set for HiveMRIT


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d9007714
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d9007714
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d9007714

Branch: refs/heads/4.x-HBase-1.4
Commit: d9007714a7e62dec1d8d8af05b98685396ba4a42
Parents: a7adbbc
Author: Josh Elser 
Authored: Tue May 29 14:14:04 2018 -0400
Committer: Josh Elser 
Committed: Mon Jun 4 12:23:38 2018 -0400

--
 .../src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java | 8 
 1 file changed, 8 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/d9007714/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
--
diff --git 
a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java 
b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
index 644ff24..4bc5a7d 100644
--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
+++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
@@ -18,6 +18,10 @@
 
 package org.apache.phoenix.hive;
 
+import static org.junit.Assert.fail;
+
+import java.util.Map;
+
 import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
 import org.junit.BeforeClass;
 import org.junit.experimental.categories.Category;
@@ -28,6 +32,10 @@ public class HiveMapReduceIT extends HivePhoenixStoreIT {
 
 @BeforeClass
 public static void setUpBeforeClass() throws Exception {
+final String hadoopConfDir = System.getenv("HADOOP_CONF_DIR");
+if (hadoopConfDir != null && hadoopConfDir.length() != 0) {
+fail("HADOOP_CONF_DIR is non-empty in the current shell 
environment which will very likely cause this test to fail.");
+}
 setup(HiveTestUtil.MiniClusterType.mr);
 }
 }



[49/50] [abbrv] phoenix git commit: PHOENIX-4797 file not found or file exist exception when create global index use -snapshot option

2018-07-25 Thread elserj
PHOENIX-4797 file not found or file exist exception when create global index 
use -snapshot option


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f2781d43
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f2781d43
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f2781d43

Branch: refs/heads/master
Commit: f2781d43852557fa3a7b571047d13ab6f1530a7f
Parents: 360fb80
Author: 492066199 <492066...@qq.com>
Authored: Fri Jul 6 10:45:38 2018 +0800
Committer: Karan Mehta 
Committed: Wed Jul 25 10:17:16 2018 -0700

--
 .../org/apache/phoenix/iterate/TableSnapshotResultIterator.java  | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f2781d43/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableSnapshotResultIterator.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableSnapshotResultIterator.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableSnapshotResultIterator.java
index 984cb84..31746ce 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableSnapshotResultIterator.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/iterate/TableSnapshotResultIterator.java
@@ -24,6 +24,7 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
+import java.util.UUID;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -65,7 +66,8 @@ public class TableSnapshotResultIterator implements 
ResultIterator {
 this.scan = scan;
 this.scanMetricsHolder = scanMetricsHolder;
 this.scanIterator = UNINITIALIZED_SCANNER;
-this.restoreDir = new 
Path(configuration.get(PhoenixConfigurationUtil.RESTORE_DIR_KEY));
+this.restoreDir = new 
Path(configuration.get(PhoenixConfigurationUtil.RESTORE_DIR_KEY),
+UUID.randomUUID().toString());
 this.snapshotName = configuration.get(
 PhoenixConfigurationUtil.SNAPSHOT_NAME_KEY);
 this.rootDir = FSUtils.getRootDir(configuration);



[15/50] [abbrv] phoenix git commit: PHOENIX-4762 Performance regression with transactional immutable indexes

2018-07-25 Thread elserj
http://git-wip-us.apache.org/repos/asf/phoenix/blob/14baca9d/phoenix-core/src/main/java/org/apache/phoenix/transaction/PhoenixTransactionContext.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/transaction/PhoenixTransactionContext.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/transaction/PhoenixTransactionContext.java
index f3ad42f..751945a 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/transaction/PhoenixTransactionContext.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/transaction/PhoenixTransactionContext.java
@@ -108,6 +108,11 @@ public interface PhoenixTransactionContext {
 public HTableInterface getTransactionalTable(HTableInterface htable, 
boolean isImmutable) {
 return null;
 }
+
+@Override
+public HTableInterface getTransactionalTableWriter(HTableInterface 
htable, PTable table) {
+return null;
+}
 };
 /**
  * 
@@ -225,4 +230,5 @@ public interface PhoenixTransactionContext {
 public PhoenixTransactionContext 
newTransactionContext(PhoenixTransactionContext contex, boolean subTask);
 
 public HTableInterface getTransactionalTable(HTableInterface htable, 
boolean isImmutable);
+public HTableInterface getTransactionalTableWriter(HTableInterface htable, 
PTable table);
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/14baca9d/phoenix-core/src/main/java/org/apache/phoenix/transaction/TephraTransactionContext.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/transaction/TephraTransactionContext.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/transaction/TephraTransactionContext.java
index 8b16210..bc33cff 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/transaction/TephraTransactionContext.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/transaction/TephraTransactionContext.java
@@ -397,5 +397,21 @@ public class TephraTransactionContext implements 
PhoenixTransactionContext {
 this.addTransactionAware(transactionAwareHTable);
 return transactionAwareHTable;
 }
+
+@Override
+public HTableInterface getTransactionalTableWriter(HTableInterface htable, 
PTable table) {
+boolean isIndex = table.getType() == PTableType.INDEX;
+TransactionAwareHTable transactionAwareHTable = new 
TransactionAwareHTable(htable, table.isImmutableRows() || isIndex ? 
TxConstants.ConflictDetection.NONE : TxConstants.ConflictDetection.ROW);
+// Don't add immutable indexes (those are the only ones that would 
participate
+// during a commit), as we don't need conflict detection for these.
+if (isIndex) {
+transactionAwareHTable.startTx(getTransaction());
+} else {
+// Even for immutable, we need to do this so that an abort has the 
state
+// necessary to generate the rows to delete.
+this.addTransactionAware(transactionAwareHTable);
+}
+return transactionAwareHTable;
+}
 
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/14baca9d/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java 
b/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
index 7e280f4..78a68d2 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
@@ -42,7 +42,6 @@ import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HTableInterface;
@@ -116,7 +115,6 @@ import org.apache.phoenix.schema.types.PDecimal;
 import org.apache.phoenix.schema.types.PLong;
 import org.apache.phoenix.schema.types.PVarbinary;
 import org.apache.phoenix.schema.types.PVarchar;
-import org.apache.phoenix.transaction.PhoenixTransactionContext;
 
 import com.google.common.collect.Lists;
 
@@ -269,32 +267,6 @@ public class IndexUtil {
 .getLength()) == 0);
 }
 
-public static List generateDeleteIndexData(final PTable table, 
PTable index,
-List dataMutations, ImmutableBytesWritable ptr, final 
KeyValueBuilder kvBuilder, PhoenixConnection connection)
-throws SQLException {
-try {
-IndexMaintainer maintainer = index.getIndexMaintainer(table, 
connection);
-List indexMutations = 
Lists.newArrayListWithExpectedSize(dataMutations.size());
-for (final Mutation

[44/50] [abbrv] phoenix git commit: PHOENIX-3534 Support multi region SYSTEM.CATALOG table (Thomas D'Silva and Rahul Gidwani)

2018-07-25 Thread elserj
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c53d9ada/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
index 34292ba..fdfd75b 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
@@ -28,172 +28,119 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
+import java.io.IOException;
+import java.math.BigDecimal;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
 import java.util.List;
+import java.util.Map;
 import java.util.Properties;
 
+import org.apache.curator.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.coprocessor.ObserverContext;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver;
+import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Pair;
 import org.apache.phoenix.compile.QueryPlan;
+import org.apache.phoenix.exception.PhoenixIOException;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixConnection;
+import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
+import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.query.KeyRange;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.schema.ColumnAlreadyExistsException;
+import org.apache.phoenix.schema.PColumn;
+import org.apache.phoenix.schema.PTable;
+import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.ReadOnlyTableException;
 import org.apache.phoenix.schema.TableNotFoundException;
+import org.apache.phoenix.util.MetaDataUtil;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.QueryUtil;
+import org.apache.phoenix.util.ReadOnlyProps;
 import org.apache.phoenix.util.SchemaUtil;
+import org.apache.phoenix.util.TestUtil;
+import org.junit.BeforeClass;
 import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
 
+import com.google.common.base.Predicate;
+import com.google.common.collect.Collections2;
+import com.google.common.collect.Maps;
 
+@RunWith(Parameterized.class)
+public class ViewIT extends SplitSystemCatalogIT {
 
-public class ViewIT extends BaseViewIT {
-   
-public ViewIT(boolean transactional) {
-   super(transactional);
-   }
-
-@Test
-public void testReadOnlyOnReadOnlyView() throws Exception {
-Connection earlierCon = DriverManager.getConnection(getUrl());
-Connection conn = DriverManager.getConnection(getUrl());
-String ddl = "CREATE TABLE " + fullTableName + " (k INTEGER NOT NULL 
PRIMARY KEY, v1 DATE) "+ tableDDLOptions;
-conn.createStatement().execute(ddl);
-String fullParentViewName = "V_" + generateUniqueName();
-ddl = "CREATE VIEW " + fullParentViewName + " (v2 VARCHAR) AS SELECT * 
FROM " + fullTableName + " WHERE k > 5";
-conn.createStatement().execute(ddl);
-try {
-conn.createStatement().execute("UPSERT INTO " + fullParentViewName 
+ " VALUES(1)");
-fail();
-} catch (ReadOnlyTableException e) {
-
-}
-for (int i = 0; i < 10; i++) {
-conn.createStatement().execute("UPSERT INTO " + fullTableName + " 
VALUES(" + i + ")");
-}
-conn.commit();
-
-analyzeTable(conn, fullParentViewName, transactional);
-
-List splits = getAllSplits(conn, fullParentViewName);
-assertEquals(4, splits.size());
-
-int count = 0;
-ResultSet rs = conn.createStatement().executeQuery("SELECT k FROM " + 
fullTableName);
-while (rs.next()) {
-assertEquals(count++, rs.getInt(1));
-}
-assertEquals(10, count);
-
-count = 0;
-rs = conn.createStatement().executeQuery("SELECT k FROM " + 
fullParentViewName);
-while (rs.next()) {
-

[28/50] [abbrv] phoenix git commit: PHOENIX-4785 Unable to write to table if index is made active during retry

2018-07-25 Thread elserj
PHOENIX-4785 Unable to write to table if index is made active during retry


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6195f8e7
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6195f8e7
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6195f8e7

Branch: refs/heads/4.x-HBase-1.4
Commit: 6195f8e7b5efeecd5c736ba0ef121b706c875d8d
Parents: 9dbe20a
Author: Ankit Singhal 
Authored: Thu Jun 21 16:11:02 2018 -0700
Committer: Ankit Singhal 
Committed: Thu Jun 21 16:11:02 2018 -0700

--
 .../end2end/index/MutableIndexFailureIT.java| 128 ++-
 .../MutableIndexFailureWithNamespaceIT.java |  80 
 .../coprocessor/MetaDataEndpointImpl.java   |  30 +
 .../index/PhoenixIndexFailurePolicy.java|  71 +-
 .../phoenix/jdbc/PhoenixDatabaseMetaData.java   |   2 +-
 5 files changed, 276 insertions(+), 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6195f8e7/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexFailureIT.java
--
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexFailureIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexFailureIT.java
index dfbaf3f..8f88513 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexFailureIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexFailureIT.java
@@ -28,10 +28,16 @@ import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
@@ -104,10 +110,10 @@ public class MutableIndexFailureIT extends BaseTest {
 private final boolean throwIndexWriteFailure;
 private String schema = generateUniqueName();
 private List exceptions = Lists.newArrayList();
-private static RegionCoprocessorEnvironment 
indexRebuildTaskRegionEnvironment;
-private static final int forwardOverlapMs = 1000;
-private static final int disableTimestampThresholdMs = 1;
-private static final int numRpcRetries = 2;
+protected static RegionCoprocessorEnvironment 
indexRebuildTaskRegionEnvironment;
+protected static final int forwardOverlapMs = 1000;
+protected static final int disableTimestampThresholdMs = 1;
+protected static final int numRpcRetries = 2;
 
 public MutableIndexFailureIT(boolean transactional, boolean localIndex, 
boolean isNamespaceMapped, Boolean disableIndexOnWriteFailure, boolean 
failRebuildTask, Boolean throwIndexWriteFailure) {
 this.transactional = transactional;
@@ -127,6 +133,23 @@ public class MutableIndexFailureIT extends BaseTest {
 
 @BeforeClass
 public static void doSetup() throws Exception {
+Map serverProps = getServerProps();
+Map clientProps = Maps.newHashMapWithExpectedSize(2);
+clientProps.put(HConstants.HBASE_CLIENT_RETRIES_NUMBER, "2");
+NUM_SLAVES_BASE = 4;
+setUpTestDriver(new ReadOnlyProps(serverProps.entrySet().iterator()), 
new ReadOnlyProps(clientProps.entrySet().iterator()));
+indexRebuildTaskRegionEnvironment =
+(RegionCoprocessorEnvironment) getUtility()
+.getRSForFirstRegionInTable(
+
PhoenixDatabaseMetaData.SYSTEM_CATALOG_HBASE_TABLE_NAME)
+
.getOnlineRegions(PhoenixDatabaseMetaData.SYSTEM_CATALOG_HBASE_TABLE_NAME)
+.get(0).getCoprocessorHost()
+
.findCoprocessorEnvironment(MetaDataRegionObserver.class.getName());
+MetaDataRegionObserver.initRebuildIndexConnectionProps(
+indexRebuildTaskRegionEnvironment.getConfiguration());
+}
+
+protected static Map getServerProps(){
 Map serverProps = Maps.newHashMapWithExpectedSize(10);
 serverProps.put("hbase.coprocessor.region.classes", 
FailingRegionObserver.class.getName());
 serverProps.put(HConstants.HBASE_RPC_TIMEOUT_KEY, "1");
@@ -142,19 +165,7 @@ public class MutableIndexFailureIT extends BaseTest {
  * because we want to control it's execution ourselves
  */
 serverProps.put(QueryServices.INDEX_REBUILD_TASK_INITIAL_DELAY, 
Long.toString(Long.MAX_VALUE));
-Map clientProps = 

  1   2   >