This is an automated email from the ASF dual-hosted git repository.

stoty pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/phoenix-connectors.git


The following commit(s) were added to refs/heads/master by this push:
     new e10e8f6  PHOENIX-7366 Use Phoenix 5.2 to compile phoenix-connectors
e10e8f6 is described below

commit e10e8f6c8b9518918e6b427b16a3be9b19df17b5
Author: Istvan Toth <[email protected]>
AuthorDate: Fri Jul 19 15:59:38 2024 +0200

    PHOENIX-7366 Use Phoenix 5.2 to compile phoenix-connectors
    
    also refactor maven project setup
    also merge phoenix5-spark3-it back into the phoenix5-spark3 module
---
 phoenix5-hive-shaded/pom.xml                       |  60 +--
 phoenix5-hive/pom.xml                              | 503 +++++++++------------
 .../it/java/org/apache/phoenix/hive/HiveTezIT.java |   4 +
 .../src/{test => it}/resources/hbase-site.xml      |   0
 .../src/{test => it}/resources/hive-site.xml       |   0
 .../src/{test => it}/resources/log4j.properties    |   0
 .../src/{test => it}/resources/tez-site.xml        |   0
 .../phoenix/hive/mapreduce/PhoenixInputFormat.java |   4 +-
 .../hive/mapreduce/PhoenixRecordReader.java        |  10 +-
 .../org/apache/phoenix/hive/util/PhoenixUtil.java  |   2 +-
 phoenix5-spark-shaded/pom.xml                      |  29 +-
 phoenix5-spark/pom.xml                             | 336 ++++++--------
 .../org/apache/phoenix/spark/DataSourceApiIT.java  |  12 +-
 .../v2/reader/PhoenixDataSourceReader.java         |   1 -
 .../v2/reader/PhoenixInputPartitionReader.java     |   7 +-
 phoenix5-spark3-it/pom.xml                         | 260 -----------
 phoenix5-spark3-shaded/pom.xml                     |  33 +-
 phoenix5-spark3/pom.xml                            | 263 ++++++++---
 .../java/org/apache/phoenix/spark/AggregateIT.java |   0
 .../org/apache/phoenix/spark/DataSourceApiIT.java  |  13 +-
 .../java/org/apache/phoenix/spark/OrderByIT.java   |   0
 .../org/apache/phoenix/spark/SaltedTableIT.java    |   0
 .../java/org/apache/phoenix/spark/SparkUtil.java   |   0
 .../sql/connector/PhoenixTestingDataSource.java    |   0
 .../spark/sql/connector/PhoenixTestingTable.java   |   0
 .../reader/PhoenixTestPartitionReadFactory.java    |   0
 .../reader/PhoenixTestPartitionReader.java         |   0
 .../sql/connector/reader/PhoenixTestScan.java      |   0
 .../connector/reader/PhoenixTestScanBuilder.java   |   0
 .../connector/writer/PhoenixTestBatchWrite.java    |   0
 .../connector/writer/PhoenixTestDataWriter.java    |   0
 .../writer/PhoenixTestDataWriterFactory.java       |   0
 .../connector/writer/PhoenixTestWriteBuilder.java  |   0
 .../writer/PhoenixTestingWriterCommitMessage.java  |   0
 .../src/it/resources/globalSetup.sql               |   0
 .../src/it/resources/log4j.xml                     |   0
 .../src/it/resources/tenantSetup.sql               |   0
 .../src/it/resources/transactionTableSetup.sql     |   0
 .../phoenix/spark/AbstractPhoenixSparkIT.scala     |   0
 .../org/apache/phoenix/spark/PhoenixSparkIT.scala  |   0
 .../spark/PhoenixSparkITTenantSpecific.scala       |   0
 .../connector/reader/PhoenixPartitionReader.java   |   7 +-
 pom.xml                                            | 273 +++++++----
 43 files changed, 856 insertions(+), 961 deletions(-)
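
The Java hunks below mostly track an API move in Phoenix 5.2: constants that
phoenix-connectors used to read from the server-side class
org.apache.phoenix.coprocessor.BaseScannerRegionObserver now come from
org.apache.phoenix.coprocessorclient.BaseScannerRegionObserverConstants (and
MetaDataProtocol moved to the same coprocessorclient package), matching the
phoenix-core split into phoenix-core-client and phoenix-core-server seen in the
pom.xml hunks. A minimal sketch of the adaptation, using only imports and
constants that appear in this diff (the class and method names of the sketch
itself are illustrative):

    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;
    // Phoenix 5.1: import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
    import org.apache.phoenix.coprocessorclient.BaseScannerRegionObserverConstants;

    class ScanAttributeSketch { // illustrative, not part of the commit
        static void skipBoundaryCheck(Scan scan) {
            // Same attribute key as before; only the defining class moved.
            scan.setAttribute(BaseScannerRegionObserverConstants.SKIP_REGION_BOUNDARY_CHECK,
                    Bytes.toBytes(true));
        }
    }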

diff --git a/phoenix5-hive-shaded/pom.xml b/phoenix5-hive-shaded/pom.xml
index 52fdade..b1be99a 100644
--- a/phoenix5-hive-shaded/pom.xml
+++ b/phoenix5-hive-shaded/pom.xml
@@ -48,16 +48,16 @@
       <artifactId>phoenix-hbase-compat-${hbase.compat.version}</artifactId>
       <scope>runtime</scope>
     </dependency>
-    <!-- maven-shade-plugin doesn't inherit dependency settings, we must duplicate them to avoid
-    adding the provided dependencies -->
+    <!-- maven-shade-plugin doesn't inherit dependency settings, we must
+      duplicate them to avoid adding the provided dependencies -->
     <dependency>
       <groupId>org.apache.phoenix</groupId>
       <artifactId>phoenix-hbase-compat-${hbase.compat.version}</artifactId>
       <scope>runtime</scope>
       <optional>true</optional>
     </dependency>
-    <!-- Hive dependencies cannot be specified in the phoenix-hive-common, because of
-    hbase transitive dependency version conflicts -->
+    <!-- Hive dependencies cannot be specified in the phoenix-hive-common,
+      because of hbase transitive dependency version conflicts -->
     <dependency>
       <groupId>org.apache.hive</groupId>
       <artifactId>hive-cli</artifactId>
@@ -115,11 +115,12 @@
         </exclusion>
       </exclusions>
     </dependency>
-    <!-- Dependencies below should be the same for Hive, Spark and Spark3 shading config -->
-    <!-- Phoenix excludes commons-beanutils from the Omid dependency, but that's basically a bug.
-     We need to add it back, so that we don't depend on hadoop's common-beanutils, which may or
-     may not be shaded.
-     This can be removed once we use a Phoenix version that doesn't have this problem -->
+    <!-- Dependencies below should be the same for Hive, Spark and Spark3
+      shading config -->
+    <!-- Phoenix excludes commons-beanutils from the Omid dependency, but
+      that's basically a bug. We need to add it back, so that we don't depend on
+      hadoop's common-beanutils, which may or may not be shaded. This can be removed
+      once we use a Phoenix version that doesn't have this problem -->
     <dependency>
       <groupId>commons-beanutils</groupId>
       <artifactId>commons-beanutils</artifactId>
@@ -131,10 +132,10 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
       <exclusions>
-          <exclusion>
-              <groupId>commons-beanutils</groupId>
-              <artifactId>commons-beanutils</artifactId>
-          </exclusion>
+        <exclusion>
+          <groupId>commons-beanutils</groupId>
+          <artifactId>commons-beanutils</artifactId>
+        </exclusion>
       </exclusions>
       <scope>provided</scope>
     </dependency>
@@ -291,8 +292,8 @@
       <artifactId>protobuf-java</artifactId>
       <scope>provided</scope>
     </dependency>
-    <!-- Other dependencies we don't want to shade in, but are not transitively excluded by the
-    above for some reason -->
+    <!-- Other dependencies we don't want to shade in, but are not transitively
+      excluded by the above for some reason -->
     <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
@@ -308,7 +309,7 @@
       <scope>provided</scope>
     </dependency>
   </dependencies>
-  
+
   <build>
     <plugins>
       <plugin>
@@ -362,26 +363,26 @@
           </filters>
           <transformers>
             <transformer
-                    implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
+              implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
             <transformer
-                    implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
+              implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
               <resource>csv-bulk-load-config.properties</resource>
               <file>
                 ${project.basedir}/../config/csv-bulk-load-config.properties
               </file>
             </transformer>
             <transformer
-                    implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
+              implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
               <resource>README.md</resource>
               <file>${project.basedir}/../README.md</file>
             </transformer>
             <transformer
-                    implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
+              implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
               <resource>LICENSE.txt</resource>
               <file>${project.basedir}/../LICENSE</file>
             </transformer>
             <transformer
-                    implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
+              implementation="org.apache.maven.plugins.shade.resource.IncludeResourceTransformer">
               <resource>NOTICE</resource>
               <file>${project.basedir}/../NOTICE</file>
             </transformer>
@@ -412,11 +413,10 @@
                 <exclude>org/apache/phoenix/**</exclude>
                 <exclude>org/apache/omid/**</exclude>
                 <!-- Do want/need to expose Tephra as well ? -->
-                <!-- See PHOENIX-7118
-                 Depending on the Spark classpath we may need to leave this unshaded, relocate
-                 it under org/apache/hadoop/shaded/ or under org/apache/hadoop/hbase/shaded/.
-                 The only thing that is guaranteed not to work is relocating it under
-                 ${shaded.package} -->
+                <!-- See PHOENIX-7118 Depending on the Spark classpath we
+                  may need to leave this unshaded, relocate it under org/apache/hadoop/shaded/
+                  or under org/apache/hadoop/hbase/shaded/. The only thing that is guaranteed
+                  not to work is relocating it under ${shaded.package} -->
                 <exclude>org/apache/commons/configuration2/**</exclude>
               </excludes>
             </relocation>
@@ -451,8 +451,8 @@
               <pattern>com/google/inject/</pattern>
               <shadedPattern>${shaded.package}.com.google.inject.</shadedPattern>
             </relocation>
-            <!-- This is protobuf 2.5.0 which is shaded to this package in hbase-shaded-client,
-             not the modified protobuf 3.x from hbase-thirdparty -->
+            <!-- This is protobuf 2.5.0 which is shaded to this package in
+              hbase-shaded-client, not the modified protobuf 3.x from hbase-thirdparty -->
             <relocation>
               <pattern>com/google/protobuf/</pattern>
               <shadedPattern>${hbase.shaded.package}.com.google.protobuf.</shadedPattern>
@@ -579,8 +579,8 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <executions>
           <execution>
-             <id>default-compile</id>
-             <phase>none</phase>
+            <id>default-compile</id>
+            <phase>none</phase>
           </execution>
         </executions>
       </plugin>
diff --git a/phoenix5-hive/pom.xml b/phoenix5-hive/pom.xml
index 19a20cf..49b734c 100644
--- a/phoenix5-hive/pom.xml
+++ b/phoenix5-hive/pom.xml
@@ -31,206 +31,145 @@
     <version>6.0.0-SNAPSHOT</version>
   </parent>
   <artifactId>phoenix5-hive</artifactId>
-  <name>Phoenix Hive Connector for Phoenix 5</name>
+  <name>Phoenix Hive 3 Connector for Phoenix 5</name>
 
   <properties>
     <top.dir>${project.basedir}/..</top.dir>
     <test.tmp.dir>${project.build.directory}/tmp</test.tmp.dir>
     <tez.version>0.9.1</tez.version>
     <commons-lang3.version>3.9</commons-lang3.version>
-    <hive.version>${hive3.version}</hive.version>
     <calcite.version>1.16.0</calcite.version>
   </properties>
 
   <dependencies>
+    <!-- Phoenix dependencies -->
     <dependency>
       <groupId>org.apache.phoenix</groupId>
-      <artifactId>phoenix-hbase-compat-${hbase.compat.version}</artifactId>
-      <scope>runtime</scope>
-      <optional>true</optional>
+      <artifactId>phoenix-core-client</artifactId>
     </dependency>
-
     <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-cli</artifactId>
-      <version>${hive.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-common</artifactId>
-      <version>${hive.version}</version>
-      <scope>provided</scope>
+      <groupId>org.apache.phoenix</groupId>
+      <artifactId>phoenix-core-server</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-exec</artifactId>
-      <version>${hive.version}</version>
-      <scope>provided</scope>
+      <groupId>org.apache.phoenix.thirdparty</groupId>
+      <artifactId>phoenix-shaded-guava</artifactId>
     </dependency>
 
-    <!-- These three dependencies are falsely flagged by dependency plugin -->
-    <dependency>
-      <groupId> org.apache.hive</groupId>
-      <artifactId>hive-serde</artifactId>
-      <version>${hive.version}</version>
-      <exclusions>
-        <!-- Fix conflict with Minicluster -->
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>*</artifactId>
-        </exclusion>
-      </exclusions>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId> org.apache.hive</groupId>
-      <artifactId>hive-storage-api</artifactId>
-      <version>${hive-storage.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId> org.apache.hive.shims</groupId>
-      <artifactId>hive-shims-common</artifactId>
-      <version>${hive.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <!-- These are actually different between phoenix 5 and phoenix 4 -->
-    <dependency>
-      <groupId> org.apache.hive</groupId>
-      <artifactId>hive-standalone-metastore</artifactId>
-      <version>${hive.version}</version>
-      <scope>provided</scope>
-    </dependency>
+    <!-- HBase dependencies -->
     <dependency>
       <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-mapreduce</artifactId>
+      <artifactId>hbase-protocol</artifactId>
       <scope>provided</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-zookeeper</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <artifactId>hbase-client</artifactId>
       <scope>provided</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-protocol-shaded</artifactId>
+      <artifactId>hbase-common</artifactId>
       <scope>provided</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-endpoint</artifactId>
+      <artifactId>hbase-mapreduce</artifactId>
       <scope>provided</scope>
     </dependency>
+
+    <!-- Hadoop dependencies -->
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-hdfs-client</artifactId>
+      <artifactId>hadoop-common</artifactId>
       <scope>provided</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-minicluster</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.zookeeper</groupId>
-      <artifactId>zookeeper</artifactId>
-      <version>${zookeeper.version}</version>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
       <scope>provided</scope>
     </dependency>
 
+    <!-- Hive dependencies -->
     <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-testing-util</artifactId>
-      <scope>test</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.apache.phoenix</groupId>
-      <artifactId>phoenix-core</artifactId>
-    </dependency>
-
-    <dependency>
-      <groupId>org.apache.phoenix.thirdparty</groupId>
-      <artifactId>phoenix-shaded-guava</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-api</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.commons</groupId>
-      <artifactId>commons-lang3</artifactId>
-      <version>${commons-lang3.version}</version>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-common</artifactId>
+      <version>${hive.version}</version>
+      <scope>provided</scope>
+      <exclusions>
+        <!-- Hadoop won't work with Guava 19 -->
+        <exclusion>
+          <groupId>com.google.guava</groupId>
+          <artifactId>guava</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>*</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-protocol</artifactId>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${hive.version}</version>
       <scope>provided</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>*</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
-
     <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-client</artifactId>
+      <groupId> org.apache.hive</groupId>
+      <artifactId>hive-storage-api</artifactId>
+      <version>${hive-storage.version}</version>
       <scope>provided</scope>
     </dependency>
     <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-common</artifactId>
+      <groupId> org.apache.hive.shims</groupId>
+      <artifactId>hive-shims-common</artifactId>
+      <version>${hive.version}</version>
       <scope>provided</scope>
     </dependency>
+    <!-- Needed to get the older Derby that Hive requires -->
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-common</artifactId>
+      <groupId> org.apache.hive</groupId>
+      <artifactId>hive-standalone-metastore</artifactId>
+      <version>${hive.version}</version>
       <scope>provided</scope>
     </dependency>
 
+    <!-- logging API -->
     <dependency>
       <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <artifactId>slf4j-api</artifactId>
       <scope>provided</scope>
     </dependency>
 
-    <!-- Test dependencies -->
-    <dependency>
-      <groupId>com.google.code.findbugs</groupId>
-      <artifactId>jsr305</artifactId>
-      <version>3.0.0</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.commons</groupId>
-      <artifactId>commons-compress</artifactId>
-      <version>${commons-compress.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>commons-io</groupId>
-      <artifactId>commons-io</artifactId>
-      <version>${commons-io.version}</version>
-      <scope>test</scope>
-    </dependency>
+    <!-- Phoenix Test dependencies -->
     <dependency>
       <groupId>org.apache.phoenix</groupId>
       <artifactId>phoenix-core</artifactId>
       <classifier>tests</classifier>
-      <scope>test</scope>
     </dependency>
+
     <dependency>
-      <groupId>org.apache.omid</groupId>
-      <artifactId>omid-tso-server-hbase2.x</artifactId>
+      <groupId>org.apache.phoenix</groupId>
+      <artifactId>phoenix-hbase-compat-${hbase.compat.version}</artifactId>
       <scope>test</scope>
     </dependency>
+
+    <!-- HBase test dependencies -->
     <dependency>
-      <groupId>org.apache.omid</groupId>
-      <artifactId>omid-tso-server-hbase2.x</artifactId>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-zookeeper</artifactId>
       <scope>test</scope>
-      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-testing-util</artifactId>
+      <scope>test</scope> <!-- ? -->
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
@@ -244,6 +183,8 @@
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
+
+    <!-- Hadoop test dependencies -->
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
@@ -254,179 +195,140 @@
         </exclusion>
       </exclusions>
       <type>test-jar</type>
+      <scope>test</scope> <!-- ? -->
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <version>${commons-lang3.version}</version>
+    </dependency>
+
+    <!-- Hive test dependencies -->
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-cli</artifactId>
+      <version>${hive.version}</version>
       <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>*</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
+
+    <!-- Maybe needed for (ignored) Tez Test ? -->
+    <!-- <dependency> -->
+    <!-- <groupId> org.apache.hive</groupId> -->
+    <!-- <artifactId>hive-llap-client</artifactId> -->
+    <!-- <version>${hive.version}</version> -->
+    <!-- <exclusions> -->
+    <!-- <exclusion> -->
+    <!-- <groupId>com.google.guava</groupId> -->
+    <!-- <artifactId>guava</artifactId> -->
+    <!-- </exclusion> -->
+    <!-- </exclusions> -->
+    <!-- <scope>test</scope> -->
+    <!-- </dependency> -->
+    <!-- <dependency> -->
+    <!-- <groupId> org.apache.hive</groupId> -->
+    <!-- <artifactId>hive-serde</artifactId> -->
+    <!-- <version>${hive.version}</version> -->
+    <!-- <exclusions> -->
+    <!-- Fix conflict with Minicluster -->
+    <!-- <exclusion> -->
+    <!-- <groupId>io.netty</groupId> -->
+    <!-- <artifactId>*</artifactId> -->
+    <!-- </exclusion> -->
+    <!-- </exclusions> -->
+    <!-- <scope>test</scope> -->
+    <!-- </dependency> -->
+    <!-- <dependency> -->
+    <!-- <groupId> org.apache.hive</groupId> -->
+    <!-- <artifactId>hive-storage-api</artifactId> -->
+    <!-- <version>${hive-storage.version}</version> -->
+    <!-- <scope>test</scope> -->
+    <!-- </dependency> -->
+
+    <!-- Misc test dependencies -->
     <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
+      <groupId>org.apache.zookeeper</groupId>
+      <artifactId>zookeeper</artifactId>
+      <version>${zookeeper.version}</version>
       <scope>test</scope>
     </dependency>
+
     <dependency>
-      <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
+      <groupId>com.google.code.findbugs</groupId>
+      <artifactId>jsr305</artifactId>
+      <version>3.0.0</version>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.apache.tez</groupId>
-      <artifactId>tez-tests</artifactId>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-compress</artifactId>
+      <version>${commons-compress.version}</version>
       <scope>test</scope>
-      <version>${tez.version}</version>
-      <type>test-jar</type>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-api</artifactId>
-        </exclusion>
-      </exclusions>
     </dependency>
     <dependency>
-      <groupId>org.apache.tez</groupId>
-      <artifactId>tez-dag</artifactId>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+      <version>${commons-io.version}</version>
       <scope>test</scope>
-      <version>${tez.version}</version>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-api</artifactId>
-        </exclusion>
-      </exclusions>
     </dependency>
+
+    <!-- Logging test dependencies -->
     <dependency>
       <groupId>org.apache.logging.log4j</groupId>
       <artifactId>log4j-api</artifactId>
-      <scope>provided</scope>
+      <scope>test</scope>
+      <version>${log4j2.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <scope>test</scope>
       <version>${log4j2.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.logging.log4j</groupId>
       <artifactId>log4j-core</artifactId>
-      <scope>provided</scope>
+      <scope>test</scope>
       <version>${log4j2.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.logging.log4j</groupId>
       <artifactId>log4j-slf4j-impl</artifactId>
-      <scope>provided</scope>
+      <scope>test</scope>
       <version>${log4j2.version}</version>
     </dependency>
 
-
-<!-- Mark every HBase and Hadoop jar as provided -->
     <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-annotations</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-metrics-api</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-metrics</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-server</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-hadoop-compat</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-hadoop2-compat</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hbase.thirdparty</groupId>
-      <artifactId>hbase-shaded-miscellaneous</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hbase.thirdparty</groupId>
-      <artifactId>hbase-shaded-protobuf</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-annotations</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-auth</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-yarn-api</artifactId>
-      <scope>provided</scope>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
     </dependency>
+
     <dependency>
-      <groupId>com.google.protobuf</groupId>
-      <artifactId>protobuf-java</artifactId>
-      <scope>provided</scope>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-core</artifactId>
+      <scope>test</scope>
     </dependency>
   </dependencies>
 
   <build>
     <plugins>
-      <plugin>
-        <artifactId>maven-dependency-plugin</artifactId>
-        <configuration>
-          <ignoreNonCompile>true</ignoreNonCompile>
-          <ignoredUnusedDeclaredDependencies>
-            <ignoredUnusedDeclaredDependency>
-               org.slf4j:slf4j-api
-            </ignoredUnusedDeclaredDependency>
-            <ignoredUnusedDeclaredDependency>
-              org.apache.commons:commons-lang3
-            </ignoredUnusedDeclaredDependency>
-          </ignoredUnusedDeclaredDependencies>
-          <ignoredUsedUndeclaredDependencies>
-            <!-- I couldn't find it referenced anywhere in the phoenix-hive codebase -->
-            <ignoredUsedUndeclaredDependency>
-              org.apache.calcite.avatica:avatica
-            </ignoredUsedUndeclaredDependency>
-            <ignoredUnusedDeclaredDependency>
-              org.antlr:antlr-runtime
-            </ignoredUnusedDeclaredDependency>
-          </ignoredUsedUndeclaredDependencies>
-          <ignoredNonTestScopedDependencies>
-              org.antlr:antlr-runtime
-          </ignoredNonTestScopedDependencies>
-        </configuration>
-      </plugin>
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>build-helper-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>add-parent-test-source</id>
-            <phase>generate-sources</phase>
-            <goals>
-              <goal>add-test-source</goal>
-            </goals>
-            <configuration>
-              <sources>
-                <source>${project.parent.basedir}/src/it/java</source>
-                <source>${project.parent.basedir}/src/test/java</source>
-              </sources>
-            </configuration>
-          </execution>
-        </executions>
       </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-failsafe-plugin</artifactId>
         <configuration>
           <!-- The Hbase + Hive minicluster setup seems very fragile, so
-          we make sure to run everything as serially as possible -->
+            we make sure to run everything as serially as possible -->
           <forkCount>1</forkCount>
           <reuseForks>false</reuseForks>
         </configuration>
@@ -436,7 +338,8 @@
             <configuration>
               <forkCount>1</forkCount>
               <reuseForks>false</reuseForks>
-              <groups>org.apache.phoenix.end2end.ParallelStatsDisabledTest</groups>
+              <groups>
+                org.apache.phoenix.end2end.ParallelStatsDisabledTest</groups>
             </configuration>
             <goals>
               <goal>integration-test</goal>
@@ -446,42 +349,83 @@
         </executions>
       </plugin>
       <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-resources-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>copy-resources</id>
-            <phase>generate-resources</phase>
-            <goals>
-              <goal>copy-resources</goal>
-            </goals>
-            <configuration>
-              <outputDirectory>${project.build.directory}/test-classes
-              </outputDirectory>
-              <overwrite>true</overwrite>
-              <resources>
-                <resource>
-                  <directory>${project.parent.basedir}/src/test/resources</directory>
-                </resource>
-              </resources>
-            </configuration>
-          </execution>
-        </executions>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <configuration>
+          <ignoredUnusedDeclaredDependencies>
+            <ignoredUnusedDeclaredDependency>
+              org.apache.hbase:hbase-testing-util
+            </ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>
+              org.apache.hbase:hbase-it:test-jar
+            </ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>
+              org.apache.hadoop:hadoop-hdfs
+            </ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>
+              org.apache.phoenix:phoenix-hbase-compat-${hbase.compat.version}
+            </ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>
+              org.apache.hive.shims:hive-shims-common
+            </ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>
+              org.apache.hive:hive-standalone-metastore
+            </ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>
+              org.apache.hive:hive-storage-api
+            </ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>
+              org.apache.logging.log4j:log4j-core
+            </ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>
+              org.apache.logging.log4j:log4j-slf4j-impl
+            </ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>
+              org.apache.logging.log4j:log4j-1.2-api
+            </ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>
+              org.apache.commons:commons-lang3
+            </ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>
+              org.slf4j:slf4j-api
+            </ignoredUnusedDeclaredDependency>
+          </ignoredUnusedDeclaredDependencies>
+          <ignoredUsedUndeclaredDependencies>
+            <ignoredUsedUndeclaredDependency>
+              org.antlr:antlr-runtime
+            </ignoredUsedUndeclaredDependency>
+            <!-- I couldn't find it referenced anywhere in the phoenix-hive codebase -->
+            <ignoredUsedUndeclaredDependency>
+              org.apache.calcite.avatica:avatica
+            </ignoredUsedUndeclaredDependency>
+          </ignoredUsedUndeclaredDependencies>
+          <ignoredNonTestScopedDependencies>
+            <ignoredNonTestScopedDependency>
+              org.antlr:antlr-runtime
+            </ignoredNonTestScopedDependency>
+          </ignoredNonTestScopedDependencies>
+        </configuration>
       </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-compiler-plugin</artifactId>
+        <!-- This may not work due to bootclasspath issues. Make sure to
+          compile phoenix5-hive with JDK8 -->
         <configuration>
           <source>8</source>
           <target>8</target>
         </configuration>
       </plugin>
-      <!-- Taken from phoenix-client-parent
-      this should be kept in sync with Phoenix as much as possible -->
     </plugins>
   </build>
 
-   <dependencyManagement>
+  <dependencyManagement>
     <dependencies>
       <dependency>
         <groupId>org.apache.calcite</groupId>
@@ -489,7 +433,8 @@
         <version>${calcite.version}</version>
         <exclusions>
           <exclusion>
-            <!-- PHOENIX-6478: exclude a dependency that is not required and not available at Maven Central -->
+            <!-- PHOENIX-6478: exclude a dependency that is not required
+              and not available at Maven Central -->
             <groupId>org.pentaho</groupId>
             <artifactId>pentaho-aggdesigner-algorithm</artifactId>
           </exclusion>
diff --git a/phoenix5-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java b/phoenix5-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
index 2931f93..14bb5b4 100644
--- a/phoenix5-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
+++ b/phoenix5-hive/src/it/java/org/apache/phoenix/hive/HiveTezIT.java
@@ -19,7 +19,11 @@
 package org.apache.phoenix.hive;
 
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 
+// I was unable to find a classpath combination that works with Phoenix 5.2, and the unrelocated
+// hive-exec uberjar that is added by the tez job runner.
+@Ignore
 public class HiveTezIT extends HivePhoenixStoreIT {
 
     @BeforeClass
diff --git a/phoenix5-hive/src/test/resources/hbase-site.xml b/phoenix5-hive/src/it/resources/hbase-site.xml
similarity index 100%
rename from phoenix5-hive/src/test/resources/hbase-site.xml
rename to phoenix5-hive/src/it/resources/hbase-site.xml
diff --git a/phoenix5-hive/src/test/resources/hive-site.xml b/phoenix5-hive/src/it/resources/hive-site.xml
similarity index 100%
rename from phoenix5-hive/src/test/resources/hive-site.xml
rename to phoenix5-hive/src/it/resources/hive-site.xml
diff --git a/phoenix5-hive/src/test/resources/log4j.properties b/phoenix5-hive/src/it/resources/log4j.properties
similarity index 100%
rename from phoenix5-hive/src/test/resources/log4j.properties
rename to phoenix5-hive/src/it/resources/log4j.properties
diff --git a/phoenix5-hive/src/test/resources/tez-site.xml b/phoenix5-hive/src/it/resources/tez-site.xml
similarity index 100%
rename from phoenix5-hive/src/test/resources/tez-site.xml
rename to phoenix5-hive/src/it/resources/tez-site.xml
diff --git a/phoenix5-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java b/phoenix5-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
index 6cb82cb..d1d3b59 100644
--- a/phoenix5-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
+++ b/phoenix5-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
@@ -50,7 +50,7 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.db.DBWritable;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.phoenix.compile.QueryPlan;
-import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
+import org.apache.phoenix.coprocessorclient.BaseScannerRegionObserverConstants;
 import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
 import org.apache.phoenix.hive.ppd.PhoenixPredicateDecomposer;
 import org.apache.phoenix.hive.ql.index.IndexSearchCondition;
@@ -177,7 +177,7 @@ public class PhoenixInputFormat<T extends DBWritable> implements InputFormat<Wri
                     for (int i = 0, limit = scans.size(); i < limit; i++) {
                         LOG.debug("EXPECTED_UPPER_REGION_KEY[" + i + "] : " + Bytes
                                 .toStringBinary(scans.get(i).getAttribute
-                                        (BaseScannerRegionObserver.EXPECTED_UPPER_REGION_KEY)));
+                                        (BaseScannerRegionObserverConstants.EXPECTED_UPPER_REGION_KEY)));
                     }
                 }
 
diff --git a/phoenix5-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordReader.java b/phoenix5-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordReader.java
index 66c9199..63c5d31 100644
--- a/phoenix5-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordReader.java
+++ b/phoenix5-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixRecordReader.java
@@ -33,7 +33,7 @@ import org.apache.hadoop.mapreduce.lib.db.DBWritable;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.phoenix.compile.QueryPlan;
 import org.apache.phoenix.compile.StatementContext;
-import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
+import org.apache.phoenix.coprocessorclient.BaseScannerRegionObserverConstants;
 import org.apache.phoenix.hive.PhoenixRowKey;
 import org.apache.phoenix.hive.util.PhoenixStorageHandlerUtil;
 import org.apache.phoenix.iterate.ConcatResultIterator;
@@ -99,8 +99,8 @@ public class PhoenixRecordReader<T extends DBWritable> implements
 
             for (int i = 0, limit = scans.size(); i < limit; i++) {
                 LOG.debug("EXPECTED_UPPER_REGION_KEY[" + i + "] : " +
-                        Bytes.toStringBinary(scans.get(i).getAttribute(BaseScannerRegionObserver
-                                .EXPECTED_UPPER_REGION_KEY)));
+                        Bytes.toStringBinary(scans.get(i).getAttribute(
+                            BaseScannerRegionObserverConstants.EXPECTED_UPPER_REGION_KEY)));
             }
         }
 
@@ -113,8 +113,8 @@ public class PhoenixRecordReader<T extends DBWritable> implements
             long renewScannerLeaseThreshold = queryPlan.getContext().getConnection()
                     .getQueryServices().getRenewLeaseThresholdMilliSeconds();
             for (Scan scan : scans) {
-                scan.setAttribute(BaseScannerRegionObserver.SKIP_REGION_BOUNDARY_CHECK, Bytes
-                        .toBytes(true));
+                scan.setAttribute(BaseScannerRegionObserverConstants.SKIP_REGION_BOUNDARY_CHECK,
+                    Bytes.toBytes(true));
                 ScanMetricsHolder scanMetricsHolder = ScanMetricsHolder.getInstance(readMetrics, tableName, scan, ctx.getConnection().getLogLevel());
                 final TableResultIterator tableResultIterator = new TableResultIterator(
                         queryPlan.getContext().getConnection().getMutationState(), scan, scanMetricsHolder,
diff --git a/phoenix5-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java b/phoenix5-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
index 892aed9..fc1b2c1 100644
--- a/phoenix5-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
+++ b/phoenix5-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixUtil.java
@@ -20,7 +20,7 @@ package org.apache.phoenix.hive.util;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.phoenix.coprocessor.MetaDataProtocol.MetaDataMutationResult;
+import org.apache.phoenix.coprocessorclient.MetaDataProtocol.MetaDataMutationResult;
 import org.apache.phoenix.hive.constants.PhoenixStorageHandlerConstants;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.schema.MetaDataClient;
diff --git a/phoenix5-spark-shaded/pom.xml b/phoenix5-spark-shaded/pom.xml
index f9fb93d..a96d142 100644
--- a/phoenix5-spark-shaded/pom.xml
+++ b/phoenix5-spark-shaded/pom.xml
@@ -53,10 +53,10 @@
       <scope>provided</scope>
     </dependency>
 
-    <!-- Phoenix excludes commons-beanutils from the Omid dependency, but that's basically a bug
-     We need to add it back, so that we don't depend on hadoop's common-beanutils, which may or
-     may not be shaded.
-     This can be removed once we use a Phoenix version that doesn't have this problem -->
+    <!-- Phoenix excludes commons-beanutils from the Omid dependency, but
+      that's basically a bug. We need to add it back, so that we don't depend on
+      hadoop's common-beanutils, which may or may not be shaded. This can be removed
+      once we use a Phoenix version that doesn't have this problem -->
     <dependency>
       <groupId>commons-beanutils</groupId>
       <artifactId>commons-beanutils</artifactId>
@@ -222,8 +222,8 @@
       <artifactId>protobuf-java</artifactId>
       <scope>provided</scope>
     </dependency>
-    <!-- Other dependencies we don't want to shade in, but are not transitively excluded by the
-    above for some reason -->
+    <!-- Other dependencies we don't want to shade in, but are not transitively
+      excluded by the above for some reason -->
     <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
@@ -338,19 +338,18 @@
                 <exclude>org/w3c/dom/**</exclude>
                 <exclude>org/xml/sax/**</exclude>
                 <!-- Extras compared to Hadoop -->
-                <!-- Hbase classes - Maybe these could be shaded as well ? -->
+                <!-- Hbase classes - Maybe these could be shaded as well
+                  ? -->
                 <exclude>org/apache/hbase/**</exclude>
                 <!-- We use the spark classpath directly -->
                 <exclude>org/apache/spark/**</exclude>
                 <!-- Phoenix classes -->
-                <exclude>org/apache/phoenix/**</exclude>
                 <exclude>org/apache/omid/**</exclude>
                 <!-- Do want/need to expose Tephra as well ? -->
-                <!-- See PHOENIX-7118
-                 Depending on the Spark classpath we may need to leave this unshaded, relocate
-                 it under org/apache/hadoop/shaded/ or under org/apache/hadoop/hbase/shaded/.
-                 The only thing that is guaranteed not to work is relocating it under
-                 ${shaded.package} -->
+                <!-- See PHOENIX-7118 Depending on the Spark classpath we
+                  may need to leave this unshaded, relocate it under org/apache/hadoop/shaded/
+                  or under org/apache/hadoop/hbase/shaded/. The only thing that is guaranteed
+                  not to work is relocating it under ${shaded.package} -->
                 <exclude>org/apache/commons/configuration2/**</exclude>
               </excludes>
             </relocation>
@@ -385,8 +384,8 @@
               <pattern>com/google/inject/</pattern>
               <shadedPattern>${shaded.package}.com.google.inject.</shadedPattern>
             </relocation>
-            <!-- This is protobuf 2.5.0 which is shaded to this package in hbase-shaded-client,
-             not the modified protobuf 3.x from hbase-thirdparty -->
+            <!-- This is protobuf 2.5.0 which is shaded to this package in
+              hbase-shaded-client, not the modified protobuf 3.x from hbase-thirdparty -->
             <relocation>
               <pattern>com/google/protobuf/</pattern>
               <shadedPattern>${hbase.shaded.package}.com.google.protobuf.</shadedPattern>
diff --git a/phoenix5-spark/pom.xml b/phoenix5-spark/pom.xml
index 17e02bb..88f159f 100644
--- a/phoenix5-spark/pom.xml
+++ b/phoenix5-spark/pom.xml
@@ -31,7 +31,7 @@
     <version>6.0.0-SNAPSHOT</version>
   </parent>
   <artifactId>phoenix5-spark</artifactId>
-  <name>Phoenix Spark Connector for Phoenix 5</name>
+  <name>Phoenix Spark 2 Connector for Phoenix 5</name>
 
   <properties>
     <top.dir>${project.basedir}/..</top.dir>
@@ -59,7 +59,8 @@
       <groupId>org.apache.spark</groupId>
       <artifactId>spark-core_${scala.binary.version}</artifactId>
       <version>${spark.version}</version>
-        <exclusions>
+      <scope>provided</scope>
+      <exclusions>
         <!-- The shaded hadoop-client libraries conflict with hbase-shaded-mapreduce -->
         <exclusion>
           <groupId>org.apache.hadoop</groupId>
@@ -70,7 +71,6 @@
           <artifactId>hadoop-client-runtime</artifactId>
         </exclusion>
       </exclusions>
-      <scope>provided</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
@@ -99,7 +99,11 @@
 
     <dependency>
       <groupId>org.apache.phoenix</groupId>
-      <artifactId>phoenix-core</artifactId>
+      <artifactId>phoenix-core-client</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.phoenix</groupId>
+      <artifactId>phoenix-core-server</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.phoenix</groupId>
@@ -117,12 +121,12 @@
     </dependency>
     <dependency>
       <groupId>org.apache.omid</groupId>
-      <artifactId>omid-tso-server-hbase2.x</artifactId>
+      <artifactId>omid-tso-server</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.omid</groupId>
-      <artifactId>omid-tso-server-hbase2.x</artifactId>
+      <artifactId>omid-tso-server</artifactId>
       <scope>test</scope>
       <type>test-jar</type>
     </dependency>
@@ -141,10 +145,10 @@
       <scope>test</scope>
     </dependency>
 
-
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
+      <scope>provided</scope>
       <exclusions>
         <exclusion>
           <groupId>log4j</groupId>
@@ -171,12 +175,12 @@
           <artifactId>netty</artifactId>
         </exclusion>
       </exclusions>
-      <scope>provided</scope>
     </dependency>
 
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <scope>provided</scope>
       <exclusions>
         <exclusion>
           <groupId>log4j</groupId>
@@ -203,13 +207,13 @@
           <artifactId>netty</artifactId>
         </exclusion>
       </exclusions>
-      <scope>provided</scope>
     </dependency>
 
 
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-client</artifactId>
+      <scope>provided</scope>
       <exclusions>
         <exclusion>
           <groupId>log4j</groupId>
@@ -285,6 +289,7 @@
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-common</artifactId>
+      <scope>provided</scope>
       <exclusions>
         <exclusion>
           <groupId>log4j</groupId>
@@ -414,202 +419,159 @@
     </dependency>
 
     <dependency>
-      <!-- Why is this not provided transitively via Phoenix ? -->
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-testing-util</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
       <groupId>org.apache.zookeeper</groupId>
       <artifactId>zookeeper</artifactId>
       <version>${zookeeper.version}</version>
       <scope>test</scope>
     </dependency>
 
-    <!-- Mark every Hadoop jar as provided -->
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-annotations</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-auth</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-yarn-api</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-hdfs</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-distcp</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-client</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-mapreduce-client-common</artifactId>
-      <scope>provided</scope>
-    </dependency>
     <!-- We want to take the implementation from Spark -->
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-log4j12</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>com.google.protobuf</groupId>
-      <artifactId>protobuf-java</artifactId>
-      <version>2.5.0</version>
-      <scope>provided</scope>
+      <scope>test</scope>
     </dependency>
   </dependencies>
 
   <build>
-      <plugins>
-        <plugin>
-          <groupId>org.codehaus.mojo</groupId>
-          <artifactId>build-helper-maven-plugin</artifactId>
-          <executions>
-            <execution>
-              <id>add-test-source</id>
-              <phase>generate-sources</phase>
-              <goals>
-                <goal>add-test-source</goal>
-              </goals>
-              <configuration>
-                <sources>
-                  <source>src/it/java</source>
-                  <source>src/it/scala</source>
-                </sources>
-              </configuration>
-            </execution>
-          </executions>
-        </plugin>
-        <plugin>
-          <artifactId>maven-dependency-plugin</artifactId>
-          <configuration>
-            <ignoreNonCompile>true</ignoreNonCompile>
-            <ignoredUnusedDeclaredDependencies>
-              <!-- These are all used -->
-              <ignoredUnusedDeclaredDependency>
-                org.apache.hadoop:hadoop-hdfs
-              </ignoredUnusedDeclaredDependency>
-              <ignoredUnusedDeclaredDependency>
-                org.apache.hbase:hbase-it
-              </ignoredUnusedDeclaredDependency>
-            </ignoredUnusedDeclaredDependencies>
-          </configuration>
-        </plugin>
-        <plugin>
-          <groupId>net.alchim31.maven</groupId>
-          <artifactId>scala-maven-plugin</artifactId>
-          <configuration>
-            <charset>${project.build.sourceEncoding}</charset>
-            <jvmArgs>
-              <jvmArg>-Xmx1024m</jvmArg>
-            </jvmArgs>
-            <scalaVersion>${scala.version}</scalaVersion>
-            <scalaCompatVersion>${scala.binary.version}</scalaCompatVersion>
-          </configuration>
-          <executions>
-            <execution>
-              <id>scala-compile-first</id>
-              <phase>process-resources</phase>
-              <goals>
-                <goal>add-source</goal>
-                <goal>compile</goal>
-              </goals>
-            </execution>
-            <execution>
-              <id>scala-test-compile</id>
-              <phase>process-test-resources</phase>
-              <goals>
-                <goal>testCompile</goal>
-              </goals>
-            </execution>
-          </executions>
-        </plugin>
+    <plugins>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+      </plugin>
+      <plugin>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <configuration>
+          <ignoredUnusedDeclaredDependencies>
+            <!-- These are test runtime dependencies that Maven has no concept of -->
+            <ignoredUnusedDeclaredDependency>
+              org.apache.zookeeper:zookeeper
+            </ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>
+              org.apache.phoenix:phoenix-hbase-compat-${hbase.compat.version}
+            </ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>
+              org.apache.omid:omid-tso-server
+            </ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>
+              org.apache.hadoop:hadoop-hdfs
+            </ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>
+              org.apache.hbase:hbase-it
+            </ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>
+              org.apache.hbase:hbase-testing-util
+            </ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>
+              org.slf4j:slf4j-log4j12
+            </ignoredUnusedDeclaredDependency>
+          </ignoredUnusedDeclaredDependencies>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>net.alchim31.maven</groupId>
+        <artifactId>scala-maven-plugin</artifactId>
+        <configuration>
+          <charset>${project.build.sourceEncoding}</charset>
+          <jvmArgs>
+            <jvmArg>-Xmx1024m</jvmArg>
+          </jvmArgs>
+          <scalaVersion>${scala.version}</scalaVersion>
+          <scalaCompatVersion>${scala.binary.version}</scalaCompatVersion>
+        </configuration>
+        <executions>
+          <execution>
+            <id>scala-compile-first</id>
+            <phase>process-resources</phase>
+            <goals>
+              <goal>add-source</goal>
+              <goal>compile</goal>
+            </goals>
+          </execution>
+          <execution>
+            <id>scala-test-compile</id>
+            <phase>process-test-resources</phase>
+            <goals>
+              <goal>testCompile</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
 
-        <plugin>
-          <groupId>org.scalatest</groupId>
-          <artifactId>scalatest-maven-plugin</artifactId>
-          <version>1.0</version>
-          <configuration>
-            <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-            <junitxml>.</junitxml>
-            <filereports>WDF TestSuite.txt</filereports>
-            <skipTests>${skip.scalatest}</skipTests>
-          </configuration>
-          <executions>
-            <execution>
-              <id>integration-test</id>
-              <phase>integration-test</phase>
-              <goals>
-                <goal>test</goal>
-              </goals>
-              <configuration>
-                <!-- Need this false until we can switch to JUnit 4.13 due 
-                  to https://github.com/junit-team/junit4/issues/1223 -->
-                <parallel>false</parallel>
-                <tagsToExclude>Integration-Test</tagsToExclude>
-                <argLine>-XX:ReservedCodeCacheSize=512m ${argLine}</argLine>
-              </configuration>
-            </execution>
-          </executions>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-failsafe-plugin</artifactId>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-compiler-plugin</artifactId>
-          <configuration>
-            <source>1.8</source>
-            <target>1.8</target>
-          </configuration>
-        </plugin>
-        <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-javadoc-plugin</artifactId>
+      <plugin>
+        <groupId>org.scalatest</groupId>
+        <artifactId>scalatest-maven-plugin</artifactId>
+        <version>1.0</version>
+        <configuration>
+          <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
+          <junitxml>.</junitxml>
+          <filereports>WDF TestSuite.txt</filereports>
+          <skipTests>${skip.scalatest}</skipTests>
+        </configuration>
+        <executions>
+          <execution>
+            <id>integration-test</id>
+            <phase>integration-test</phase>
+            <goals>
+              <goal>test</goal>
+            </goals>
+            <configuration>
+              <!-- Need this false until we can switch to JUnit 4.13 due
+                to https://github.com/junit-team/junit4/issues/1223 -->
+              <parallel>false</parallel>
+              <tagsToExclude>Integration-Test</tagsToExclude>
+              <argLine>-XX:ReservedCodeCacheSize=512m ${argLine}</argLine>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-failsafe-plugin</artifactId>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <configuration>
+          <source>1.8</source>
+          <target>1.8</target>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-javadoc-plugin</artifactId>
+        <configuration>
+          <skip>${skip.spark.javadoc}</skip>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>empty-javadoc-jar</id>
+            <phase>package</phase>
+            <goals>
+              <goal>jar</goal>
+            </goals>
             <configuration>
-                <skip>${skip.spark.javadoc}</skip>
+              <classifier>javadoc</classifier>
+              <classesDirectory>${basedir}/javadoc</classesDirectory>
             </configuration>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-jar-plugin</artifactId>
-          <executions>
-            <execution>
-              <id>empty-javadoc-jar</id>
-              <phase>package</phase>
-              <goals>
-                <goal>jar</goal>
-              </goals>
-              <configuration>
-                <classifier>javadoc</classifier>
-                <classesDirectory>${basedir}/javadoc</classesDirectory>
-              </configuration>
-            </execution>
-          </executions>
-        </plugin>
-        <!-- Taken from phoenix-client-parent
-        this should be kept in sync with Phoenix as much as possible -->
-      </plugins>
+          </execution>
+        </executions>
+      </plugin>
+      <!-- Taken from phoenix-client-parent this should be kept in sync with
+        Phoenix as much as possible -->
+    </plugins>
   </build>
-    <profiles>
+  <profiles>
     <profile>
       <!-- This only applies when building with 5.1 -->
       <id>exclude-tephra</id>
diff --git a/phoenix5-spark/src/it/java/org/apache/phoenix/spark/DataSourceApiIT.java b/phoenix5-spark/src/it/java/org/apache/phoenix/spark/DataSourceApiIT.java
index a49bc0a..507fc8e 100644
--- a/phoenix5-spark/src/it/java/org/apache/phoenix/spark/DataSourceApiIT.java
+++ b/phoenix5-spark/src/it/java/org/apache/phoenix/spark/DataSourceApiIT.java
@@ -41,6 +41,7 @@ import java.util.Arrays;
 import static org.apache.phoenix.spark.datasource.v2.PhoenixDataSource.JDBC_URL;
 import static org.apache.phoenix.spark.datasource.v2.PhoenixDataSource.ZOOKEEPER_URL;
 import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL;
+import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_ZK;
 import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR;
 import static org.junit.Assert.*;
 
@@ -104,6 +105,12 @@ public class DataSourceApiIT extends ParallelStatsDisabledIT {
             .save();
 
             // Use jdbcUrl
+            // In Phoenix 5.2+ getUrl() returns a JDBC URL; in earlier versions it returns a ZK
+            // quorum
+            String jdbcUrl = getUrl();
+            if (!jdbcUrl.startsWith(JDBC_PROTOCOL)) {
+                jdbcUrl = JDBC_PROTOCOL_ZK + JDBC_PROTOCOL_SEPARATOR + jdbcUrl;
+            }
             Dataset<Row> df2 =
                     spark.createDataFrame(
                         Arrays.asList(RowFactory.create(2, "x")),
@@ -111,7 +118,7 @@ public class DataSourceApiIT extends ParallelStatsDisabledIT {
 
             df2.write().format("phoenix").mode(SaveMode.Overwrite)
                 .option("table", tableName)
-                .option(JDBC_URL, JDBC_PROTOCOL + JDBC_PROTOCOL_SEPARATOR + getUrl())
+                .option(JDBC_URL, jdbcUrl)
                 .save();
 
             // Use default from hbase-site.xml
@@ -148,8 +155,7 @@ public class DataSourceApiIT extends ParallelStatsDisabledIT {
             // Use jdbcUrl
             Dataset df2Read = spark.read().format("phoenix")
                     .option("table", tableName)
-                    .option(PhoenixDataSource.JDBC_URL,
-                                JDBC_PROTOCOL + JDBC_PROTOCOL_SEPARATOR + getUrl())
+                    .option(PhoenixDataSource.JDBC_URL, jdbcUrl)
                     .load();
 
             assertEquals(3l, df2Read.count());
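
Side note on the getUrl() handling above: the same normalization appears in both Spark modules' ITs and could be read as the following standalone helper. This is an illustrative sketch only (the JdbcUrlNormalizer class is hypothetical; the PhoenixRuntime constants are real, assuming JDBC_PROTOCOL_ZK carries the "jdbc:phoenix+zk" prefix):

    import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL;
    import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR;
    import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_ZK;

    // Hypothetical helper mirroring the test change: Phoenix 5.2+ returns a
    // full JDBC URL from getUrl(), while 5.1 returns a bare ZK quorum, so the
    // value is normalized before being passed as the connector's JDBC_URL option.
    public final class JdbcUrlNormalizer {
        private JdbcUrlNormalizer() {
        }

        public static String toJdbcUrl(String urlOrQuorum) {
            if (urlOrQuorum.startsWith(JDBC_PROTOCOL)) {
                return urlOrQuorum; // already "jdbc:phoenix..." (Phoenix 5.2+)
            }
            // 5.1-style bare ZK quorum: prefix "jdbc:phoenix+zk" + ":"
            return JDBC_PROTOCOL_ZK + JDBC_PROTOCOL_SEPARATOR + urlOrQuorum;
        }
    }
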
diff --git a/phoenix5-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixDataSourceReader.java b/phoenix5-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixDataSourceReader.java
index a7aca22..84d83f3 100644
--- a/phoenix5-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixDataSourceReader.java
+++ b/phoenix5-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixDataSourceReader.java
@@ -29,7 +29,6 @@ import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.mapreduce.PhoenixInputSplit;
 import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
-import org.apache.phoenix.protobuf.ProtobufUtil;
 import org.apache.phoenix.query.KeyRange;
 import org.apache.phoenix.schema.PTableImpl;
 import org.apache.phoenix.spark.FilterExpressionCompiler;
diff --git a/phoenix5-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixInputPartitionReader.java b/phoenix5-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixInputPartitionReader.java
index a7f6240..3a4ef65 100644
--- a/phoenix5-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixInputPartitionReader.java
+++ b/phoenix5-spark/src/main/java/org/apache/phoenix/spark/datasource/v2/reader/PhoenixInputPartitionReader.java
@@ -17,7 +17,6 @@
  */
 package org.apache.phoenix.spark.datasource.v2.reader;
 
-import com.google.protobuf.InvalidProtocolBufferException;
 import java.io.IOException;
 import java.sql.Connection;
 import java.sql.DriverManager;
@@ -33,9 +32,9 @@ import org.apache.hadoop.hbase.util.Bytes;
 
 import org.apache.phoenix.compile.QueryPlan;
 import org.apache.phoenix.compile.StatementContext;
-import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
 import org.apache.phoenix.coprocessor.generated.PTableProtos;
 import org.apache.phoenix.coprocessor.generated.PTableProtos.PTable;
+import org.apache.phoenix.coprocessorclient.BaseScannerRegionObserverConstants;
 import org.apache.phoenix.iterate.ConcatResultIterator;
 import org.apache.phoenix.iterate.LookAheadResultIterator;
 import org.apache.phoenix.iterate.MapReduceParallelScanGrouper;
@@ -98,7 +97,7 @@ public class PhoenixInputPartitionReader implements InputPartitionReader<Interna
             PTable pTable = null;
             try {
                 pTable = PTable.parseFrom(options.getPTableCacheBytes());
-            } catch (InvalidProtocolBufferException e) {
+            } catch (Exception e) {
                 throw new RuntimeException("Parsing the PTable Cache Bytes is failing ", e);
             }
             org.apache.phoenix.schema.PTable table = PTableImpl.createFromProto(pTable);
@@ -134,7 +133,7 @@ public class PhoenixInputPartitionReader implements InputPartitionReader<Interna
                     .getQueryServices().getRenewLeaseThresholdMilliSeconds();
             for (Scan scan : scans) {
                 // For MR, skip the region boundary check exception if we encounter a split. ref: PHOENIX-2599
-                scan.setAttribute(BaseScannerRegionObserver.SKIP_REGION_BOUNDARY_CHECK, Bytes.toBytes(true));
+                scan.setAttribute(BaseScannerRegionObserverConstants.SKIP_REGION_BOUNDARY_CHECK, Bytes.toBytes(true));
 
                 PeekingResultIterator peekingResultIterator;
                 ScanMetricsHolder scanMetricsHolder =
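
Note on the import change above: moving from BaseScannerRegionObserver to coprocessorclient.BaseScannerRegionObserverConstants ties this module to Phoenix 5.2+ at compile time. If a single artifact ever had to run against both 5.1 and 5.2, a reflection-based lookup along these lines could bridge the package move; the shim below is hypothetical and not part of this commit, which simply compiles against 5.2:

    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;

    // Hypothetical compatibility shim (not in this commit): resolve
    // SKIP_REGION_BOUNDARY_CHECK from whichever Phoenix class is present,
    // trying the 5.2+ location first, then the 5.1 one.
    final class ScanAttributeShim {
        private static final String[] CANDIDATES = {
            "org.apache.phoenix.coprocessorclient.BaseScannerRegionObserverConstants", // 5.2+
            "org.apache.phoenix.coprocessor.BaseScannerRegionObserver" // 5.1
        };

        static void skipRegionBoundaryCheck(Scan scan) {
            for (String className : CANDIDATES) {
                try {
                    String key = (String) Class.forName(className)
                            .getField("SKIP_REGION_BOUNDARY_CHECK").get(null);
                    scan.setAttribute(key, Bytes.toBytes(true));
                    return;
                } catch (ReflectiveOperationException e) {
                    // fall through and try the next known location
                }
            }
            throw new IllegalStateException(
                    "No known Phoenix scanner constants class on the classpath");
        }
    }
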
diff --git a/phoenix5-spark3-it/pom.xml b/phoenix5-spark3-it/pom.xml
deleted file mode 100644
index 5a8aaf1..0000000
--- a/phoenix5-spark3-it/pom.xml
+++ /dev/null
@@ -1,260 +0,0 @@
-<?xml version='1.0'?>
-<!--
-
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
-
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <parent>
-    <artifactId>phoenix-connectors</artifactId>
-    <groupId>org.apache.phoenix</groupId>
-    <version>6.0.0-SNAPSHOT</version>
-  </parent>
-  <modelVersion>4.0.0</modelVersion>
-
-  <artifactId>phoenix5-spark3-it</artifactId>
-  <name>Tests for Phoenix Spark 3 Connector for Phoenix 5</name>
-
-  <properties>
-    <top.dir>${project.basedir}/..</top.dir>
-    <spark.version>${spark3.version}</spark.version>
-    <scala.version>${scala.version.for.spark3}</scala.version>
-    <scala.binary.version>${scala.binary.version.for.spark3}</scala.binary.version>
-  </properties>
-
-  <dependencies>
-
-    <!-- Spark and scala dependencies -->
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-core_${scala.binary.version}</artifactId>
-      <version>${spark.version}</version>
-      <scope>provided</scope>
-      <exclusions>
-        <!-- The shaded hadoop-client libraries conflict with the minicluster -->
-        <exclusion>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-client-api</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-client-runtime</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-catalyst_${scala.binary.version}</artifactId>
-      <version>${spark.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-sql_${scala.binary.version}</artifactId>
-      <version>${spark.version}</version>
-      <scope>provided</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.scala-lang</groupId>
-      <artifactId>scala-library</artifactId>
-      <version>${scala.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.scala-lang</groupId>
-      <artifactId>scala-reflect</artifactId>
-      <version>${scala.version}</version>
-      <scope>provided</scope>
-    </dependency>
-
-    <!-- Phoenix dependencies -->
-    <dependency>
-      <groupId>org.apache.phoenix</groupId>
-      <artifactId>phoenix5-spark3</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.phoenix</groupId>
-      <artifactId>phoenix-core</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.phoenix</groupId>
-      <artifactId>phoenix-hbase-compat-${hbase.compat.version}</artifactId>
-      <scope>test</scope>
-      <optional>true</optional>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.phoenix</groupId>
-      <artifactId>phoenix-core</artifactId>
-      <classifier>tests</classifier>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.omid</groupId>
-      <artifactId>omid-tso-server-hbase2.x</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.omid</groupId>
-      <artifactId>omid-tso-server-hbase2.x</artifactId>
-      <scope>test</scope>
-      <type>test-jar</type>
-    </dependency>
-
-    <!-- Test dependencies -->
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.scalactic</groupId>
-      <artifactId>scalactic_${scala.binary.version}</artifactId>
-      <!-- Newer versions would require refactor -->
-      <version>3.1.4</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.scalatest</groupId>
-      <artifactId>scalatest_${scala.binary.version}</artifactId>
-      <version>3.1.4</version>
-      <scope>test</scope>
-    </dependency>
-
-    <!-- Minicluster dependencies -->
-    <dependency>
-      <!-- Why is this not provided transitively via Phoenix ? -->
-      <groupId>org.apache.zookeeper</groupId>
-      <artifactId>zookeeper</artifactId>
-      <version>${zookeeper.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-it</artifactId>
-      <type>test-jar</type>
-      <!-- NOT Test scope to get transitive dependencies -->
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-minicluster</artifactId>
-      <!-- NOT Test scope to get transitive dependencies -->
-    </dependency>
-
-
-  </dependencies>
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>build-helper-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>add-test-source</id>
-            <phase>generate-sources</phase>
-            <goals>
-              <goal>add-test-source</goal>
-            </goals>
-            <configuration>
-              <sources>
-                <source>src/it/java</source>
-                <source>src/it/scala</source>
-              </sources>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>net.alchim31.maven</groupId>
-        <artifactId>scala-maven-plugin</artifactId>
-        <configuration>
-          <charset>${project.build.sourceEncoding}</charset>
-          <jvmArgs>
-            <jvmArg>-Xmx1024m</jvmArg>
-          </jvmArgs>
-          <scalaVersion>${scala.version}</scalaVersion>
-          <scalaCompatVersion>${scala.binary.version}</scalaCompatVersion>
-        </configuration>
-        <executions>
-          <execution>
-            <id>scala-compile-first</id>
-            <phase>process-resources</phase>
-            <goals>
-              <goal>add-source</goal>
-              <goal>compile</goal>
-            </goals>
-          </execution>
-          <execution>
-            <id>scala-test-compile</id>
-            <goals>
-              <goal>testCompile</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.scalatest</groupId>
-        <artifactId>scalatest-maven-plugin</artifactId>
-        <version>1.0</version>
-        <configuration>
-          <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-          <junitxml>.</junitxml>
-          <filereports>WDF TestSuite.txt</filereports>
-          <skipTests>${skip.scalatest}</skipTests>
-        </configuration>
-        <executions>
-          <execution>
-            <id>integration-test</id>
-            <phase>integration-test</phase>
-            <goals>
-              <goal>test</goal>
-            </goals>
-            <configuration>
-              <!-- Need this false until we can switch to JUnit 4.13 due 
-                to https://github.com/junit-team/junit4/issues/1223 -->
-              <parallel>false</parallel>
-              <tagsToExclude>Integration-Test</tagsToExclude>
-              <argLine>-XX:ReservedCodeCacheSize=512m ${argLine}</argLine>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-failsafe-plugin</artifactId>
-      </plugin>
-      <plugin>
-        <artifactId>maven-dependency-plugin</artifactId>
-        <configuration>
-          <ignoredDependencies>
-            <ignoredDependency>org.apache.hadoop:hadoop-common</ignoredDependency>
-            <ignoredDependency>org.apache.hadoop:hadoop-minicluster</ignoredDependency>
-            <ignoredDependency>org.apache.phoenix:phoenix-hbase-compat-${hbase.compat.version}</ignoredDependency>
-            <ignoredDependency>org.apache.hbase:hbase-it</ignoredDependency>
-            <ignoredDependency>org.apache.omid:*</ignoredDependency>
-            <ignoredDependency>org.apache.zookeeper:zookeeper</ignoredDependency>
-          </ignoredDependencies>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-</project>
\ No newline at end of file
diff --git a/phoenix5-spark3-shaded/pom.xml b/phoenix5-spark3-shaded/pom.xml
index f3bc82c..4444cc1 100644
--- a/phoenix5-spark3-shaded/pom.xml
+++ b/phoenix5-spark3-shaded/pom.xml
@@ -53,10 +53,10 @@
       <scope>provided</scope>
     </dependency>
 
-    <!-- Phoenix excludes commons-beanutils from the Omid dependency, but that's basically a bug
-     We need to add it back, so that we don't depend on hadoop's common-beanutils, which may or
-     may not be shaded.
-     This can be removed once we use a Phoenix version that doesn't have this problem -->
+    <!-- Phoenix excludes commons-beanutils from the Omid dependency, but
+      that's basically a bug. We need to add it back, so that we don't depend on
+      hadoop's common-beanutils, which may or may not be shaded. This can be removed
+      once we use a Phoenix version that doesn't have this problem -->
     <dependency>
       <groupId>commons-beanutils</groupId>
       <artifactId>commons-beanutils</artifactId>
@@ -222,8 +222,8 @@
       <artifactId>protobuf-java</artifactId>
       <scope>provided</scope>
     </dependency>
-    <!-- Other dependencies we don't want to shade in, but are not transitively excluded by the
-    above for some reason -->
+    <!-- Other dependencies we don't want to shade in, but are not transitively
+      excluded by the above for some reason -->
     <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
@@ -338,7 +338,8 @@
                 <exclude>org/w3c/dom/**</exclude>
                 <exclude>org/xml/sax/**</exclude>
                 <!-- Extras compared to Hadoop -->
-                <!-- Hbase classes - Maybe these could be shaded as well ? -->
+                <!-- Hbase classes - Maybe these could be shaded as well
+                  ? -->
                 <exclude>org/apache/hbase/**</exclude>
                 <!-- We use the spark classpath directly -->
                 <exclude>org/apache/spark/**</exclude>
@@ -346,11 +347,10 @@
                 <exclude>org/apache/phoenix/**</exclude>
                 <exclude>org/apache/omid/**</exclude>
                 <!-- Do want/need to expose Tephra as well ? -->
-                <!-- See PHOENIX-7118
-                 Depending on the Spark classpath we may need to leave this unshaded, relocate
-                 it under org/apache/hadoop/shaded/ or under org/apache/hadoop/hbase/shaded/.
-                 The only thing that is guaranteed not to work is relocating it under
-                 ${shaded.package} -->
+                <!-- See PHOENIX-7118 Depending on the Spark classpath we
+                  may need to leave this unshaded, relocate it under org/apache/hadoop/shaded/
+                  or under org/apache/hadoop/hbase/shaded/. The only thing that is guaranteed
+                  not to work is relocating it under ${shaded.package} -->
                 <exclude>org/apache/commons/configuration2/**</exclude>
               </excludes>
             </relocation>
@@ -385,8 +385,8 @@
              <pattern>com/google/inject/</pattern>
              <shadedPattern>${shaded.package}.com.google.inject.</shadedPattern>
             </relocation>
-            <!-- This is protobuf 2.5.0 which is shaded to this package in hbase-shaded-client,
-             not the modified protobuf 3.x from hbase-thirdparty -->
+            <!-- This is protobuf 2.5.0 which is shaded to this package in
+              hbase-shaded-client, not the modified protobuf 3.x from hbase-thirdparty -->
             <relocation>
              <pattern>com/google/protobuf/</pattern>
              <shadedPattern>${hbase.shaded.package}.com.google.protobuf.</shadedPattern>
@@ -412,7 +412,8 @@
               <pattern>io/</pattern>
               <shadedPattern>${shaded.package}.io.</shadedPattern>
               <excludes>
-                <!-- Exclude config keys for Hadoop that look like package names -->
+                <!-- Exclude config keys for Hadoop that look like package
+                  names -->
                 <exclude>io/compression/**</exclude>
                 <exclude>io/mapfile/**</exclude>
                 <exclude>io/map/index/*</exclude>
@@ -456,7 +457,7 @@
               <pattern>net/</pattern>
               <shadedPattern>${shaded.package}.net.</shadedPattern>
               <excludes>
-                <!-- Exclude config keys for Hadoop that look like package names -->
+                <!-- Exclude config keys for Hadoop that look like package  names -->
                 <exclude>net/topology/**</exclude>
               </excludes>
             </relocation>
diff --git a/phoenix5-spark3/pom.xml b/phoenix5-spark3/pom.xml
index aecb6b6..7123e32 100644
--- a/phoenix5-spark3/pom.xml
+++ b/phoenix5-spark3/pom.xml
@@ -41,36 +41,24 @@
   </properties>
 
   <dependencies>
-    <dependency>
-      <groupId>org.apache.phoenix</groupId>
-      <artifactId>phoenix-core</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.phoenix</groupId>
-      <artifactId>phoenix-hbase-compat-${hbase.compat.version}</artifactId>
-      <scope>runtime</scope>
-      <optional>true</optional>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.phoenix.thirdparty</groupId>
-      <artifactId>phoenix-shaded-guava</artifactId>
-      <scope>provided</scope>
-    </dependency>
-
-    <!-- Scala dependencies -->
-    <dependency>
-      <groupId>org.scala-lang</groupId>
-      <artifactId>scala-library</artifactId>
-      <version>${scala.version}</version>
-      <scope>provided</scope>
-    </dependency>
 
-    <!-- Spark dependencies -->
+    <!-- Spark dependencies first to avoid jackson compatibility problems -->
     <dependency>
       <groupId>org.apache.spark</groupId>
       <artifactId>spark-core_${scala.binary.version}</artifactId>
       <version>${spark.version}</version>
       <scope>provided</scope>
+      <exclusions>
+        <!-- The shaded hadoop-client libraries conflict with the minicluster -->
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-client-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-client-runtime</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
@@ -97,6 +85,34 @@
       <scope>provided</scope>
     </dependency>
 
+    <!-- Phoenix dependencies -->
+    <dependency>
+      <groupId>org.apache.phoenix</groupId>
+      <artifactId>phoenix-core-client</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.phoenix</groupId>
+      <artifactId>phoenix-core-server</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.phoenix.thirdparty</groupId>
+      <artifactId>phoenix-shaded-guava</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.phoenix</groupId>
+      <artifactId>phoenix-hbase-compat-${hbase.compat.version}</artifactId>
+      <scope>runtime</scope>
+      <optional>true</optional>
+    </dependency>
+
+    <!-- Scala dependencies -->
+    <dependency>
+      <groupId>org.scala-lang</groupId>
+      <artifactId>scala-library</artifactId>
+      <version>${scala.version}</version>
+      <scope>provided</scope>
+    </dependency>
+
     <!-- HBase dependencies -->
     <dependency>
       <groupId>org.apache.hbase</groupId>
@@ -114,25 +130,99 @@
       <scope>provided</scope>
     </dependency>
 
+
+    <!-- Hadoop dependencies -->
     <dependency>
-      <groupId>com.google.protobuf</groupId>
-      <artifactId>protobuf-java</artifactId>
-      <version>2.5.0</version>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
       <scope>provided</scope>
     </dependency>
-
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    
     <!-- Misc dependencies -->
     <dependency>
       <groupId>joda-time</groupId>
       <artifactId>joda-time</artifactId>
       <version>${jodatime.version}</version>
     </dependency>
+
+    <!-- Phoenix Test dependencies -->
+    <dependency>
+      <groupId>org.apache.phoenix</groupId>
+      <artifactId>phoenix-core</artifactId>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+
+    <!-- Scala Test dependencies -->
+    <dependency>
+      <groupId>org.scala-lang</groupId>
+      <artifactId>scala-reflect</artifactId>
+      <version>${scala.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.scalactic</groupId>
+      <artifactId>scalactic_${scala.binary.version}</artifactId>
+      <!-- Newer versions would require refactor -->
+      <version>3.1.4</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.scalatest</groupId>
+      <artifactId>scalatest_${scala.binary.version}</artifactId>
+      <version>3.1.4</version>
+      <scope>test</scope>
+    </dependency>
+
+    <!-- Minicluster test dependencies -->
+    <dependency>
+      <!-- Why is this not provided transitively via Phoenix ? -->
+      <groupId>org.apache.zookeeper</groupId>
+      <artifactId>zookeeper</artifactId>
+      <version>${zookeeper.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-testing-util</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-it</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minicluster</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <!-- Misc test dependencies -->
+    <dependency>
+      <groupId>org.apache.omid</groupId>
+      <artifactId>omid-tso-server</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.omid</groupId>
+      <artifactId>omid-tso-server</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+    </dependency>
+
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-api</artifactId>
+      <scope>provided</scope>
     </dependency>
 
-    <!-- for Unit Tests. ITs are run from another module -->
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
@@ -142,48 +232,89 @@
   </dependencies>
   <build>
     <plugins>
-        <plugin>
-          <groupId>net.alchim31.maven</groupId>
-          <artifactId>scala-maven-plugin</artifactId>
-          <configuration>
-            <charset>${project.build.sourceEncoding}</charset>
-            <jvmArgs>
-              <jvmArg>-Xmx1024m</jvmArg>
-            </jvmArgs>
-            <scalaVersion>${scala.version}</scalaVersion>
-            <scalaCompatVersion>${scala.binary.version}</scalaCompatVersion>
-          </configuration>
-          <executions>
-            <execution>
-              <id>scala-compile-first</id>
-              <phase>process-resources</phase>
-              <goals>
-                <goal>add-source</goal>
-                <goal>compile</goal>
-              </goals>
-            </execution>
-          </executions>
-        </plugin>
       <plugin>
-          <artifactId>maven-dependency-plugin</artifactId>
-          <configuration>
-            <ignoreNonCompile>true</ignoreNonCompile>
-            <ignoredDependencies>
-              <ignoredDependency>org.apache.hadoop:hadoop-common</ignoredDependency>
-              <ignoredDependency>org.apache.hadoop:hadoop-mapreduce-client-core</ignoredDependency>
-            </ignoredDependencies>
-          </configuration>
-        </plugin>
-        <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-javadoc-plugin</artifactId>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+      </plugin>
+      <plugin>
+        <groupId>net.alchim31.maven</groupId>
+        <artifactId>scala-maven-plugin</artifactId>
+        <configuration>
+          <charset>${project.build.sourceEncoding}</charset>
+          <jvmArgs>
+            <jvmArg>-Xmx1024m</jvmArg>
+          </jvmArgs>
+          <scalaVersion>${scala.version}</scalaVersion>
+          <scalaCompatVersion>${scala.binary.version}</scalaCompatVersion>
+        </configuration>
+        <executions>
+          <execution>
+            <id>scala-compile-first</id>
+            <phase>process-resources</phase>
+            <goals>
+              <goal>add-source</goal>
+              <goal>compile</goal>
+            </goals>
+          </execution>
+          <execution>
+            <id>scala-test-compile</id>
+            <goals>
+              <goal>testCompile</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.scalatest</groupId>
+        <artifactId>scalatest-maven-plugin</artifactId>
+        <version>1.0</version>
+        <configuration>
+          <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
+          <junitxml>.</junitxml>
+          <filereports>WDF TestSuite.txt</filereports>
+          <skipTests>${skip.scalatest}</skipTests>
+        </configuration>
+        <executions>
+          <execution>
+            <id>integration-test</id>
+            <phase>integration-test</phase>
+            <goals>
+              <goal>test</goal>
+            </goals>
             <configuration>
-                <skip>${skip.spark.javadoc}</skip>
+              <!-- Need this false until we can switch to JUnit 4.13 due 
+                to https://github.com/junit-team/junit4/issues/1223 -->
+              <parallel>false</parallel>
+              <tagsToExclude>Integration-Test</tagsToExclude>
+              <argLine>-XX:ReservedCodeCacheSize=512m ${argLine}</argLine>
             </configuration>
-        </plugin>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <configuration>
+          <!-- Test runtime dependencies -->
+          <ignoredUnusedDeclaredDependencies>
+            <ignoredUnusedDeclaredDependency>org.apache.phoenix:phoenix-hbase-compat-${hbase.compat.version}</ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>org.apache.zookeeper:zookeeper</ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>org.apache.hbase:hbase-testing-util</ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>org.apache.hbase:hbase-it</ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>org.apache.hadoop:hadoop-minicluster</ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>org.apache.omid:omid-tso-server</ignoredUnusedDeclaredDependency>
+          </ignoredUnusedDeclaredDependencies>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-javadoc-plugin</artifactId>
+        <configuration>
+          <skip>${skip.spark.javadoc}</skip>
+        </configuration>
+      </plugin>
     </plugins>
   </build>
-    <profiles>
+  <profiles>
     <profile>
       <!-- This only applies when building with 5.1 -->
       <id>exclude-tephra</id>
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/AggregateIT.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/AggregateIT.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/AggregateIT.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/AggregateIT.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/DataSourceApiIT.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/DataSourceApiIT.java
similarity index 94%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/DataSourceApiIT.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/DataSourceApiIT.java
index f25a693..efcef71 100644
--- a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/DataSourceApiIT.java
+++ b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/DataSourceApiIT.java
@@ -40,6 +40,7 @@ import java.util.Arrays;
 
 import static org.apache.phoenix.spark.sql.connector.PhoenixDataSource.JDBC_URL;
 import static org.apache.phoenix.spark.sql.connector.PhoenixDataSource.ZOOKEEPER_URL;
+import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_ZK;
 import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL;
 import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR;
 import static org.junit.Assert.*;
@@ -103,6 +104,13 @@ public class DataSourceApiIT extends ParallelStatsDisabledIT {
             .save();
 
             // Use jdbcUrl
+            // In Phoenix 5.2+ getUrl() returns a JDBC URL; in earlier versions it returns a ZK
+            // quorum
+            String jdbcUrl = getUrl();
+            if (!jdbcUrl.startsWith(JDBC_PROTOCOL)) {
+                jdbcUrl = JDBC_PROTOCOL_ZK + JDBC_PROTOCOL_SEPARATOR + jdbcUrl;
+            }
+
             Dataset<Row> df2 =
                     spark.createDataFrame(
                         Arrays.asList(RowFactory.create(2, "x")),
@@ -110,7 +118,7 @@ public class DataSourceApiIT extends ParallelStatsDisabledIT {
 
             df2.write().format("phoenix").mode(SaveMode.Append)
                 .option("table", tableName)
-                .option(JDBC_URL, JDBC_PROTOCOL + JDBC_PROTOCOL_SEPARATOR + getUrl())
+                .option(JDBC_URL, jdbcUrl)
                 .save();
 
             // Use default from hbase-site.xml
@@ -147,8 +155,7 @@ public class DataSourceApiIT extends ParallelStatsDisabledIT {
             // Use jdbcUrl
             Dataset df2Read = spark.read().format("phoenix")
                     .option("table", tableName)
-                    .option(PhoenixDataSource.JDBC_URL,
-                                JDBC_PROTOCOL + JDBC_PROTOCOL_SEPARATOR + getUrl())
+                    .option(PhoenixDataSource.JDBC_URL, jdbcUrl)
                     .load();
 
             assertEquals(3l, df2Read.count());
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/OrderByIT.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/OrderByIT.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/OrderByIT.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/OrderByIT.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/SaltedTableIT.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/SaltedTableIT.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/SaltedTableIT.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/SaltedTableIT.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/SparkUtil.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/SparkUtil.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/SparkUtil.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/SparkUtil.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/PhoenixTestingDataSource.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/PhoenixTestingDataSource.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/PhoenixTestingDataSource.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/PhoenixTestingDataSource.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/PhoenixTestingTable.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/PhoenixTestingTable.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/PhoenixTestingTable.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/PhoenixTestingTable.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestPartitionReadFactory.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestPartitionReadFactory.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestPartitionReadFactory.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestPartitionReadFactory.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestPartitionReader.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestPartitionReader.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestPartitionReader.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestPartitionReader.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestScan.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestScan.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestScan.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestScan.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestScanBuilder.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestScanBuilder.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestScanBuilder.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixTestScanBuilder.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestBatchWrite.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestBatchWrite.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestBatchWrite.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestBatchWrite.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestDataWriter.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestDataWriter.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestDataWriter.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestDataWriter.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestDataWriterFactory.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestDataWriterFactory.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestDataWriterFactory.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestDataWriterFactory.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestWriteBuilder.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestWriteBuilder.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestWriteBuilder.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestWriteBuilder.java
diff --git a/phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestingWriterCommitMessage.java b/phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestingWriterCommitMessage.java
similarity index 100%
rename from phoenix5-spark3-it/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestingWriterCommitMessage.java
rename to phoenix5-spark3/src/it/java/org/apache/phoenix/spark/sql/connector/writer/PhoenixTestingWriterCommitMessage.java
diff --git a/phoenix5-spark3-it/src/it/resources/globalSetup.sql b/phoenix5-spark3/src/it/resources/globalSetup.sql
similarity index 100%
rename from phoenix5-spark3-it/src/it/resources/globalSetup.sql
rename to phoenix5-spark3/src/it/resources/globalSetup.sql
diff --git a/phoenix5-spark3-it/src/it/resources/log4j.xml b/phoenix5-spark3/src/it/resources/log4j.xml
similarity index 100%
rename from phoenix5-spark3-it/src/it/resources/log4j.xml
rename to phoenix5-spark3/src/it/resources/log4j.xml
diff --git a/phoenix5-spark3-it/src/it/resources/tenantSetup.sql b/phoenix5-spark3/src/it/resources/tenantSetup.sql
similarity index 100%
rename from phoenix5-spark3-it/src/it/resources/tenantSetup.sql
rename to phoenix5-spark3/src/it/resources/tenantSetup.sql
diff --git a/phoenix5-spark3-it/src/it/resources/transactionTableSetup.sql b/phoenix5-spark3/src/it/resources/transactionTableSetup.sql
similarity index 100%
rename from phoenix5-spark3-it/src/it/resources/transactionTableSetup.sql
rename to phoenix5-spark3/src/it/resources/transactionTableSetup.sql
diff --git a/phoenix5-spark3-it/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala b/phoenix5-spark3/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala
similarity index 100%
rename from phoenix5-spark3-it/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala
rename to phoenix5-spark3/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala
diff --git a/phoenix5-spark3-it/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala b/phoenix5-spark3/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
similarity index 100%
rename from phoenix5-spark3-it/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
rename to phoenix5-spark3/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
diff --git a/phoenix5-spark3-it/src/it/scala/org/apache/phoenix/spark/PhoenixSparkITTenantSpecific.scala b/phoenix5-spark3/src/it/scala/org/apache/phoenix/spark/PhoenixSparkITTenantSpecific.scala
similarity index 100%
rename from phoenix5-spark3-it/src/it/scala/org/apache/phoenix/spark/PhoenixSparkITTenantSpecific.scala
rename to phoenix5-spark3/src/it/scala/org/apache/phoenix/spark/PhoenixSparkITTenantSpecific.scala
diff --git a/phoenix5-spark3/src/main/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixPartitionReader.java b/phoenix5-spark3/src/main/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixPartitionReader.java
index 6a7ee4d..ee8a80c 100644
--- a/phoenix5-spark3/src/main/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixPartitionReader.java
+++ b/phoenix5-spark3/src/main/java/org/apache/phoenix/spark/sql/connector/reader/PhoenixPartitionReader.java
@@ -17,7 +17,6 @@
  */
 package org.apache.phoenix.spark.sql.connector.reader;
 
-import com.google.protobuf.InvalidProtocolBufferException;
 import java.io.IOException;
 import java.sql.Connection;
 import java.sql.DriverManager;
@@ -33,8 +32,8 @@ import org.apache.hadoop.hbase.util.Bytes;
 
 import org.apache.phoenix.compile.QueryPlan;
 import org.apache.phoenix.compile.StatementContext;
-import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
 import org.apache.phoenix.coprocessor.generated.PTableProtos.PTable;
+import org.apache.phoenix.coprocessorclient.BaseScannerRegionObserverConstants;
 import org.apache.phoenix.iterate.ConcatResultIterator;
 import org.apache.phoenix.iterate.LookAheadResultIterator;
 import org.apache.phoenix.iterate.MapReduceParallelScanGrouper;
@@ -85,7 +84,7 @@ public class PhoenixPartitionReader implements PartitionReader<InternalRow> {
             PTable pTable = null;
             try {
                 pTable = PTable.parseFrom(options.getPTableCacheBytes());
-            } catch (InvalidProtocolBufferException e) {
+            } catch (Exception e) {
                 throw new RuntimeException("Parsing the PTable Cache Bytes is failing ", e);
             }
             org.apache.phoenix.schema.PTable table = PTableImpl.createFromProto(pTable);
@@ -121,7 +120,7 @@ public class PhoenixPartitionReader implements PartitionReader<InternalRow> {
                     .getQueryServices().getRenewLeaseThresholdMilliSeconds();
             for (Scan scan : scans) {
                 // For MR, skip the region boundary check exception if we encounter a split. ref: PHOENIX-2599
-                scan.setAttribute(BaseScannerRegionObserver.SKIP_REGION_BOUNDARY_CHECK, Bytes.toBytes(true));
+                scan.setAttribute(BaseScannerRegionObserverConstants.SKIP_REGION_BOUNDARY_CHECK, Bytes.toBytes(true));
 
                 PeekingResultIterator peekingResultIterator;
                 ScanMetricsHolder scanMetricsHolder =
diff --git a/pom.xml b/pom.xml
index 42b7056..d0075c3 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1,5 +1,27 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<?xml version='1.0'?>
+<!--
+
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied.  See the License for the
+ specific language governing permissions and limitations
+ under the License.
+
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.phoenix</groupId>
   <artifactId>phoenix-connectors</artifactId>
@@ -13,7 +35,7 @@
       <name>The Apache Software License, Version 2.0</name>
       <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
       <distribution>repo</distribution>
-      <comments/>
+      <comments />
     </license>
   </licenses>
 
@@ -42,29 +64,29 @@
   </scm>
 
   <modules>
-      <!-- Changing the module order here may cause maven to get stuck in an infinite loop -->
-      <module>phoenix5-hive</module>
-      <module>phoenix5-hive-shaded</module>
-      <module>phoenix5-spark</module>
-      <module>phoenix5-spark-shaded</module>
-      <module>phoenix5-spark3</module>
-      <module>phoenix5-spark3-it</module>
-      <module>phoenix5-spark3-shaded</module>
-      <module>phoenix5-connectors-assembly</module>
+    <!-- Changing the module order here may cause maven to get stuck in an 
+      infinite loop -->
+    <module>phoenix5-hive</module>
+    <module>phoenix5-hive-shaded</module>
+    <module>phoenix5-spark</module>
+    <module>phoenix5-spark-shaded</module>
+    <module>phoenix5-spark3</module>
+    <module>phoenix5-spark3-shaded</module>
+    <module>phoenix5-connectors-assembly</module>
   </modules>
 
   <properties>
     <!-- Phoenix Version -->
-    <phoenix.version>5.1.3</phoenix.version>
-    <omid.version>1.0.2</omid.version>
+    <phoenix.version>5.2.0</phoenix.version>
+    <omid.version>1.1.2</omid.version>
     <commons-beanutils.version>1.9.4</commons-beanutils.version>
-    <phoenix.thirdparty.version>2.0.0</phoenix.thirdparty.version>
-    <!-- The should match the versions used to build HBase and Hadoop -->
-    <hbase.version>2.4.16</hbase.version>
-    <hbase.compat.version>2.4.1</hbase.compat.version>
-    <hadoop.version>3.1.2</hadoop.version>
-    <zookeeper.version>3.5.7</zookeeper.version>
-    <hbase-thirdparty-version>4.1.4</hbase-thirdparty-version>
+    <phoenix.thirdparty.version>2.1.0</phoenix.thirdparty.version>
+    <!-- These should match the versions used to build HBase and Hadoop -->
+    <hbase.version>2.5.8-hadoop3</hbase.version>
+    <hbase.compat.version>2.5.0</hbase.compat.version>
+    <hadoop.version>3.2.4</hadoop.version>
+    <zookeeper.version>3.8.4</zookeeper.version>
+    <hbase-thirdparty-version>4.1.5</hbase-thirdparty-version>
     <!-- Hbase version dependent versions end -->
 
     <!-- General Properties -->
@@ -75,15 +97,16 @@
     <maven.compiler.source>${compileSource}</maven.compiler.source>
     <maven.compiler.target>${compileSource}</maven.compiler.target>
 
-    <!-- JVM version dependent JVM options for running tests. Keep mostly in sync with HBase-->
-    <!-- Note that some components, like Spark 2.x just don't support anything newer than Java 8,
-    and the build/tests will fail on those modules anyway  -->
+    <!-- JVM version dependent JVM options for running tests. Keep mostly
+      in sync with HBase -->
+    <!-- Note that some components, like Spark 2.x just don't support anything
+      newer than Java 8, and the build/tests will fail on those modules anyway -->
     <surefire.Xmx>3000m</surefire.Xmx>
 
     <!-- Profiling is not enabled in the repo. Placeholder. -->
     <jacocoArgLine></jacocoArgLine>
-    <!-- Hard to read, but ScalaTest cannot handle multiline argLine. 
-         It cannot resolve @{} either-->
+    <!-- Hard to read, but ScalaTest cannot handle multiline argLine. It 
+      cannot resolve @{} either -->
     <phoenix-surefire.argLine>-enableassertions -Xmx${surefire.Xmx} -Djava.security.egd=file:/dev/./urandom -Djava.net.preferIPv4Stack=true -Djava.awt.headless=true -Djdk.net.URLClassPath.disableClassPathURLCheck=true -Dorg.apache.hbase.thirdparty.io.netty.leakDetection.level=advanced -Dio.netty.eventLoopThreads=3 -Duser.timezone="America/Los_Angeles" -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/ "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path [...]
     <phoenix-surefire.jdk8.tuning.flags>-XX:NewRatio=4 -XX:SurvivorRatio=8 -XX:+UseCompressedOops -XX:+UseConcMarkSweepGC -XX:+DisableExplicitGC -XX:+UseCMSInitiatingOccupancyOnly -XX:+CMSClassUnloadingEnabled -XX:+CMSScavengeBeforeRemark -XX:CMSInitiatingOccupancyFraction=68</phoenix-surefire.jdk8.tuning.flags>
     <phoenix-surefire.jdk11.flags>-Dorg.apache.hbase.thirdparty.io.netty.tryReflectionSetAccessible=true --add-modules jdk.unsupported --add-opens java.base/java.nio=ALL-UNNAMED --add-opens java.base/sun.nio.ch=ALL-UNNAMED --add-opens java.base/java.lang=ALL-UNNAMED --add-opens java.base/jdk.internal.ref=ALL-UNNAMED --add-opens java.base/java.lang.reflect=ALL-UNNAMED --add-opens java.base/java.util=ALL-UNNAMED --add-opens java.base/java.util.concurrent=ALL-UNNAMED --add-exports java.base [...]
@@ -104,11 +127,11 @@
     <spark3.version>3.2.4</spark3.version>
     <scala.version.for.spark3>2.12.18</scala.version.for.spark3>
     <scala.binary.version.for.spark3>2.12</scala.binary.version.for.spark3>
-    
+
     <log4j.version>1.2.17</log4j.version>
-    <log4j2.version>2.18.0</log4j2.version>
+    <log4j2.version>2.23.1</log4j2.version>
     <disruptor.version>3.3.6</disruptor.version>
-    <slf4j.version>1.7.30</slf4j.version>
+    <slf4j.version>1.7.36</slf4j.version>
     <commons-collections.version>3.2.2</commons-collections.version>
     <commons-csv.version>1.0</commons-csv.version>
     <findbugs-annotations.version>1.3.9-1</findbugs-annotations.version>
@@ -184,8 +207,8 @@
           <artifactId>maven-compiler-plugin</artifactId>
         </plugin>
         <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-assembly-plugin</artifactId>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-assembly-plugin</artifactId>
         </plugin>
         <!--This plugin's configuration is used to store Eclipse m2e settings 
           only. It has no influence on the Maven build itself. -->
@@ -220,8 +243,8 @@
         <plugin>
           <groupId>org.apache.rat</groupId>
           <artifactId>apache-rat-plugin</artifactId>
-          <!-- Avoid defining exclusions in pluginManagement as they are global.
-               We already inherit some from the ASF parent pom. -->
+          <!-- Avoid defining exclusions in pluginManagement as they are 
+            global. We already inherit some from the ASF parent pom. -->
         </plugin>
         <plugin>
           <groupId>org.owasp</groupId>
@@ -249,11 +272,10 @@
             <maxHeap>2048</maxHeap>
           </configuration>
         </plugin>
-        <!-- We put slow-running tests into src/it and run them during the
-            integration-test phase using the failsafe plugin. This way
-            developers can run unit tests conveniently from the IDE or via
-            "mvn package" from the command line without triggering time
-            consuming integration tests. -->
+        <!-- We put slow-running tests into src/it and run them during the
+          integration-test phase using the failsafe plugin. This way developers can
+          run unit tests conveniently from the IDE or via "mvn package" from the command
+          line without triggering time consuming integration tests. -->
         <plugin>
           <groupId>org.codehaus.mojo</groupId>
           <artifactId>build-helper-maven-plugin</artifactId>
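
For context, build-helper-maven-plugin is what lets the src/it tree act as a test-source root. A minimal sketch of the usual wiring, assuming an execution like the following (the actual configuration sits outside this hunk, and the execution id is hypothetical):

          <executions>
            <execution>
              <id>add-it-source</id> <!-- hypothetical id -->
              <goals>
                <goal>add-test-source</goal>
              </goals>
              <configuration>
                <sources>
                  <!-- registers the integration tests as an additional test-source root -->
                  <source>src/it/java</source>
                </sources>
              </configuration>
            </execution>
          </executions>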
@@ -298,7 +320,8 @@
           <artifactId>maven-failsafe-plugin</artifactId>
           <!-- Common settings for all executions -->
           <configuration>
-            <forkNode implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory"/>
+            <forkNode
+              implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory" />
             <encoding>UTF-8</encoding>
             <forkCount>${numForkedIT}</forkCount>
             <runOrder>alphabetical</runOrder>
@@ -340,8 +363,8 @@
           </configuration>
         </plugin>
         <plugin>
-          <!-- Allows us to get the apache-ds bundle artifacts
-          TODO What does that mean ? We don't even use this directly. -->
+          <!-- Allows us to get the apache-ds bundle artifacts. TODO: what
+            does that mean? We don't even use this directly. -->
           <groupId>org.apache.felix</groupId>
           <artifactId>maven-bundle-plugin</artifactId>
           <version>${maven.bundle.version}</version>
@@ -370,8 +393,8 @@
             </goals>
           </execution>
         </executions>
-        <!-- FIXME This is an older version, as the current checkstyle configuration doesn't work
-        on newer checkstyle versions.-->
+        <!-- FIXME This is an older version, as the current checkstyle
+          configuration doesn't work on newer checkstyle versions. -->
         <dependencies>
           <dependency>
             <groupId>com.puppycrawl.tools</groupId>
@@ -412,7 +435,8 @@
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
         <configuration>
-          <forkNode implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory"/>
+          <forkNode
+            implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory" />
           <forkCount>${numForkedUT}</forkCount>
           <reuseForks>true</reuseForks>
           <redirectTestOutputToFile>${test.output.tofile}</redirectTestOutputToFile>
@@ -463,19 +487,19 @@
         </configuration>
       </plugin>
       <plugin>
-          <artifactId>maven-dependency-plugin</artifactId>
-          <executions>
-            <execution>
-              <id>enforce-dependencies</id>
-              <goals>
-                <goal>analyze-only</goal>
-              </goals>
-              <configuration>
-                <failOnWarning>true</failOnWarning>
-              </configuration>
-            </execution>
-          </executions>
-        </plugin>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>enforce-dependencies</id>
+            <goals>
+              <goal>analyze-only</goal>
+            </goals>
+            <configuration>
+              <failOnWarning>true</failOnWarning>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
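
Since analyze-only now fails the build on warnings, a module with a dependency that the analyzer cannot see in use would need an explicit exception. A minimal sketch, assuming a hypothetical runtime-only artifact:

      <plugin>
        <artifactId>maven-dependency-plugin</artifactId>
        <configuration>
          <ignoredUnusedDeclaredDependencies>
            <!-- hypothetical example: a runtime-only logging binding -->
            <ignoredUnusedDeclaredDependency>org.slf4j:slf4j-reload4j</ignoredUnusedDeclaredDependency>
          </ignoredUnusedDeclaredDependencies>
        </configuration>
      </plugin>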
     </plugins>
   </build>
 
@@ -493,6 +517,28 @@
           </exclusion>
         </exclusions>
       </dependency>
+      <dependency>
+        <groupId>org.apache.phoenix</groupId>
+        <artifactId>phoenix-core-client</artifactId>
+        <version>${phoenix.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>org.apache.hbase.thirdparty</groupId>
+            <artifactId>hbase-shaded-jersey</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.phoenix</groupId>
+        <artifactId>phoenix-core-server</artifactId>
+        <version>${phoenix.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>org.apache.hbase.thirdparty</groupId>
+            <artifactId>hbase-shaded-jersey</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
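
With phoenix-core-client and phoenix-core-server managed here, a consuming module can declare either one without repeating the version. A minimal sketch of what a child pom would contain:

      <dependency>
        <groupId>org.apache.phoenix</groupId>
        <artifactId>phoenix-core-client</artifactId>
        <!-- version and the hbase-shaded-jersey exclusion are inherited
             from the parent's dependencyManagement -->
      </dependency>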
       <dependency>
         <groupId>org.apache.phoenix</groupId>
         <artifactId>phoenix-hbase-compat-${hbase.compat.version}</artifactId>
@@ -507,15 +553,15 @@
       </dependency>
       <dependency>
         <groupId>org.apache.omid</groupId>
-        <artifactId>omid-tso-server-hbase2.x</artifactId>
+        <artifactId>omid-tso-server</artifactId>
         <scope>test</scope>
         <version>${omid.version}</version>
       </dependency>
       <dependency>
         <groupId>org.apache.omid</groupId>
-        <artifactId>omid-tso-server-hbase2.x</artifactId>
+        <artifactId>omid-tso-server</artifactId>
         <scope>test</scope>
-       <version>${omid.version}</version>
+        <version>${omid.version}</version>
         <type>test-jar</type>
       </dependency>
       <dependency>
@@ -548,6 +594,11 @@
         <artifactId>phoenix5-hive</artifactId>
         <version>${project.version}</version>
       </dependency>
+      <!-- <dependency> -->
+      <!-- <groupId>org.apache.phoenix</groupId> -->
+      <!-- <artifactId>phoenix5-hive-it</artifactId> -->
+      <!-- <version>${project.version}</version> -->
+      <!-- </dependency> -->
       <dependency>
         <groupId>org.apache.phoenix</groupId>
         <artifactId>phoenix5-hive-shaded</artifactId>
@@ -560,7 +611,8 @@
       </dependency>
       <!-- HBase dependencies -->
 
-      <!-- These are only needed so that we can set them provided and exclude from the shaded jars -->
+      <!-- These are only needed so that we can set them provided and exclude 
+        from the shaded jars -->
       <dependency>
         <groupId>org.apache.hbase</groupId>
         <artifactId>hbase-protocol-shaded</artifactId>
@@ -623,7 +675,7 @@
         <artifactId>hbase-testing-util</artifactId>
         <version>${hbase.version}</version>
         <scope>test</scope>
-        <optional>true</optional>
+        <!-- <optional>true</optional> -->
         <exclusions>
           <exclusion>
             <groupId>org.jruby</groupId>
@@ -648,6 +700,17 @@
           </exclusion>
         </exclusions>
       </dependency>
+      <dependency>
+        <groupId>org.apache.hbase</groupId>
+        <artifactId>hbase-it</artifactId>
+        <version>${hbase.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>org.jruby</groupId>
+            <artifactId>jruby-complete</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
       <dependency>
         <groupId>org.apache.hbase</groupId>
         <artifactId>hbase-protocol</artifactId>
@@ -663,7 +726,6 @@
         <artifactId>hbase-common</artifactId>
         <version>${hbase.version}</version>
         <type>test-jar</type>
-        <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>org.apache.hbase</groupId>
@@ -675,7 +737,6 @@
         <artifactId>hbase-client</artifactId>
         <version>${hbase.version}</version>
         <type>test-jar</type>
-        <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>org.apache.hbase</groupId>
@@ -687,7 +748,6 @@
         <artifactId>hbase-server</artifactId>
         <version>${hbase.version}</version>
         <type>test-jar</type>
-        <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>org.apache.hbase</groupId>
@@ -699,7 +759,6 @@
         <artifactId>hbase-hadoop-compat</artifactId>
         <version>${hbase.version}</version>
         <type>test-jar</type>
-        <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>org.apache.hbase</groupId>
@@ -711,7 +770,6 @@
         <artifactId>hbase-hadoop2-compat</artifactId>
         <version>${hbase.version}</version>
         <type>test-jar</type>
-        <scope>test</scope>
       </dependency>
 
       <!-- Hadoop Dependencies -->
@@ -754,22 +812,20 @@
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-mapreduce-client-core</artifactId>
         <version>${hadoop.version}</version>
-        <scope>provided</scope>
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-minicluster</artifactId>
         <version>${hadoop.version}</version>
-        <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-client-minicluster</artifactId>
         <version>${hadoop.version}</version>
-        <scope>test</scope>
       </dependency>
 
-      <!-- Required for mini-cluster since hbase built against old version of hadoop -->
+      <!-- Required for the mini-cluster, since HBase is built against an old
+        version of Hadoop -->
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-auth</artifactId>
@@ -785,11 +841,52 @@
         <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
         <version>${hadoop.version}</version>
       </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+        <type>test-jar</type>
+        <version>${hadoop.version}</version>
+      </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-client</artifactId>
         <version>${hadoop.version}</version>
       </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-common</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-client</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-registry</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-server-common</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-server-web-proxy</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-server-applicationhistoryservice</artifactId>
+        <version>${hadoop.version}</version>
+      </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-hdfs</artifactId>
@@ -799,7 +896,7 @@
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-hdfs</artifactId>
         <version>${hadoop.version}</version>
-        <type>test-jar</type> <!-- this does not work which is typical for maven.-->
+        <type>test-jar</type> <!-- this does not work, which is typical for Maven. -->
         <scope>test</scope>
       </dependency>
       <dependency>
@@ -836,13 +933,19 @@
         <artifactId>junit</artifactId>
         <version>${junit.version}</version>
       </dependency>
-     <dependency>
+      <dependency>
         <groupId>org.mockito</groupId>
         <artifactId>mockito-all</artifactId>
         <version>${mockito-all.version}</version>
         <scope>test</scope>
       </dependency>
-       <dependency>
+      <dependency>
+        <groupId>org.mockito</groupId>
+        <artifactId>mockito-core</artifactId>
+        <version>${mockito-all.version}</version>
+        <scope>test</scope>
+      </dependency>
+      <dependency>
         <groupId>com.lmax</groupId>
         <artifactId>disruptor</artifactId>
         <version>${disruptor.version}</version>
@@ -858,8 +961,8 @@
       </activation>
     </profile>
    <!-- See PHOENIX-6997 when building with Java 9+ -->
-    <!-- Even if we set the correct JVM parameters for Phoenix, some components don't support
-    anything newer than Java 8 -->
+    <!-- Even if we set the correct JVM parameters for Phoenix, some components
+      don't support anything newer than Java 8 -->
     <profile>
       <id>build-with-jdk11</id>
       <activation>
@@ -878,7 +981,7 @@
        <argLine>${phoenix-surefire.jdk11.flags} ${phoenix-surefire.jdk17.flags} ${phoenix-surefire.jdk17.tuning.flags} ${phoenix-surefire.argLine}</argLine>
       </properties>
     </profile>
-    <!-- disable doclint with 1.8+ JDKs-->
+    <!-- disable doclint with 1.8+ JDKs -->
     <profile>
       <id>java8-doclint-disabled</id>
       <activation>
@@ -890,13 +993,13 @@
     </profile>
     <!-- Skip Javadoc for 1.8+ See PHOENIX-6922 -->
     <profile>
-        <id>disable-javadoc-for-spark</id>
-        <activation>
-            <jdk>(8,)</jdk>
-        </activation>
-        <properties>
-            <skip.spark.javadoc>true</skip.spark.javadoc>
-        </properties>
+      <id>disable-javadoc-for-spark</id>
+      <activation>
+        <jdk>(8,)</jdk>
+      </activation>
+      <properties>
+        <skip.spark.javadoc>true</skip.spark.javadoc>
+      </properties>
     </profile>
     <!-- this profile should be activated for release builds -->
     <profile>
@@ -930,7 +1033,7 @@
       <id>spotbugs-site</id>
       <activation>
         <property>
-            <name>!spotbugs.site</name>
+          <name>!spotbugs.site</name>
         </property>
       </activation>
       <build>
