This is an automated email from the ASF dual-hosted git repository.

volodymyr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit e27ef55f4ca1392ff3c29b16f6ad37f0081f72a7
Author: Volodymyr Vysotskyi <[email protected]>
AuthorDate: Sun Mar 22 23:42:59 2020 +0200

    DRILL-6604: Upgrade Drill Hive client to Hive3.1 version
    
    closes #2038
---
 contrib/storage-hive/core/pom.xml                  | 30 +++++++++++++++--
 .../storage-hive/core/src/main/codegen/config.fmpp |  3 +-
 .../main/codegen/{config.fmpp => configHive3.fmpp} |  3 +-
 .../{config.fmpp => data/Hive2DateTypes.tdd}       | 24 ++++++++++---
 .../{config.fmpp => data/Hive3DateTypes.tdd}       | 24 ++++++++++---
 .../core/src/main/codegen/data/HiveTypes.tdd       | 14 --------
 .../codegen/templates/ObjectInspectorHelper.java   | 24 +++++++++----
 .../main/codegen/templates/ObjectInspectors.java   | 37 ++++++++++++++------
 .../exec/store/hive/HiveMetadataProvider.java      |  4 +--
 .../drill/exec/store/hive/HiveUtilities.java       | 39 ++++++++++++----------
 .../client/DrillHiveMetaStoreClientFactory.java    | 36 ++++++++++++++++++--
 .../hive/writers/primitive/HiveDateWriter.java     | 16 +++++----
 .../writers/primitive/HiveTimestampWriter.java     | 16 +++++----
 .../log4j/util/Strings.java}                       | 27 ++++++---------
 .../apache/drill/exec/hive/HiveTestFixture.java    |  2 ++
 .../apache/drill/exec/hive/HiveTestUtilities.java  | 31 ++++++++---------
 .../hive/BaseTestHiveImpersonation.java            | 28 +++++++++++++---
 .../exec/store/hive/HiveTestDataGenerator.java     |  6 ++--
 exec/rpc/pom.xml                                   | 10 ++----
 pom.xml                                            | 15 +++++++--
 20 files changed, 262 insertions(+), 127 deletions(-)

diff --git a/contrib/storage-hive/core/pom.xml b/contrib/storage-hive/core/pom.xml
index 50a782b..60603ee 100644
--- a/contrib/storage-hive/core/pom.xml
+++ b/contrib/storage-hive/core/pom.xml
@@ -30,6 +30,9 @@
   <artifactId>drill-storage-hive-core</artifactId>
   <packaging>jar</packaging>
   <name>contrib/hive-storage-plugin/core</name>
+  <properties>
+    <freemarker.conf.file>src/main/codegen/configHive3.fmpp</freemarker.conf.file>
+  </properties>
 
   <dependencies>
     <dependency>
@@ -61,6 +64,14 @@
           <groupId>commons-codec</groupId>
           <artifactId>commons-codec</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.apache.logging.log4j</groupId>
+          <artifactId>log4j-slf4j-impl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.logging.log4j</groupId>
+          <artifactId>log4j-1.2-api</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -146,7 +157,7 @@
     <dependency>
       <groupId>org.apache.hive.hcatalog</groupId>
       <artifactId>hive-hcatalog-core</artifactId>
-      <version>2.3.2</version>
+      <version>${hive.version}</version>
       <scope>test</scope>
       <exclusions>
         <exclusion>
@@ -189,6 +200,18 @@
           <groupId>org.apache.logging.log4j</groupId>
           <artifactId>log4j-web</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>servlet-api-2.5</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>io.netty</groupId>
+          <artifactId>netty</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>io.netty</groupId>
+          <artifactId>netty-all</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
 
@@ -208,7 +231,7 @@
               <goal>generate</goal>
             </goals>
             <configuration>
-              <config>src/main/codegen/config.fmpp</config>
+              <config>${freemarker.conf.file}</config>
               <output>${project.build.directory}/generated-sources</output>
               <templates>src/main/codegen/templates</templates>
             </configuration>
@@ -220,6 +243,9 @@
   <profiles>
     <profile>
       <id>mapr</id>
+      <properties>
+        <freemarker.conf.file>src/main/codegen/config.fmpp</freemarker.conf.file>
+      </properties>
       <build>
         <plugins>
           <plugin>
diff --git a/contrib/storage-hive/core/src/main/codegen/config.fmpp b/contrib/storage-hive/core/src/main/codegen/config.fmpp
index 731d67d..a460708 100644
--- a/contrib/storage-hive/core/src/main/codegen/config.fmpp
+++ b/contrib/storage-hive/core/src/main/codegen/config.fmpp
@@ -17,7 +17,8 @@
 #
 
 data: {
-    drillOI:tdd(../data/HiveTypes.tdd)
+    drillDataType:  tdd(../data/Hive2DateTypes.tdd),
+    drillOI:        tdd(../data/HiveTypes.tdd)
 }
 freemarkerLinks: {
     includes: includes/
diff --git a/contrib/storage-hive/core/src/main/codegen/config.fmpp b/contrib/storage-hive/core/src/main/codegen/configHive3.fmpp
similarity index 89%
copy from contrib/storage-hive/core/src/main/codegen/config.fmpp
copy to contrib/storage-hive/core/src/main/codegen/configHive3.fmpp
index 731d67d..cc36fc4 100644
--- a/contrib/storage-hive/core/src/main/codegen/config.fmpp
+++ b/contrib/storage-hive/core/src/main/codegen/configHive3.fmpp
@@ -17,7 +17,8 @@
 #
 
 data: {
-    drillOI:tdd(../data/HiveTypes.tdd)
+    drillDataType:  tdd(../data/Hive3DateTypes.tdd),
+    drillOI:        tdd(../data/HiveTypes.tdd)
 }
 freemarkerLinks: {
     includes: includes/
diff --git a/contrib/storage-hive/core/src/main/codegen/config.fmpp b/contrib/storage-hive/core/src/main/codegen/data/Hive2DateTypes.tdd
similarity index 61%
copy from contrib/storage-hive/core/src/main/codegen/config.fmpp
copy to contrib/storage-hive/core/src/main/codegen/data/Hive2DateTypes.tdd
index 731d67d..af42ee1 100644
--- a/contrib/storage-hive/core/src/main/codegen/config.fmpp
+++ b/contrib/storage-hive/core/src/main/codegen/data/Hive2DateTypes.tdd
@@ -16,9 +16,23 @@
 # limitations under the License.
 #
 
-data: {
-    drillOI:tdd(../data/HiveTypes.tdd)
-}
-freemarkerLinks: {
-    includes: includes/
+{
+  map: [
+    {
+      hiveType: "DATE",
+      hiveOI: "DateObjectInspector",
+      javaType: "java.sql.Date",
+      writableType: "org.apache.hadoop.hive.serde2.io.DateWritable",
+      drillType: "Date",
+      needOIForDrillType: true
+    },
+    {
+      hiveType: "TIMESTAMP",
+      hiveOI: "TimestampObjectInspector",
+      javaType: "java.sql.Timestamp",
+      writableType: "org.apache.hadoop.hive.serde2.io.TimestampWritable",
+      drillType: "TimeStamp",
+      needOIForDrillType: true
+    }
+  ]
 }
diff --git a/contrib/storage-hive/core/src/main/codegen/config.fmpp b/contrib/storage-hive/core/src/main/codegen/data/Hive3DateTypes.tdd
similarity index 58%
copy from contrib/storage-hive/core/src/main/codegen/config.fmpp
copy to contrib/storage-hive/core/src/main/codegen/data/Hive3DateTypes.tdd
index 731d67d..2c873e8 100644
--- a/contrib/storage-hive/core/src/main/codegen/config.fmpp
+++ b/contrib/storage-hive/core/src/main/codegen/data/Hive3DateTypes.tdd
@@ -16,9 +16,23 @@
 # limitations under the License.
 #
 
-data: {
-    drillOI:tdd(../data/HiveTypes.tdd)
-}
-freemarkerLinks: {
-    includes: includes/
+{
+  map: [
+    {
+      hiveType: "DATE",
+      hiveOI: "DateObjectInspector",
+      javaType: "org.apache.hadoop.hive.common.type.Date",
+      writableType: "org.apache.hadoop.hive.serde2.io.DateWritableV2",
+      drillType: "Date",
+      needOIForDrillType: true
+    },
+    {
+      hiveType: "TIMESTAMP",
+      hiveOI: "TimestampObjectInspector",
+      javaType: "org.apache.hadoop.hive.common.type.Timestamp",
+      writableType: "org.apache.hadoop.hive.serde2.io.TimestampWritableV2",
+      drillType: "TimeStamp",
+      needOIForDrillType: true
+    }
+  ]
 }
diff --git a/contrib/storage-hive/core/src/main/codegen/data/HiveTypes.tdd b/contrib/storage-hive/core/src/main/codegen/data/HiveTypes.tdd
index 3ea9dbb..0133dcd 100644
--- a/contrib/storage-hive/core/src/main/codegen/data/HiveTypes.tdd
+++ b/contrib/storage-hive/core/src/main/codegen/data/HiveTypes.tdd
@@ -96,25 +96,11 @@
       needOIForDrillType: true
     },
     {
-      hiveType: "TIMESTAMP",
-      hiveOI: "TimestampObjectInspector",
-      javaType: "java.sql.Timestamp",
-      drillType: "TimeStamp",
-      needOIForDrillType: true
-    },
-    {
       hiveType: "DECIMAL",
       hiveOI: "HiveDecimalObjectInspector",
       javaType: "org.apache.hadoop.hive.common.type.HiveDecimal",
       drillType: "VarDecimal",
       needOIForDrillType: true
-    },
-    {
-      hiveType: "DATE",
-      hiveOI: "DateObjectInspector",
-      javaType: "java.sql.Date",
-      drillType: "Date",
-      needOIForDrillType: true
     }
   ]
 }
diff --git a/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectorHelper.java b/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectorHelper.java
index 6b91c41..3e1ec4e 100644
--- a/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectorHelper.java
+++ b/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectorHelper.java
@@ -48,7 +48,8 @@ public class ObjectInspectorHelper {
  private static Multimap<MinorType, Class> OIMAP_REQUIRED = ArrayListMultimap.create();
  private static Multimap<MinorType, Class> OIMAP_OPTIONAL = ArrayListMultimap.create();
   static {
-<#list drillOI.map as entry>
+<#assign entries = drillDataType.map + drillOI.map />
+<#list entries as entry>
     <#if entry.needOIForDrillType == true>
    OIMAP_REQUIRED.put(MinorType.${entry.drillType?upper_case}, Drill${entry.drillType}${entry.hiveOI}.Required.class);
    OIMAP_OPTIONAL.put(MinorType.${entry.drillType?upper_case}, Drill${entry.drillType}${entry.hiveOI}.Optional.class);
@@ -91,7 +92,8 @@ public class ObjectInspectorHelper {
       case PRIMITIVE: {
         PrimitiveObjectInspector poi = (PrimitiveObjectInspector)oi;
         switch(poi.getPrimitiveCategory()) {
-<#list drillOI.map as entry>
+<#assign entries = drillDataType.map + drillOI.map />
+<#list entries as entry>
           case ${entry.hiveType}:{
            JType holderClass = TypeHelper.getHolderType(m, returnType, TypeProtos.DataMode.OPTIONAL);
             block.assign(returnValueHolder, JExpr._new(holderClass));
@@ -126,7 +128,8 @@ public class ObjectInspectorHelper {
 
  private static Map<PrimitiveCategory, MinorType> TYPE_HIVE2DRILL = new HashMap<>();
   static {
-<#list drillOI.map as entry>
+<#assign entries = drillDataType.map + drillOI.map />
+<#list entries as entry>
    TYPE_HIVE2DRILL.put(PrimitiveCategory.${entry.hiveType}, MinorType.${entry.drillType?upper_case});
 </#list>
   }
@@ -156,7 +159,8 @@ public class ObjectInspectorHelper {
       case PRIMITIVE: {
         PrimitiveObjectInspector poi = (PrimitiveObjectInspector)oi;
         switch(poi.getPrimitiveCategory()) {
-<#list drillOI.map as entry>
+<#assign entries = drillDataType.map + drillOI.map />
+<#list entries as entry>
           case ${entry.hiveType}:{
             JConditional jc = block._if(returnValue.eq(JExpr._null()));
             jc._then().assign(returnValueHolder.ref("isSet"), JExpr.lit(0));
@@ -201,17 +205,25 @@
              jnullif._else().add(returnValueHolder.ref("buffer").invoke("setIndex").arg(JExpr.lit(0)).arg(data.ref("length")));
 
           <#elseif entry.hiveType == "TIMESTAMP">
-            JVar tsVar = jc._else().decl(m.directClass(java.sql.Timestamp.class.getCanonicalName()), "ts",
+            JVar tsVar = jc._else().decl(m.directClass(${entry.javaType}.class.getCanonicalName()), "ts",
               castedOI.invoke("getPrimitiveJavaObject").arg(returnValue));
+              <#if entry.javaType == "org.apache.hadoop.hive.common.type.Timestamp">
+            jc._else().assign(returnValueHolder.ref("value"), tsVar.invoke("toEpochMilli"));
+              <#else>
            // Bringing relative timestamp value without timezone info to timestamp value in UTC, since Drill keeps date-time values in UTC
            JVar localDateTimeVar = jc._else().decl(m.directClass(org.joda.time.LocalDateTime.class.getCanonicalName()), "localDateTime",
                JExpr._new(m.directClass(org.joda.time.LocalDateTime.class.getCanonicalName())).arg(tsVar));
            jc._else().assign(returnValueHolder.ref("value"), localDateTimeVar.invoke("toDateTime")
                .arg(m.directClass(org.joda.time.DateTimeZone.class.getCanonicalName()).staticRef("UTC")).invoke("getMillis"));
+              </#if>
           <#elseif entry.hiveType == "DATE">
-            JVar dVar = jc._else().decl(m.directClass(java.sql.Date.class.getCanonicalName()), "d",
+            JVar dVar = jc._else().decl(m.directClass(${entry.javaType}.class.getCanonicalName()), "d",
               castedOI.invoke("getPrimitiveJavaObject").arg(returnValue));
+              <#if entry.javaType == "org.apache.hadoop.hive.common.type.Date">
+            jc._else().assign(returnValueHolder.ref("value"), dVar.invoke("toEpochMilli"));
+              <#else>
             jc._else().assign(returnValueHolder.ref("value"), dVar.invoke("getTime"));
+              </#if>
           <#else>
             jc._else().assign(returnValueHolder.ref("value"),
               castedOI.invoke("get").arg(returnValue));
diff --git a/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectors.java b/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectors.java
index a539b7f..2dd6ce2 100644
--- a/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectors.java
+++ b/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectors.java
@@ -17,7 +17,8 @@
  */
 <@pp.dropOutputFile />
 
-<#list drillOI.map as entry>
+<#assign entries = drillDataType.map + drillOI.map />
+<#list entries as entry>
 <#if entry.needOIForDrillType == true>
 <@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/hive/Drill${entry.drillType}${entry.hiveOI}.java" />
 
@@ -32,12 +33,10 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.*;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.BooleanWritable;
@@ -204,7 +203,7 @@ public class Drill${entry.drillType}${entry.hiveOI} {
 
 <#elseif entry.drillType == "TimeStamp">
     @Override
-    public java.sql.Timestamp getPrimitiveJavaObject(Object o) {
+    public ${entry.javaType} getPrimitiveJavaObject(Object o) {
     <#if mode == "Optional">
       if (o == null) {
         return null;
@@ -213,14 +212,18 @@ public class Drill${entry.drillType}${entry.hiveOI} {
     <#else>
       final TimeStampHolder h = (TimeStampHolder) o;
     </#if>
+    <#if entry.javaType == "org.apache.hadoop.hive.common.type.Timestamp">
+      return ${entry.javaType}.ofEpochMilli(h.value);
+    <#else>
      org.joda.time.LocalDateTime dateTime = new org.joda.time.LocalDateTime(h.value, org.joda.time.DateTimeZone.UTC);
      // use "toDate()" to get java.util.Date object with exactly the same fields as this Joda date-time.
       // See more in Javadoc for "LocalDateTime#toDate()"
-      return new java.sql.Timestamp(dateTime.toDate().getTime());
+      return new ${entry.javaType}(dateTime.toDate().getTime());
+    </#if>
     }
 
     @Override
-    public TimestampWritable getPrimitiveWritableObject(Object o) {
+    public ${entry.writableType} getPrimitiveWritableObject(Object o) {
     <#if mode == "Optional">
       if (o == null) {
         return null;
@@ -229,15 +232,19 @@ public class Drill${entry.drillType}${entry.hiveOI} {
     <#else>
       final TimeStampHolder h = (TimeStampHolder) o;
     </#if>
+    <#if entry.javaType == "org.apache.hadoop.hive.common.type.Timestamp">
+      return new ${entry.writableType}(${entry.javaType}.ofEpochMilli(h.value));
+    <#else>
       org.joda.time.LocalDateTime dateTime = new org.joda.time.LocalDateTime(h.value, org.joda.time.DateTimeZone.UTC);
       // use "toDate()" to get java.util.Date object with exactly the same fields as this Joda date-time.
       // See more in Javadoc for "LocalDateTime#toDate()"
-      return new TimestampWritable(new java.sql.Timestamp(dateTime.toDate().getTime()));
+      return new ${entry.writableType}(new ${entry.javaType}(dateTime.toDate().getTime()));
+    </#if>
     }
 
 <#elseif entry.drillType == "Date">
     @Override
-    public java.sql.Date getPrimitiveJavaObject(Object o) {
+    public ${entry.javaType} getPrimitiveJavaObject(Object o) {
     <#if mode == "Optional">
       if (o == null) {
         return null;
@@ -246,14 +253,18 @@ public class Drill${entry.drillType}${entry.hiveOI} {
     <#else>
       final DateHolder h = (DateHolder) o;
     </#if>
+    <#if entry.javaType == "org.apache.hadoop.hive.common.type.Date">
+      return org.apache.hadoop.hive.common.type.Date.ofEpochMilli(h.value);
+    <#else>
      org.joda.time.LocalDate localDate = new org.joda.time.LocalDate(h.value, org.joda.time.DateTimeZone.UTC);
      // Use "toDate()" to get java.util.Date object with exactly the same year the same year, month and day as Joda date.
       // See more in Javadoc for "LocalDate#toDate()"
-      return new java.sql.Date(localDate.toDate().getTime());
+      return new ${entry.javaType}(localDate.toDate().getTime());
+    </#if>
     }
 
     @Override
-    public DateWritable getPrimitiveWritableObject(Object o) {
+    public ${entry.writableType} getPrimitiveWritableObject(Object o) {
     <#if mode == "Optional">
       if (o == null) {
         return null;
@@ -262,10 +273,14 @@ public class Drill${entry.drillType}${entry.hiveOI} {
     <#else>
       final DateHolder h = (DateHolder) o;
     </#if>
+    <#if entry.javaType == "org.apache.hadoop.hive.common.type.Date">
+      return new ${entry.writableType}(org.apache.hadoop.hive.common.type.Date.ofEpochMilli(h.value));
+    <#else>
       org.joda.time.LocalDate localDate = new org.joda.time.LocalDate(h.value, org.joda.time.DateTimeZone.UTC);
       // Use "toDate()" to get java.util.Date object with exactly the same year the same year, month and day as Joda date.
       // See more in Javadoc for "LocalDate#toDate()"
-      return new DateWritable(new java.sql.Date(localDate.toDate().getTime()));
+      return new ${entry.writableType}(new ${entry.javaType}(localDate.toDate().getTime()));
+    </#if>
     }
 
 <#else>
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveMetadataProvider.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveMetadataProvider.java
index 4a2bb58..35dca62 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveMetadataProvider.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveMetadataProvider.java
@@ -31,9 +31,9 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.FileSplit;
 import org.apache.hadoop.mapred.InputFormat;
@@ -102,7 +102,7 @@ public class HiveMetadataProvider {
     HiveTableWithColumnCache table = hiveReadEntry.getTable();
     try {
       if (!isPartitionedTable) {
-        Properties properties = MetaStoreUtils.getTableMetadata(table);
+        Properties properties = new Table(table).getMetadata();
         HiveStats stats = HiveStats.getStatsFromProps(properties);
         if (stats.valid()) {
           return stats;
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
index f7c7099..531284d 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
@@ -67,7 +67,6 @@ import org.apache.drill.exec.work.ExecErrorConstants;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
@@ -75,6 +74,7 @@ import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
 import org.apache.hadoop.hive.ql.io.IOConstants;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
@@ -447,7 +447,7 @@ public class HiveUtilities {
       }
       final HiveStorageHandler storageHandler = HiveUtils.getStorageHandler(job, storageHandlerClass);
       TableDesc tableDesc = new TableDesc();
-      tableDesc.setProperties(MetaStoreUtils.getTableMetadata(table));
+      tableDesc.setProperties(new org.apache.hadoop.hive.ql.metadata.Table(table).getMetadata());
       storageHandler.configureInputJobProperties(tableDesc, table.getParameters());
       return (Class<? extends InputFormat<?, ?>>) storageHandler.getInputFormatClass();
     } else {
@@ -468,7 +468,7 @@ public class HiveUtilities {
   }
 
   /**
-   * Wrapper around {@link MetaStoreUtils#getPartitionMetadata(org.apache.hadoop.hive.metastore.api.Partition, Table)}
+   * Wrapper around {@code MetaStoreUtils#getPartitionMetadata(org.apache.hadoop.hive.metastore.api.Partition, Table)}
    * which also adds parameters from table to properties returned by that method.
    *
    * @param partition the source of partition level parameters
@@ -477,16 +477,20 @@ public class HiveUtilities {
    */
   public static Properties getPartitionMetadata(final HivePartition partition, final HiveTableWithColumnCache table) {
     restoreColumns(table, partition);
-    Properties properties = MetaStoreUtils.getPartitionMetadata(partition, table);
-
-    // SerDe expects properties from Table, but above call doesn't add Table properties.
-    // Include Table properties in final list in order to not to break SerDes that depend on
-    // Table properties. For example AvroSerDe gets the schema from properties (passed as second argument)
-    table.getParameters().entrySet().stream()
-        .filter(e -> e.getKey() != null && e.getValue() != null)
-        .forEach(e -> properties.put(e.getKey(), e.getValue()));
-
-    return properties;
+    try {
+      Properties properties = new org.apache.hadoop.hive.ql.metadata.Partition(new org.apache.hadoop.hive.ql.metadata.Table(table), partition).getMetadataFromPartitionSchema();
+
+      // SerDe expects properties from Table, but above call doesn't add Table properties.
+      // Include Table properties in final list in order to not to break SerDes that depend on
+      // Table properties. For example AvroSerDe gets the schema from properties (passed as second argument)
+      table.getParameters().entrySet().stream()
+          .filter(e -> e.getKey() != null && e.getValue() != null)
+          .forEach(e -> properties.put(e.getKey(), e.getValue()));
+
+      return properties;
+    } catch (HiveException e) {
+      throw new DrillRuntimeException(e);
+    }
   }
 
   /**
@@ -507,17 +511,16 @@ public class HiveUtilities {
   }
 
   /**
-   * Wrapper around {@link MetaStoreUtils#getSchema(StorageDescriptor, StorageDescriptor, Map, String, String, List)}
+   * Wrapper around {@code MetaStoreUtils#getSchema(StorageDescriptor, StorageDescriptor, Map, String, String, List)}
    * which also sets columns from table cache to table and returns properties returned by
-   * {@link MetaStoreUtils#getSchema(StorageDescriptor, StorageDescriptor, Map, String, String, List)}.
+   * {@code MetaStoreUtils#getSchema(StorageDescriptor, StorageDescriptor, Map, String, String, List)}.
    *
    * @param table Hive table with cached columns
    * @return Hive table metadata
    */
   public static Properties getTableMetadata(HiveTableWithColumnCache table) {
     restoreColumns(table, null);
-    return MetaStoreUtils.getSchema(table.getSd(), table.getSd(), table.getParameters(),
-      table.getDbName(), table.getTableName(), table.getPartitionKeys());
+    return new org.apache.hadoop.hive.ql.metadata.Table(table).getMetadata();
   }
 
   /**
@@ -587,7 +590,7 @@ public class HiveUtilities {
   public static void verifyAndAddTransactionalProperties(JobConf job, StorageDescriptor sd) {
 
     if (AcidUtils.isTablePropertyTransactional(job)) {
-      AcidUtils.setTransactionalTableScan(job, true);
+      HiveConf.setBoolVar(job, HiveConf.ConfVars.HIVE_TRANSACTIONAL_TABLE_SCAN, true);
 
       // No work is needed, if schema evolution is used
      if (Utilities.isSchemaEvolutionEnabled(job, true) && job.get(IOConstants.SCHEMA_EVOLUTION_COLUMNS) != null &&
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/client/DrillHiveMetaStoreClientFactory.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/client/DrillHiveMetaStoreClientFactory.java
index f392ba3..0ec9202 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/client/DrillHiveMetaStoreClientFactory.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/client/DrillHiveMetaStoreClientFactory.java
@@ -24,8 +24,9 @@ import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.exec.util.ImpersonationUtil;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.shims.Utils;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
 
 /**
 * Provides factory methods for initialization of {@link DrillHiveMetaStoreClient} instances.
@@ -65,7 +66,7 @@ public final class DrillHiveMetaStoreClientFactory {
           // delegation tokens).
           String delegationToken = processUserMetaStoreClient.getDelegationToken(userName, userName);
           try {
-            Utils.setTokenStr(ugiForRpc, delegationToken, DrillHiveMetaStoreClientWithAuthorization.DRILL2HMS_TOKEN);
+            setTokenStr(ugiForRpc, delegationToken, DrillHiveMetaStoreClientWithAuthorization.DRILL2HMS_TOKEN);
           } catch (IOException e) {
             throw new DrillRuntimeException("Couldn't setup delegation token in the UGI for Hive MetaStoreClient", e);
           }
@@ -89,6 +90,37 @@ public final class DrillHiveMetaStoreClientFactory {
   }
 
   /**
+   * Create a delegation token object for the given token string and service.
+   * Add the token to given UGI
+   *
+   * @param ugi          user group information
+   * @param tokenStr     token string
+   * @param tokenService token service
+   * @throws IOException if error happened during decoding token string
+   */
+  public static void setTokenStr(UserGroupInformation ugi, String tokenStr, String tokenService)
+      throws IOException {
+    Token<?> delegationToken = createToken(tokenStr, tokenService);
+    ugi.addToken(delegationToken);
+  }
+
+  /**
+   * Create a new token using the given string and service
+   *
+   * @param tokenStr     token string
+   * @param tokenService token service
+   * @return {@link Token} instance with decoded string
+   * @throws IOException if error happened during decoding token string
+   */
+  private static Token<?> createToken(String tokenStr, String tokenService)
+      throws IOException {
+    Token<?> delegationToken = new Token<>();
+    delegationToken.decodeFromUrlString(tokenStr);
+    delegationToken.setService(new Text(tokenService));
+    return delegationToken;
+  }
+
+  /**
   * Create a DrillMetaStoreClient that can be shared across multiple users. This is created when impersonation is
    * disabled.
    *
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/writers/primitive/HiveDateWriter.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/writers/primitive/HiveDateWriter.java
index a1b4822..6d1c3ed 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/writers/primitive/HiveDateWriter.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/writers/primitive/HiveDateWriter.java
@@ -18,21 +18,25 @@
 package org.apache.drill.exec.store.hive.writers.primitive;
 
 import org.apache.drill.exec.vector.complex.writer.DateWriter;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 
-public class HiveDateWriter extends AbstractSingleValueWriter<DateObjectInspector, DateWriter> {
+import java.sql.Date;
 
-  public HiveDateWriter(DateObjectInspector inspector, DateWriter writer) {
+public class HiveDateWriter extends 
AbstractSingleValueWriter<PrimitiveObjectInspector, DateWriter> {
+
+  public HiveDateWriter(PrimitiveObjectInspector inspector, DateWriter writer) 
{
     super(inspector, writer);
   }
 
   @Override
   public void write(Object value) {
-    final java.sql.Date dateValue = inspector.getPrimitiveJavaObject(value);
-    final DateTime date = new DateTime(dateValue.getTime()).withZoneRetainFields(DateTimeZone.UTC);
-    writer.writeDate(date.getMillis());
+    String dateString = PrimitiveObjectInspectorUtils.getString(value, inspector);
+    long dateMillis = new DateTime(Date.valueOf(dateString).getTime())
+        .withZoneRetainFields(DateTimeZone.UTC).getMillis();
+    writer.writeDate(dateMillis);
   }
 
 }
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/writers/primitive/HiveTimestampWriter.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/writers/primitive/HiveTimestampWriter.java
index 9bc2b6a..72108c5 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/writers/primitive/HiveTimestampWriter.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/writers/primitive/HiveTimestampWriter.java
@@ -18,21 +18,25 @@
 package org.apache.drill.exec.store.hive.writers.primitive;
 
 import org.apache.drill.exec.vector.complex.writer.TimeStampWriter;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 
-public class HiveTimestampWriter extends AbstractSingleValueWriter<TimestampObjectInspector, TimeStampWriter> {
+import java.sql.Timestamp;
 
-  public HiveTimestampWriter(TimestampObjectInspector inspector, TimeStampWriter writer) {
+public class HiveTimestampWriter extends AbstractSingleValueWriter<PrimitiveObjectInspector, TimeStampWriter> {
+
+  public HiveTimestampWriter(PrimitiveObjectInspector inspector, TimeStampWriter writer) {
     super(inspector, writer);
   }
 
   @Override
   public void write(Object value) {
-    final java.sql.Timestamp timestampValue = inspector.getPrimitiveJavaObject(value);
-    final DateTime ts = new DateTime(timestampValue.getTime()).withZoneRetainFields(DateTimeZone.UTC);
-    writer.writeTimeStamp(ts.getMillis());
+    String timestampString = PrimitiveObjectInspectorUtils.getString(value, inspector);
+    long timestampMillis = new DateTime(Timestamp.valueOf(timestampString).getTime())
+        .withZoneRetainFields(DateTimeZone.UTC).getMillis();
+    writer.writeTimeStamp(timestampMillis);
   }
 
 }
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/writers/primitive/HiveDateWriter.java b/contrib/storage-hive/core/src/main/java/org/apache/logging/log4j/util/Strings.java
similarity index 52%
copy from contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/writers/primitive/HiveDateWriter.java
copy to contrib/storage-hive/core/src/main/java/org/apache/logging/log4j/util/Strings.java
index a1b4822..7ec47c5 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/writers/primitive/HiveDateWriter.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/logging/log4j/util/Strings.java
@@ -15,24 +15,19 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.store.hive.writers.primitive;
+package org.apache.logging.log4j.util;
 
-import org.apache.drill.exec.vector.complex.writer.DateWriter;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
+import org.apache.commons.lang3.StringUtils;
 
-public class HiveDateWriter extends AbstractSingleValueWriter<DateObjectInspector, DateWriter> {
-
-  public HiveDateWriter(DateObjectInspector inspector, DateWriter writer) {
-    super(inspector, writer);
-  }
+/**
+ * Hive uses class with the same full name from log4j-1.2-api.
+ * Added this class to avoid ClassNotFound errors from Hive.
+ *
+ * See <a href="https://issues.apache.org/jira/browse/HIVE-23088">HIVE-23088</a> for the problem description.
+ */
+public class Strings {
 
-  @Override
-  public void write(Object value) {
-    final java.sql.Date dateValue = inspector.getPrimitiveJavaObject(value);
-    final DateTime date = new 
DateTime(dateValue.getTime()).withZoneRetainFields(DateTimeZone.UTC);
-    writer.writeDate(date.getMillis());
+  public static boolean isBlank(final String s) {
+    return StringUtils.isBlank(s);
   }
-
 }
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestFixture.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestFixture.java
index ad8c31c..0bf5d42 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestFixture.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestFixture.java
@@ -167,6 +167,8 @@ public class HiveTestFixture {
       driverOption(ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
       driverOption(ConfVars.METASTORE_AUTO_CREATE_ALL, Boolean.toString(true));
       driverOption(ConfVars.METASTORE_SCHEMA_VERIFICATION, Boolean.toString(false));
+      driverOption(ConfVars.HIVE_MATERIALIZED_VIEW_ENABLE_AUTO_REWRITING, Boolean.toString(false));
+      driverOption(HiveConf.ConfVars.HIVESESSIONSILENT, Boolean.toString(true));
       driverOption(ConfVars.HIVE_CBO_ENABLED, Boolean.toString(false));
     }
 
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestUtilities.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestUtilities.java
index 8518fca..537d0f4 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestUtilities.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestUtilities.java
@@ -27,9 +27,10 @@ import java.util.Set;
 
 import org.apache.drill.test.QueryBuilder;
 import org.apache.drill.test.TestTools;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.util.ComparableVersion;
+import org.apache.hive.common.util.HiveVersionInfo;
 import org.junit.AssumptionViolatedException;
 
 import static org.hamcrest.CoreMatchers.containsString;
@@ -46,27 +47,19 @@ public class HiveTestUtilities {
  private static final Set<PosixFilePermission> ALL_POSIX_PERMISSIONS = EnumSet.allOf(PosixFilePermission.class);
 
   /**
-   * Execute the give <i>query</i> on given <i>hiveDriver</i> instance. If a {@link CommandNeedRetryException}
-   * exception is thrown, it tries upto 3 times before returning failure.
-   * @param hiveDriver
-   * @param query
+   * Execute the give <i>query</i> on given <i>hiveDriver</i> instance.
    */
   public static void executeQuery(Driver hiveDriver, String query) {
-    CommandProcessorResponse response = null;
-    boolean failed = false;
-    int retryCount = 3;
-
+    CommandProcessorResponse response;
     try {
       response = hiveDriver.run(query);
-    } catch(CommandNeedRetryException ex) {
-      if (--retryCount == 0) {
-        failed = true;
-      }
+    } catch (Exception e) {
+       throw new RuntimeException(e);
     }
 
-    if (failed || response.getResponseCode() != 0 ) {
+    if (response.getResponseCode() != 0 ) {
       throw new RuntimeException(String.format("Failed to execute command '%s', errorMsg = '%s'",
-          query, (response != null ? response.getErrorMessage() : "")));
+          query, response.getErrorMessage()));
     }
   }
 
@@ -142,6 +135,14 @@ public class HiveTestUtilities {
   }
 
   /**
+   * Checks whether current version is not less than hive 3.0
+   */
+  public static boolean isHive3() {
+    return new ComparableVersion(HiveVersionInfo.getVersion())
+        .compareTo(new ComparableVersion("3.0")) >= 0;
+  }
+
+  /**
    * Checks if current version is supported by Hive.
    *
   * @throws AssumptionViolatedException if current version is not supported by Hive,
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/BaseTestHiveImpersonation.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/BaseTestHiveImpersonation.java
index 4fc85cc..33e77ea 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/BaseTestHiveImpersonation.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/BaseTestHiveImpersonation.java
@@ -27,14 +27,13 @@ import org.apache.drill.exec.hive.HiveTestUtilities;
 import org.apache.drill.exec.impersonation.BaseTestImpersonation;
 import org.apache.drill.exec.store.hive.HiveStoragePluginConfig;
 import org.apache.drill.test.TestBuilder;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.Driver;
-import org.apache.hadoop.hive.shims.ShimLoader;
 import org.junit.BeforeClass;
 
 import static org.apache.drill.exec.hive.HiveTestUtilities.createDirWithPosixPermissions;
@@ -85,6 +84,9 @@ public class BaseTestHiveImpersonation extends BaseTestImpersonation {
     hiveConf.set(ConfVars.METASTORE_SCHEMA_VERIFICATION.varname, "false");
     hiveConf.set(ConfVars.METASTORE_AUTO_CREATE_ALL.varname, "true");
     hiveConf.set(ConfVars.HIVE_CBO_ENABLED.varname, "false");
+    hiveConf.set(ConfVars.HIVESTATSAUTOGATHER.varname, "false");
+    hiveConf.set(ConfVars.HIVESTATSCOLAUTOGATHER.varname, "false");
+    hiveConf.set(ConfVars.HIVESESSIONSILENT.varname, "true");
 
     // Set MiniDFS conf in HiveConf
     hiveConf.set(FS_DEFAULT_NAME_KEY, dfsConf.get(FS_DEFAULT_NAME_KEY));
@@ -97,11 +99,29 @@ public class BaseTestHiveImpersonation extends BaseTestImpersonation {
   }
 
   protected static void startHiveMetaStore() throws Exception {
-    final int port = MetaStoreUtils.findFreePort();
+    Class<?> metaStoreUtilsClass;
+    Class<?> hadoopThriftAuthBridgeClass;
+    Class<?> confClass;
+    Object hadoopThriftAuthBridge;
+    // TODO: remove reflection stuff when all supported profiles will be switched to Hive 3+ version
+    try {
+      metaStoreUtilsClass = Class.forName("org.apache.hadoop.hive.metastore.utils.MetaStoreUtils");
+      hadoopThriftAuthBridgeClass = Class.forName("org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge");
+      hadoopThriftAuthBridge = hadoopThriftAuthBridgeClass.getDeclaredMethod("getBridge").invoke(null);
+      confClass = Configuration.class;
+    } catch (ClassNotFoundException e) {
+      metaStoreUtilsClass = Class.forName("org.apache.hadoop.hive.metastore.MetaStoreUtils");
+      hadoopThriftAuthBridgeClass = Class.forName("org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge");
+      hadoopThriftAuthBridge = Class.forName("org.apache.hadoop.hive.shims.ShimLoader")
+          .getDeclaredMethod("getHadoopThriftAuthBridge").invoke(null);
+      confClass = HiveConf.class;
+    }
+    final int port = (int) metaStoreUtilsClass.getDeclaredMethod("findFreePort").invoke(null);
 
     hiveConf.set(METASTOREURIS.varname, "thrift://localhost:" + port);
 
-    MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge(), hiveConf);
+    metaStoreUtilsClass.getDeclaredMethod("startMetaStore", int.class, hadoopThriftAuthBridgeClass, confClass)
+        .invoke(null, port, hadoopThriftAuthBridge, hiveConf);
   }
 
   protected static HiveStoragePluginConfig createHiveStoragePlugin(final Map<String, String> hiveConfig) throws Exception {
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java
index d5a3f72..376b49d 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java
@@ -25,6 +25,7 @@ import java.sql.Timestamp;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.drill.exec.hive.HiveTestUtilities;
 import org.apache.drill.shaded.guava.com.google.common.io.Resources;
 import org.apache.drill.test.BaseDirTestWatcher;
 import org.apache.drill.test.BaseTestQuery;
@@ -442,7 +443,8 @@ public class HiveTestDataGenerator {
     executeQuery(hiveDriver, "CREATE OR REPLACE VIEW readtest_view AS SELECT * FROM readtest");
     executeQuery(hiveDriver, "CREATE VIEW IF NOT EXISTS hive_view AS SELECT * FROM kv");
     executeQuery(hiveDriver, "CREATE OR REPLACE VIEW kv_native_view AS SELECT * FROM kv_native");
-    executeQuery(hiveDriver, "CREATE MATERIALIZED VIEW IF NOT EXISTS hive_view_m AS SELECT * FROM kv WHERE key = 1");
+    String disableRewrite = HiveTestUtilities.isHive3() ? "DISABLE REWRITE" : "";
+    executeQuery(hiveDriver, String.format("CREATE MATERIALIZED VIEW IF NOT EXISTS hive_view_m %s AS SELECT * FROM kv WHERE key = 1", disableRewrite));
     executeQuery(hiveDriver, "CREATE OR REPLACE VIEW view_over_hive_view AS SELECT * FROM hive_view WHERE key BETWEEN 2 AND 3");
     executeQuery(hiveDriver, "CREATE OR REPLACE VIEW db1.two_table_view AS SELECT COUNT(dk.key) dk_key_count FROM db1.avro dk " +
         "INNER JOIN kv ON kv.key = dk.key");
@@ -592,7 +594,7 @@ public class HiveTestDataGenerator {
 
   private String generateTestDataWithHeadersAndFooters(String tableName, int rowCount, int headerLines, int footerLines) {
     StringBuilder sb = new StringBuilder();
-    sb.append("insert into table ").append(tableName).append(" (key, value) values ");
+    sb.append("insert into table ").append(tableName).append(" values ");
     sb.append(StringUtils.repeat("('key_header', 'value_header')", ",", headerLines));
     if (headerLines > 0) {
       sb.append(",");
diff --git a/exec/rpc/pom.xml b/exec/rpc/pom.xml
index dead5a4..81caa5b 100644
--- a/exec/rpc/pom.xml
+++ b/exec/rpc/pom.xml
@@ -45,7 +45,7 @@
       <artifactId>drill-memory-base</artifactId>
       <version>${project.version}</version>
     </dependency>
-    
+
     <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
@@ -54,7 +54,7 @@
       <groupId>io.netty</groupId>
       <artifactId>netty-transport-native-epoll</artifactId>
       <classifier>linux-x86_64</classifier>
-      <version>4.0.48.Final</version>
+      <version>${netty.version}</version>
       <exclusions>
         <exclusion>
           <groupId>io.netty</groupId>
@@ -84,10 +84,4 @@
     </dependency>
   </dependencies>
 
-
-  <build>
-  </build>
-
-
-
 </project>
diff --git a/pom.xml b/pom.xml
index bacd6bd..7f46c3c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -81,7 +81,7 @@
       Currently Hive storage plugin only supports Apache Hive 2.3.2 or vendor specific variants of the
       Apache Hive 2.3.2. If the version is changed, make sure the jars and their dependencies are updated.
     -->
-    <hive.version>2.3.2</hive.version>
+    <hive.version>3.1.2</hive.version>
     <hadoop.version>3.2.1</hadoop.version>
     <hbase.version>2.2.2</hbase.version>
     <fmpp.version>1.0</fmpp.version>
@@ -112,6 +112,7 @@
     <surefire.version>3.0.0-M4</surefire.version>
     <commons.compress.version>1.19</commons.compress.version>
     <hikari.version>3.4.2</hikari.version>
+    <netty.version>4.0.48.Final</netty.version>
   </properties>
 
   <scm>
@@ -1002,13 +1003,11 @@
     <dependency>
       <groupId>io.netty</groupId>
       <artifactId>netty-handler</artifactId>
-      <version>4.0.48.Final</version>
     </dependency>
 
     <dependency>
       <groupId>io.netty</groupId>
       <artifactId>netty-common</artifactId>
-      <version>4.0.48.Final</version>
     </dependency>
 
     <dependency>
@@ -1692,6 +1691,16 @@
         <optional>true</optional>
       </dependency>
       <dependency>
+        <groupId>io.netty</groupId>
+        <artifactId>netty-handler</artifactId>
+        <version>${netty.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>io.netty</groupId>
+        <artifactId>netty-common</artifactId>
+        <version>${netty.version}</version>
+      </dependency>
+      <dependency>
         <groupId>com.tdunning</groupId>
         <artifactId>json</artifactId>
         <version>1.8</version>
