Repository: hive
Updated Branches:
  refs/heads/master 330c62f84 -> 346dd8e69


HIVE-17365: Druid CTAS should support CHAR/VARCHAR type (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/346dd8e6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/346dd8e6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/346dd8e6

Branch: refs/heads/master
Commit: 346dd8e693d0b189c8dba01d025a989be2d8994f
Parents: 330c62f
Author: Jesus Camacho Rodriguez <jcama...@apache.org>
Authored: Tue Aug 22 14:13:20 2017 -0700
Committer: Jesus Camacho Rodriguez <jcama...@apache.org>
Committed: Tue Aug 22 14:13:20 2017 -0700

----------------------------------------------------------------------
 .../hadoop/hive/druid/serde/DruidSerDe.java     | 33 ++++++++++++++-
 .../hadoop/hive/druid/TestDruidSerDe.java       | 44 +++++++++++++-------
 2 files changed, 59 insertions(+), 18 deletions(-)
----------------------------------------------------------------------
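
For context, a minimal standalone sketch (not part of this patch) of the
HiveChar/HiveVarchar semantics the new SerDe branches rely on: both types
enforce the declared length by truncation, and CHAR additionally pads short
values with trailing spaces, which is why the patch threads the length from
CharTypeInfo/VarcharTypeInfo into the constructors.

    // Standalone sketch, assuming stock Hive char/varchar type semantics.
    import org.apache.hadoop.hive.common.type.HiveChar;
    import org.apache.hadoop.hive.common.type.HiveVarchar;

    public class CharTypeDemo {
      public static void main(String[] args) {
        // char(6): values longer than the declared length are truncated
        System.out.println(new HiveChar("dim2_value", 6).getValue());    // "dim2_v"
        // char(6): shorter values are padded with trailing spaces
        System.out.println(new HiveChar("ab", 6).getValue());            // "ab    "
        // varchar(8): truncates but never pads
        System.out.println(new HiveVarchar("dim3_value", 8).getValue()); // "dim3_val"
      }
    }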


http://git-wip-us.apache.org/repos/asf/hive/blob/346dd8e6/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java
index 90938aa..8d98b3b 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java
@@ -29,7 +29,9 @@ import java.util.Properties;
 
 import org.apache.calcite.adapter.druid.DruidTable;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.conf.Constants;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.druid.DruidStorageHandler;
@@ -42,7 +44,9 @@ import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -52,15 +56,19 @@ import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.io.FloatWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
@@ -485,10 +493,17 @@ public class DruidSerDe extends AbstractSerDe {
          res = ((HiveDecimalObjectInspector) fields.get(i).getFieldObjectInspector())
                  .getPrimitiveJavaObject(values.get(i)).doubleValue();
          break;
+        case CHAR:
+          res = ((HiveCharObjectInspector) fields.get(i).getFieldObjectInspector())
+                  .getPrimitiveJavaObject(values.get(i)).getValue();
+          break;
+        case VARCHAR:
+          res = ((HiveVarcharObjectInspector) fields.get(i).getFieldObjectInspector())
+                  .getPrimitiveJavaObject(values.get(i)).getValue();
+          break;
         case STRING:
           res = ((StringObjectInspector) fields.get(i).getFieldObjectInspector())
-                  .getPrimitiveJavaObject(
-                          values.get(i));
+                  .getPrimitiveJavaObject(values.get(i));
           break;
         default:
           throw new SerDeException("Unknown type: " + types[i].getPrimitiveCategory());
@@ -543,6 +558,20 @@ public class DruidSerDe extends AbstractSerDe {
         case DECIMAL:
          output.add(new HiveDecimalWritable(HiveDecimal.create(((Number) value).doubleValue())));
           break;
+        case CHAR:
+          output.add(
+              new HiveCharWritable(
+                  new HiveChar(
+                      value.toString(),
+                      ((CharTypeInfo) types[i]).getLength())));
+          break;
+        case VARCHAR:
+          output.add(
+              new HiveVarcharWritable(
+                  new HiveVarchar(
+                      value.toString(),
+                      ((VarcharTypeInfo) types[i]).getLength())));
+          break;
         case STRING:
           output.add(new Text(value.toString()));
           break;
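
To make the new deserialize branches above concrete, a hedged standalone sketch
of the rewrapping they perform (the names "value" and "type" are illustrative,
not from the patch): a plain string read out of a Druid row is wrapped back
into the writable for the declared Hive type, bounded by the length carried in
the type info.

    // Hedged sketch of the CHAR branch's rewrapping; VARCHAR is analogous.
    import org.apache.hadoop.hive.common.type.HiveChar;
    import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
    import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class RewrapDemo {
      public static void main(String[] args) {
        Object value = "dim2_value";                            // illustrative Druid value
        CharTypeInfo type = TypeInfoFactory.getCharTypeInfo(6); // declared char(6)
        HiveCharWritable w =
            new HiveCharWritable(new HiveChar(value.toString(), type.getLength()));
        System.out.println(w.getHiveChar().getValue());         // "dim2_v"
      }
    }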

http://git-wip-us.apache.org/repos/asf/hive/blob/346dd8e6/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidSerDe.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidSerDe.java b/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidSerDe.java
index 62b2d6b..137309c 100644
--- a/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidSerDe.java
+++ b/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidSerDe.java
@@ -31,7 +31,9 @@ import java.util.Map.Entry;
 import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.conf.Constants;
 import org.apache.hadoop.hive.druid.serde.DruidGroupByQueryRecordReader;
 import org.apache.hadoop.hive.druid.serde.DruidQueryRecordReader;
@@ -46,7 +48,9 @@ import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -749,11 +753,13 @@ public class TestDruidSerDe {
   }
 
 
-  private static final String COLUMN_NAMES = "__time,c0,c1,c2,c3,c4,c5,c6,c7";
-  private static final String COLUMN_TYPES = "timestamp,string,double,float,decimal(38,18),bigint,int,smallint,tinyint";
+  private static final String COLUMN_NAMES = "__time,c0,c1,c2,c3,c4,c5,c6,c7,c8,c9";
+  private static final String COLUMN_TYPES = "timestamp,string,char(6),varchar(8),double,float,decimal(38,18),bigint,int,smallint,tinyint";
   private static final Object[] ROW_OBJECT = new Object[] {
       new TimestampWritable(new Timestamp(1377907200000L)),
       new Text("dim1_val"),
+      new HiveCharWritable(new HiveChar("dim2_v", 6)),
+      new HiveVarcharWritable(new HiveVarchar("dim3_val", 8)),
       new DoubleWritable(10669.3D),
       new FloatWritable(10669.45F),
       new HiveDecimalWritable(HiveDecimal.create(1064.34D)),
@@ -767,13 +773,15 @@ public class TestDruidSerDe {
       ImmutableMap.<String, Object>builder()
           .put("__time", 1377907200000L)
           .put("c0", "dim1_val")
-          .put("c1", 10669.3D)
-          .put("c2", 10669.45F)
-          .put("c3", 1064.34D)
-          .put("c4", 1113939L)
-          .put("c5", 1112123)
-          .put("c6", (short) 12)
-          .put("c7", (byte) 0)
+          .put("c1", "dim2_v")
+          .put("c2", "dim3_val")
+          .put("c3", 10669.3D)
+          .put("c4", 10669.45F)
+          .put("c5", 1064.34D)
+          .put("c6", 1113939L)
+          .put("c7", 1112123)
+          .put("c8", (short) 12)
+          .put("c9", (byte) 0)
           .put("__time_granularity", 1377907200000L)
           .build());
 
@@ -854,6 +862,8 @@ public class TestDruidSerDe {
   private static final Object[] ROW_OBJECT_2 = new Object[] {
       new TimestampWritable(new Timestamp(1377907200000L)),
       new Text("dim1_val"),
+      new HiveCharWritable(new HiveChar("dim2_v", 6)),
+      new HiveVarcharWritable(new HiveVarchar("dim3_val", 8)),
       new DoubleWritable(10669.3D),
       new FloatWritable(10669.45F),
       new HiveDecimalWritable(HiveDecimal.create(1064.34D)),
@@ -866,13 +876,15 @@ public class TestDruidSerDe {
       ImmutableMap.<String, Object>builder()
           .put("__time", 1377907200000L)
           .put("c0", "dim1_val")
-          .put("c1", 10669.3D)
-          .put("c2", 10669.45F)
-          .put("c3", 1064.34D)
-          .put("c4", 1113939L)
-          .put("c5", 1112123)
-          .put("c6", (short) 12)
-          .put("c7", (byte) 0)
+          .put("c1", "dim2_v")
+          .put("c2", "dim3_val")
+          .put("c3", 10669.3D)
+          .put("c4", 10669.45F)
+          .put("c5", 1064.34D)
+          .put("c6", 1113939L)
+          .put("c7", 1112123)
+          .put("c8", (short) 12)
+          .put("c9", (byte) 0)
           .build());
 
   @Test
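
As a hedged companion to the updated fixtures above: the new char(6) column c1
and varchar(8) column c2 are expected to land in the serialized Druid row as
the plain strings "dim2_v" and "dim3_val", which this standalone sketch mirrors.

    // Sketch mirroring the ROW_OBJECT fixtures; not part of the test itself.
    import org.apache.hadoop.hive.common.type.HiveChar;
    import org.apache.hadoop.hive.common.type.HiveVarchar;
    import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
    import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;

    public class FixtureDemo {
      public static void main(String[] args) {
        HiveCharWritable c1 = new HiveCharWritable(new HiveChar("dim2_v", 6));
        HiveVarcharWritable c2 = new HiveVarcharWritable(new HiveVarchar("dim3_val", 8));
        System.out.println(c1.getHiveChar().getValue());    // "dim2_v"   -> map key "c1"
        System.out.println(c2.getHiveVarchar().getValue()); // "dim3_val" -> map key "c2"
      }
    }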
