[ 
https://issues.apache.org/jira/browse/DRILL-6242?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16474694#comment-16474694
 ] 

ASF GitHub Bot commented on DRILL-6242:
---------------------------------------

jiang-wu closed pull request #1184: DRILL-6242 - Use 
java.sql.[Date|Time|Timestamp] classes to hold value…
URL: https://github.com/apache/drill/pull/1184
 
 
   

This is a PR merged from a forked repository. Because GitHub hides the
original diff once a pull request from a fork is merged, the diff is
reproduced below for the sake of provenance:

diff --git 
a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestInbuiltHiveUDFs.java
 
b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestInbuiltHiveUDFs.java
index d4e0b5cb9c..3ae6aee337 100644
--- 
a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestInbuiltHiveUDFs.java
+++ 
b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestInbuiltHiveUDFs.java
@@ -17,22 +17,24 @@
  */
 package org.apache.drill.exec.fn.hive;
 
-import com.google.common.collect.Lists;
+import java.sql.Timestamp;
+import java.util.List;
+
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.drill.categories.HiveStorageTest;
-import org.apache.drill.test.QueryTestUtil;
-import org.apache.drill.test.TestBuilder;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.exec.compile.ClassTransformer;
 import org.apache.drill.exec.hive.HiveTestBase;
 import org.apache.drill.exec.server.options.OptionValue;
+import org.apache.drill.test.QueryTestUtil;
+import org.apache.drill.test.TestBuilder;
 import org.joda.time.DateTime;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import java.util.List;
+import com.google.common.collect.Lists;
 
 @Category({SlowTest.class, HiveStorageTest.class})
 public class TestInbuiltHiveUDFs extends HiveTestBase {
@@ -169,7 +171,7 @@ public void testFromUTCTimestamp() throws Exception {
         .sqlQuery("select from_utc_timestamp('1970-01-01 08:00:00','PST') as 
PST_TIMESTAMP from (VALUES(1))")
         .unOrdered()
         .baselineColumns("PST_TIMESTAMP")
-        .baselineValues(DateTime.parse("1970-01-01T00:00:00.0"))
+        .baselineValues(new 
Timestamp(DateTime.parse("1970-01-01T00:00:00.0").getMillis()))
         .go();
   }
 
@@ -179,7 +181,7 @@ public void testToUTCTimestamp() throws Exception {
         .sqlQuery("select to_utc_timestamp('1970-01-01 00:00:00','PST') as 
UTC_TIMESTAMP from (VALUES(1))")
         .unOrdered()
         .baselineColumns("UTC_TIMESTAMP")
-        .baselineValues(DateTime.parse("1970-01-01T08:00:00.0"))
+        .baselineValues(new 
Timestamp(DateTime.parse("1970-01-01T08:00:00.0").getMillis()))
         .go();
   }
 
diff --git 
a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestHiveStorage.java
 
b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestHiveStorage.java
index 4da22b6a3a..5a97bf7ea9 100644
--- 
a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestHiveStorage.java
+++ 
b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestHiveStorage.java
@@ -17,8 +17,19 @@
  */
 package org.apache.drill.exec.hive;
 
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Maps;
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+
+import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.List;
+import java.util.Map;
+
 import org.apache.drill.PlanTestBase;
 import org.apache.drill.categories.HiveStorageTest;
 import org.apache.drill.categories.SlowTest;
@@ -28,24 +39,13 @@
 import org.apache.drill.exec.proto.UserProtos;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.joda.time.DateTime;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import java.math.BigDecimal;
-import java.sql.Date;
-import java.sql.Timestamp;
-import java.util.List;
-import java.util.Map;
-
-import static org.hamcrest.CoreMatchers.containsString;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Maps;
 
 @Category({SlowTest.class, HiveStorageTest.class})
 public class TestHiveStorage extends HiveTestBase {
@@ -160,8 +160,8 @@ public void readAllSupportedHiveDataTypes() throws 
Exception {
             3455,
             "stringfield",
             "varcharfield",
-            new DateTime(Timestamp.valueOf("2013-07-05 17:01:00").getTime()),
-            new DateTime(Date.valueOf("2013-07-05").getTime()),
+            Timestamp.valueOf("2013-07-05 17:01:00"),
+            Date.valueOf("2013-07-05"),
             "charfield",
             // There is a regression in Hive 1.2.1 in binary type partition 
columns. Disable for now.
             //"binary",
@@ -179,8 +179,8 @@ public void readAllSupportedHiveDataTypes() throws 
Exception {
             3455,
             "string",
             "varchar",
-            new DateTime(Timestamp.valueOf("2013-07-05 17:01:00").getTime()),
-            new DateTime(Date.valueOf("2013-07-05").getTime()),
+            Timestamp.valueOf("2013-07-05 17:01:00"),
+            Date.valueOf("2013-07-05"),
             "char")
         .baselineValues( // All fields are null, but partition fields have 
non-null values
             null, null, null, null, null, null, null, null, null, null, null, 
null, null, null, null, null, null, null,
@@ -200,8 +200,8 @@ public void readAllSupportedHiveDataTypes() throws 
Exception {
             3455,
             "string",
             "varchar",
-            new DateTime(Timestamp.valueOf("2013-07-05 17:01:00").getTime()),
-            new DateTime(Date.valueOf("2013-07-05").getTime()),
+            Timestamp.valueOf("2013-07-05 17:01:00"),
+            Date.valueOf("2013-07-05"),
             "char")
         .go();
   }
@@ -275,7 +275,7 @@ public void readAllSupportedHiveDataTypesNativeParquet() 
throws Exception {
               3455,
               "stringfield",
               "varcharfield",
-              new DateTime(Timestamp.valueOf("2013-07-05 17:01:00").getTime()),
+              Timestamp.valueOf("2013-07-05 17:01:00"),
               "charfield",
               // There is a regression in Hive 1.2.1 in binary and boolean 
partition columns. Disable for now.
               //"binary",
@@ -293,8 +293,8 @@ public void readAllSupportedHiveDataTypesNativeParquet() 
throws Exception {
               3455,
               "string",
               "varchar",
-              new DateTime(Timestamp.valueOf("2013-07-05 17:01:00").getTime()),
-              new DateTime(Date.valueOf("2013-07-05").getTime()),
+              Timestamp.valueOf("2013-07-05 17:01:00"),
+              Date.valueOf("2013-07-05"),
               "char")
           .baselineValues( // All fields are null, but partition fields have 
non-null values
               null, null, null, null, null, null, null, null, null, null, 
null, null, null, null, null, null, null,
@@ -314,8 +314,8 @@ public void readAllSupportedHiveDataTypesNativeParquet() 
throws Exception {
               3455,
               "string",
               "varchar",
-              new DateTime(Timestamp.valueOf("2013-07-05 17:01:00").getTime()),
-              new DateTime(Date.valueOf("2013-07-05").getTime()),
+              Timestamp.valueOf("2013-07-05 17:01:00"),
+              Date.valueOf("2013-07-05"),
               "char")
           .go();
     } finally {
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillValuesRelBase.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillValuesRelBase.java
index 96702ebf20..6bed360bb3 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillValuesRelBase.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillValuesRelBase.java
@@ -17,11 +17,15 @@
  */
 package org.apache.drill.exec.planner.common;
 
-import com.fasterxml.jackson.core.JsonLocation;
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.util.TokenBuffer;
-import com.google.common.collect.ImmutableList;
+import static org.apache.drill.exec.planner.logical.DrillOptiq.isLiteralNull;
+
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.util.List;
+
 import org.apache.calcite.plan.RelOptCluster;
 import org.apache.calcite.plan.RelTraitSet;
 import org.apache.calcite.rel.core.Values;
@@ -36,11 +40,11 @@
 import org.joda.time.DateTimeConstants;
 import org.joda.time.Period;
 
-import java.io.IOException;
-import java.math.BigDecimal;
-import java.util.List;
-
-import static org.apache.drill.exec.planner.logical.DrillOptiq.isLiteralNull;
+import com.fasterxml.jackson.core.JsonLocation;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.util.TokenBuffer;
+import com.google.common.collect.ImmutableList;
 
 /**
  * Base class for logical and physical Values implemented in Drill.
@@ -197,7 +201,7 @@ private static void writeLiteral(RexLiteral literal, 
JsonOutput out) throws IOEx
         if (isLiteralNull(literal)) {
           out.writeDateNull();
         } else {
-          out.writeDate(new DateTime(literal.getValue()));
+          out.writeDate(new Date(new 
DateTime(literal.getValue()).getMillis()));
         }
         return;
 
@@ -205,7 +209,7 @@ private static void writeLiteral(RexLiteral literal, 
JsonOutput out) throws IOEx
         if (isLiteralNull(literal)) {
           out.writeTimeNull();
         } else {
-          out.writeTime(new DateTime(literal.getValue()));
+          out.writeTime(new Time(new 
DateTime(literal.getValue()).getMillis()));
         }
         return;
 
@@ -213,7 +217,7 @@ private static void writeLiteral(RexLiteral literal, 
JsonOutput out) throws IOEx
         if (isLiteralNull(literal)) {
           out.writeTimestampNull();
         } else {
-          out.writeTimestamp(new DateTime(literal.getValue()));
+          out.writeTimestamp(new Timestamp(new 
DateTime(literal.getValue()).getMillis()));
         }
         return;
 
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/bson/BsonRecordReader.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/bson/BsonRecordReader.java
index b01413e67d..786f62bb28 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/bson/BsonRecordReader.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/bson/BsonRecordReader.java
@@ -17,8 +17,6 @@
  */
 package org.apache.drill.exec.store.bson;
 
-import io.netty.buffer.DrillBuf;
-
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 import java.nio.ByteBuffer;
@@ -38,8 +36,7 @@
 import org.apache.drill.exec.vector.complex.impl.MapOrListWriterImpl;
 import org.apache.drill.exec.vector.complex.writer.BaseWriter;
 import org.apache.drill.exec.vector.complex.writer.BaseWriter.ComplexWriter;
-import org.apache.drill.exec.vector.complex.writer.DateWriter;
-import org.apache.drill.exec.vector.complex.writer.TimeWriter;
+import org.apache.drill.exec.vector.complex.writer.TimeStampWriter;
 import org.bson.BsonBinary;
 import org.bson.BsonReader;
 import org.bson.BsonType;
@@ -47,6 +44,8 @@
 
 import com.google.common.base.Preconditions;
 
+import io.netty.buffer.DrillBuf;
+
 public class BsonRecordReader {
   static final org.slf4j.Logger logger = 
org.slf4j.LoggerFactory.getLogger(BsonRecordReader.class);
   public final static int MAX_RECORD_SIZE = 128 * 1024;
@@ -247,14 +246,14 @@ private void writeBinary(BsonReader reader, final 
MapOrListWriterImpl writer, St
   }
 
   private void writeTimeStamp(int timestamp, final MapOrListWriterImpl writer, 
String fieldName, boolean isList) {
-    DateTime dateTime = new DateTime(timestamp);
-    TimeWriter t;
+    DateTime dateTime = new DateTime(timestamp*1000L);
+    TimeStampWriter t;
     if (isList == false) {
-      t = writer.map.time(fieldName);
+      t = writer.map.timeStamp(fieldName);
     } else {
-      t = writer.list.time();
+      t = writer.list.timeStamp();
     }
-    t.writeTime((int) 
(dateTime.withZoneRetainFields(org.joda.time.DateTimeZone.UTC).getMillis()));
+    t.writeTimeStamp((int) 
(dateTime.withZoneRetainFields(org.joda.time.DateTimeZone.UTC).getMillis()));
   }
 
   private void writeString(String readString, final MapOrListWriterImpl 
writer, String fieldName, boolean isList) {
@@ -297,13 +296,13 @@ private void writeDouble(double readDouble, final 
MapOrListWriterImpl writer, St
 
   private void writeDateTime(long readDateTime, final MapOrListWriterImpl 
writer, String fieldName, boolean isList) {
     DateTime date = new DateTime(readDateTime);
-    DateWriter dt;
+    TimeStampWriter dt;
     if (isList == false) {
-      dt = writer.map.date(fieldName);
+      dt = writer.map.timeStamp(fieldName);
     } else {
-      dt = writer.list.date();
+      dt = writer.list.timeStamp();
     }
-    
dt.writeDate(date.withZoneRetainFields(org.joda.time.DateTimeZone.UTC).getMillis());
+    
dt.writeTimeStamp(date.withZoneRetainFields(org.joda.time.DateTimeZone.UTC).getMillis());
   }
 
   private void writeBoolean(boolean readBoolean, final MapOrListWriterImpl 
writer, String fieldName, boolean isList) {
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/BasicJsonOutput.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/BasicJsonOutput.java
index db5fbeff9b..cc01f5dc5f 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/BasicJsonOutput.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/BasicJsonOutput.java
@@ -19,12 +19,12 @@
 
 import java.io.IOException;
 import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.Time;
+import java.sql.Timestamp;
 
-import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.expr.fn.impl.DateUtility;
 import org.apache.drill.exec.vector.complex.reader.FieldReader;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
 import org.joda.time.Period;
 import org.joda.time.format.DateTimeFormatter;
 import org.joda.time.format.ISODateTimeFormat;
@@ -210,7 +210,7 @@ public void writeBoolean(FieldReader reader) throws 
IOException {
   @Override
   public void writeDate(FieldReader reader) throws IOException {
     if (reader.isSet()) {
-      writeDate(reader.readDateTime());
+      writeDate(reader.readDate());
     } else {
       writeDateNull();
     }
@@ -219,7 +219,7 @@ public void writeDate(FieldReader reader) throws 
IOException {
   @Override
   public void writeTime(FieldReader reader) throws IOException {
     if (reader.isSet()) {
-      writeTime(reader.readDateTime());
+      writeTime(reader.readTime());
     } else {
       writeTimeNull();
     }
@@ -228,7 +228,7 @@ public void writeTime(FieldReader reader) throws 
IOException {
   @Override
   public void writeTimestamp(FieldReader reader) throws IOException {
     if (reader.isSet()) {
-      writeTimestamp(reader.readDateTime());
+      writeTimestamp(reader.readTimestamp());
     } else {
       writeTimeNull();
     }
@@ -336,7 +336,7 @@ public void writeBoolean(int index, FieldReader reader) 
throws IOException {
   @Override
   public void writeDate(int index, FieldReader reader) throws IOException {
     if (reader.isSet()) {
-      writeDate(reader.readDateTime(index));
+      writeDate(reader.readDate(index));
     } else {
       writeDateNull();
     }
@@ -345,7 +345,7 @@ public void writeDate(int index, FieldReader reader) throws 
IOException {
   @Override
   public void writeTime(int index, FieldReader reader) throws IOException {
     if (reader.isSet()) {
-      writeTime(reader.readDateTime(index));
+      writeTime(reader.readTime(index));
     } else {
       writeTimeNull();
     }
@@ -354,7 +354,7 @@ public void writeTime(int index, FieldReader reader) throws 
IOException {
   @Override
   public void writeTimestamp(int index, FieldReader reader) throws IOException 
{
     if (reader.isSet()) {
-      writeTimestamp(reader.readDateTime(index));
+      writeTimestamp(reader.readTimestamp(index));
     } else {
       writeTimestampNull();
     }
@@ -434,18 +434,18 @@ public void writeBoolean(boolean value) throws 
IOException {
   }
 
   @Override
-  public void writeDate(DateTime value) throws IOException {
-    gen.writeString(dateFormatter.print(value.withZone(DateTimeZone.UTC)));
+  public void writeDate(Date value) throws IOException {
+    gen.writeString(dateFormatter.print(value.getTime()));
   }
 
   @Override
-  public void writeTime(DateTime value) throws IOException {
-    gen.writeString(timeFormatter.print(value.withZone(DateTimeZone.UTC)));
+  public void writeTime(Time value) throws IOException {
+    gen.writeString(timeFormatter.print(value.getTime()));
   }
 
   @Override
-  public void writeTimestamp(DateTime value) throws IOException {
-    
gen.writeString(timestampFormatter.print(value.withZone(DateTimeZone.UTC)));
+  public void writeTimestamp(Timestamp value) throws IOException {
+    gen.writeString(timestampFormatter.print(value.getTime()));
   }
 
   @Override
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedJsonOutput.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedJsonOutput.java
index 1bdb9b6199..fa55bc9a53 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedJsonOutput.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedJsonOutput.java
@@ -19,10 +19,10 @@
 
 import java.io.IOException;
 import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.Time;
+import java.sql.Timestamp;
 
-import org.apache.drill.common.types.TypeProtos.MinorType;
-import org.apache.drill.exec.vector.complex.reader.FieldReader;
-import org.joda.time.DateTime;
 import org.joda.time.Period;
 
 import com.fasterxml.jackson.core.JsonGenerator;
@@ -54,7 +54,7 @@ public void writeBinary(byte[] value) throws IOException {
   }
 
   @Override
-  public void writeDate(DateTime value) throws IOException {
+  public void writeDate(Date value) throws IOException {
     gen.writeStartObject();
     gen.writeFieldName(ExtendedType.DATE.serialized);
     super.writeDate(value);
@@ -62,7 +62,7 @@ public void writeDate(DateTime value) throws IOException {
   }
 
   @Override
-  public void writeTime(DateTime value) throws IOException {
+  public void writeTime(Time value) throws IOException {
     gen.writeStartObject();
     gen.writeFieldName(ExtendedType.TIME.serialized);
     super.writeTime(value);
@@ -70,7 +70,7 @@ public void writeTime(DateTime value) throws IOException {
   }
 
   @Override
-  public void writeTimestamp(DateTime value) throws IOException {
+  public void writeTimestamp(Timestamp value) throws IOException {
     gen.writeStartObject();
     gen.writeFieldName(ExtendedType.TIMESTAMP.serialized);
     super.writeTimestamp(value);
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonOutput.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonOutput.java
index a921142ab0..c5aeedcf5e 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonOutput.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonOutput.java
@@ -19,9 +19,11 @@
 
 import java.io.IOException;
 import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.Time;
+import java.sql.Timestamp;
 
 import org.apache.drill.exec.vector.complex.reader.FieldReader;
-import org.joda.time.DateTime;
 import org.joda.time.Period;
 
 /**
@@ -52,9 +54,9 @@
   void writeVar16Char(String value) throws IOException;
   void writeBinary(byte[] value) throws IOException;
   void writeBoolean(boolean value) throws IOException;
-  void writeDate(DateTime value) throws IOException;
-  void writeTime(DateTime value) throws IOException;
-  void writeTimestamp(DateTime value) throws IOException;
+  void writeDate(Date value) throws IOException;
+  void writeTime(Time value) throws IOException;
+  void writeTimestamp(Timestamp value) throws IOException;
   void writeInterval(Period value) throws IOException;
   void writeDecimalNull() throws IOException;
   void writeTinyIntNull() throws IOException;
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/TestFunctionsQuery.java 
b/exec/java-exec/src/test/java/org/apache/drill/TestFunctionsQuery.java
index 5ae0c380dc..d75e2c5fd8 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestFunctionsQuery.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestFunctionsQuery.java
@@ -17,6 +17,13 @@
  */
 package org.apache.drill;
 
+import static org.apache.drill.exec.expr.fn.impl.DateUtility.formatDate;
+import static org.apache.drill.exec.expr.fn.impl.DateUtility.formatTimeStamp;
+
+import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.Timestamp;
+
 import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.apache.drill.test.BaseTestQuery;
@@ -27,11 +34,6 @@
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import java.math.BigDecimal;
-
-import static org.apache.drill.exec.expr.fn.impl.DateUtility.formatDate;
-import static org.apache.drill.exec.expr.fn.impl.DateUtility.formatTimeStamp;
-
 @Category(SqlFunctionTest.class)
 public class TestFunctionsQuery extends BaseTestQuery {
 
@@ -554,7 +556,7 @@ public void testTimeStampConstant() throws Exception {
         .sqlQuery(query)
         .unOrdered()
         .baselineColumns("TS")
-        .baselineValues(date)
+        .baselineValues(new Timestamp(date.getMillis()))
         .go();
   }
 
@@ -691,7 +693,7 @@ public void testFunctionCaseInsensitiveNames() throws 
Exception {
         "To_DaTe('2003/07/09', 'yyyy/MM/dd') as col3 " +
         "from cp.`employee.json` LIMIT 1";
 
-    DateTime date = formatDate.parseDateTime("2003-07-09");
+    Date date = new Date(formatDate.parseDateTime("2003-07-09").getMillis());
 
     testBuilder()
         .sqlQuery(query)
@@ -758,8 +760,8 @@ public void testToTimeStamp() throws Exception {
     String query = "select to_timestamp(cast('800120400.12312' as decimal(38, 
5))) as DEC38_TS, to_timestamp(200120400) as INT_TS " +
         "from cp.`employee.json` where employee_id < 2";
 
-    DateTime result1 = new DateTime(800120400123l);
-    DateTime result2 = new DateTime(200120400000l);
+    Timestamp result1 = new Timestamp(800120400123l);
+    Timestamp result2 = new Timestamp(200120400000l);
 
     testBuilder()
         .sqlQuery(query)
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java 
b/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
index b9e17522f9..f86839d8b3 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
@@ -427,7 +427,7 @@ public void testStarWithOtherExpression() throws Exception {
   @Test // DRILL-1500
   @Category(UnlikelyTest.class)
   public void testStarPartitionFilterOrderBy() throws Exception {
-    org.joda.time.DateTime mydate = new 
org.joda.time.DateTime("1994-01-20T00:00:00.000");
+    java.sql.Date mydate = new java.sql.Date(new 
org.joda.time.DateTime("1994-01-20T00:00:00.000").getMillis());
 
     testBuilder()
     .sqlQuery("select * from dfs.`multilevel/parquet` where dir0=1994 and 
dir1='Q1' order by dir0 limit 1")
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
 
b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
index 14a49b5200..a187000c43 100644
--- 
a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
+++ 
b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
@@ -17,19 +17,23 @@
  */
 package org.apache.drill.exec.fn.impl;
 
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import mockit.integration.junit4.JMockit;
-import org.apache.drill.test.BaseTestQuery;
+import java.math.BigDecimal;
+import java.sql.Date;
+import java.util.List;
+import java.util.Map;
+
 import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.categories.UnlikelyTest;
+import org.apache.drill.test.BaseTestQuery;
 import org.joda.time.DateTime;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
-import java.math.BigDecimal;
-import java.util.List;
-import java.util.Map;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
+import mockit.integration.junit4.JMockit;
 
 @RunWith(JMockit.class)
 @Category({UnlikelyTest.class, SqlFunctionTest.class})
@@ -82,7 +86,7 @@ public void testToDateForTimeStamp() throws Exception {
       .sqlQuery(query)
       .ordered()
       .baselineColumns("col")
-      .baselineValues(new DateTime(1969, 12, 31, 0, 0))
+      .baselineValues(new Date(new DateTime(1969, 12, 31, 0, 0).getMillis()))
       .build()
       .run();
   }
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateAddFunctions.java
 
b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateAddFunctions.java
index b96a32a739..6d17fe0b63 100644
--- 
a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateAddFunctions.java
+++ 
b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateAddFunctions.java
@@ -17,9 +17,12 @@
  */
 package org.apache.drill.exec.fn.impl;
 
-import org.apache.drill.test.BaseTestQuery;
+import java.sql.Date;
+import java.sql.Timestamp;
+
 import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.categories.UnlikelyTest;
+import org.apache.drill.test.BaseTestQuery;
 import org.joda.time.DateTime;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -41,12 +44,12 @@ public void testDateAddIntervalDay() throws Exception {
       .sqlQuery(query)
       .unOrdered()
       .baselineColumns("col1", "col2", "col3", "col4", "col5", "col6")
-      .baselineValues(DateTime.parse("2015-01-27T07:27:05.0"),
-                      DateTime.parse("2015-01-29T07:27:05.0"),
-                      DateTime.parse("2015-01-24T12:27:05.0"),
-                      DateTime.parse("2015-01-24T07:32:05.0"),
-                      DateTime.parse("2015-01-24T07:27:10.0"),
-                      DateTime.parse("2015-01-29T17:47:35.0"))
+      .baselineValues(new 
Timestamp(DateTime.parse("2015-01-27T07:27:05.0").getMillis()),
+                      new 
Timestamp(DateTime.parse("2015-01-29T07:27:05.0").getMillis()),
+                      new 
Timestamp(DateTime.parse("2015-01-24T12:27:05.0").getMillis()),
+                      new 
Timestamp(DateTime.parse("2015-01-24T07:32:05.0").getMillis()),
+                      new 
Timestamp(DateTime.parse("2015-01-24T07:27:10.0").getMillis()),
+                      new 
Timestamp(DateTime.parse("2015-01-29T17:47:35.0").getMillis()))
       .go();
   }
 
@@ -61,9 +64,9 @@ public void testDateAddIntervalYear() throws Exception {
       .sqlQuery(query)
       .unOrdered()
       .baselineColumns("col1", "col2", "col3")
-      .baselineValues(DateTime.parse("2015-04-24"),
-                      DateTime.parse("2015-06-24"),
-                      DateTime.parse("2020-01-24"))
+      .baselineValues(new Date(DateTime.parse("2015-04-24").getMillis()),
+                      new Date(DateTime.parse("2015-06-24").getMillis()),
+                      new Date(DateTime.parse("2020-01-24").getMillis()))
       .go();
   }
 
@@ -77,8 +80,8 @@ public void testDateAddIntegerAsDay() throws Exception {
       .sqlQuery(query)
       .unOrdered()
       .baselineColumns("col1", "col2")
-      .baselineValues(DateTime.parse("2015-01-27"),
-                      DateTime.parse("2015-01-29"))
+      .baselineValues(new Date(DateTime.parse("2015-01-27").getMillis()),
+                      new Date(DateTime.parse("2015-01-29").getMillis()))
       .go();
   }
 }
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateTruncFunctions.java
 
b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateTruncFunctions.java
index 0ef9928a5d..39880a9582 100644
--- 
a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateTruncFunctions.java
+++ 
b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateTruncFunctions.java
@@ -17,18 +17,21 @@
  */
 package org.apache.drill.exec.fn.impl;
 
-import org.apache.drill.test.BaseTestQuery;
+import static org.apache.drill.exec.expr.fn.impl.DateUtility.formatDate;
+import static org.apache.drill.exec.expr.fn.impl.DateUtility.formatTime;
+import static org.apache.drill.exec.expr.fn.impl.DateUtility.formatTimeStamp;
+
+import java.sql.Date;
+import java.sql.Time;
+import java.sql.Timestamp;
+
 import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.categories.UnlikelyTest;
-import org.joda.time.DateTime;
+import org.apache.drill.test.BaseTestQuery;
 import org.joda.time.Period;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import static org.apache.drill.exec.expr.fn.impl.DateUtility.formatDate;
-import static org.apache.drill.exec.expr.fn.impl.DateUtility.formatTime;
-import static org.apache.drill.exec.expr.fn.impl.DateUtility.formatTimeStamp;
-
 @Category({UnlikelyTest.class, SqlFunctionTest.class})
 public class TestDateTruncFunctions extends BaseTestQuery {
 
@@ -52,16 +55,16 @@ public void dateTruncOnTime() throws Exception {
         .unOrdered()
         .baselineColumns("second", "minute", "hour", "day", "month", "year", 
"quarter", "decade", "century", "millennium")
         .baselineValues(
-            formatTime.parseDateTime("2:30:21.0"), // seconds
-            formatTime.parseDateTime("2:30:00.0"), // minute
-            formatTime.parseDateTime("2:00:00.0"), // hour
-            formatTime.parseDateTime("0:00:00.0"), // day
-            formatTime.parseDateTime("0:00:00.0"), // month
-            formatTime.parseDateTime("0:00:00.0"), // year
-            formatTime.parseDateTime("0:00:00.0"), // quarter
-            formatTime.parseDateTime("0:00:00.0"), // decade
-            formatTime.parseDateTime("0:00:00.0"), // century
-            formatTime.parseDateTime("0:00:00.0")) // millennium
+            new Time(formatTime.parseDateTime("2:30:21.0").getMillis()), // 
seconds
+            new Time(formatTime.parseDateTime("2:30:00.0").getMillis()), // 
minute
+            new Time(formatTime.parseDateTime("2:00:00.0").getMillis()), // 
hour
+            new Time(formatTime.parseDateTime("0:00:00.0").getMillis()), // day
+            new Time(formatTime.parseDateTime("0:00:00.0").getMillis()), // 
month
+            new Time(formatTime.parseDateTime("0:00:00.0").getMillis()), // 
year
+            new Time(formatTime.parseDateTime("0:00:00.0").getMillis()), // 
quarter
+            new Time(formatTime.parseDateTime("0:00:00.0").getMillis()), // 
decade
+            new Time(formatTime.parseDateTime("0:00:00.0").getMillis()), // 
century
+            new Time(formatTime.parseDateTime("0:00:00.0").getMillis())) // 
millennium
         .go();
   }
 
@@ -88,19 +91,19 @@ public void dateTruncOnDateSimpleUnits() throws Exception {
         .unOrdered()
         .baselineColumns("second", "minute", "hour", "day", "month", "week" , 
"year", "q1", "q2", "q3", "decade1", "decade2", "decade3")
         .baselineValues(
-            formatDate.parseDateTime("2011-02-03"), // seconds
-            formatDate.parseDateTime("2011-02-03"), // minute
-            formatDate.parseDateTime("2011-02-03"), // hour
-            formatDate.parseDateTime("2011-02-03"), // day
-            formatDate.parseDateTime("2011-02-01"), // month
-            formatDate.parseDateTime("2011-01-31"), // week
-            formatDate.parseDateTime("2011-01-01"), // year
-            formatDate.parseDateTime("2011-04-01"), // quarter-1
-            formatDate.parseDateTime("2011-07-01"), // quarter-2
-            formatDate.parseDateTime("2011-07-01"), // quarter-3
-            formatDate.parseDateTime("2010-01-01"), // decade-1
-            formatDate.parseDateTime("2070-01-01"), // decade-2
-            formatDate.parseDateTime("1970-01-01")) // decade-3
+            new Date(formatDate.parseDateTime("2011-02-03").getMillis()), // 
seconds
+            new Date(formatDate.parseDateTime("2011-02-03").getMillis()), // 
minute
+            new Date(formatDate.parseDateTime("2011-02-03").getMillis()), // 
hour
+            new Date(formatDate.parseDateTime("2011-02-03").getMillis()), // 
day
+            new Date(formatDate.parseDateTime("2011-02-01").getMillis()), // 
month
+            new Date(formatDate.parseDateTime("2011-01-31").getMillis()), // 
week
+            new Date(formatDate.parseDateTime("2011-01-01").getMillis()), // 
year
+            new Date(formatDate.parseDateTime("2011-04-01").getMillis()), // 
quarter-1
+            new Date(formatDate.parseDateTime("2011-07-01").getMillis()), // 
quarter-2
+            new Date(formatDate.parseDateTime("2011-07-01").getMillis()), // 
quarter-3
+            new Date(formatDate.parseDateTime("2010-01-01").getMillis()), // 
decade-1
+            new Date(formatDate.parseDateTime("2070-01-01").getMillis()), // 
decade-2
+            new Date(formatDate.parseDateTime("1970-01-01").getMillis())) // 
decade-3
         .go();
   }
 
@@ -121,11 +124,11 @@ public void dateTruncOnDateCentury() throws Exception {
         .unOrdered()
         .baselineColumns("c1", "c2", "c3", "c4", "c5")
         .baselineValues(
-            formatDate.parseDateTime("2001-01-01"), // c1
-            formatDate.parseDateTime("1901-01-01"), // c2
-            formatDate.parseDateTime("1901-01-01"), // c3
-            formatDate.parseDateTime("0801-01-01"), // c4
-            formatDate.parseDateTime("0001-01-01")) // c5
+            new Date(formatDate.parseDateTime("2001-01-01").getMillis()), // c1
+            new Date(formatDate.parseDateTime("1901-01-01").getMillis()), // c2
+            new Date(formatDate.parseDateTime("1901-01-01").getMillis()), // c3
+            new Date(formatDate.parseDateTime("0801-01-01").getMillis()), // c4
+            new Date(formatDate.parseDateTime("0001-01-01").getMillis())) // c5
         .go();
   }
 
@@ -153,11 +156,11 @@ public void dateTruncOnDateMillennium() throws Exception {
         .unOrdered()
         .baselineColumns("m1", "m2", "m3", "m4", "m5")
         .baselineValues(
-            formatDate.parseDateTime("2001-01-01"), // m1
-            formatDate.parseDateTime("1001-01-01"), // m2
-            formatDate.parseDateTime("1001-01-01"), // m3
-            formatDate.parseDateTime("0001-01-01"), // m4
-            formatDate.parseDateTime("0001-01-01")) // m5
+                new Date(formatDate.parseDateTime("2001-01-01").getMillis()), 
// m1
+                new Date(formatDate.parseDateTime("1001-01-01").getMillis()), 
// m2
+                new Date(formatDate.parseDateTime("1001-01-01").getMillis()), 
// m3
+                new Date(formatDate.parseDateTime("0001-01-01").getMillis()), 
// m4
+                new Date(formatDate.parseDateTime("0001-01-01").getMillis())) 
// m5
         .go();
   }
 
@@ -184,19 +187,19 @@ public void dateTruncOnTimeStampSimpleUnits() throws 
Exception {
         .unOrdered()
         .baselineColumns("second", "minute", "hour", "day", "month", "week" , 
"year", "q1", "q2", "q3", "decade1", "decade2", "decade3")
         .baselineValues(
-            formatTimeStamp.parseDateTime("2011-02-03 10:11:12.0"), // seconds
-            formatTimeStamp.parseDateTime("2011-02-03 10:11:00.0"), // minute
-            formatTimeStamp.parseDateTime("2011-02-03 10:00:00.0"), // hour
-            formatTimeStamp.parseDateTime("2011-02-03 00:00:00.0"), // day
-            formatTimeStamp.parseDateTime("2011-02-01 00:00:00.0"), // month
-            formatTimeStamp.parseDateTime("2011-01-31 00:00:00.0"), // week
-            formatTimeStamp.parseDateTime("2011-01-01 00:00:00.0"), // year
-            formatTimeStamp.parseDateTime("2011-04-01 00:00:00.0"), // 
quarter-1
-            formatTimeStamp.parseDateTime("2011-07-01 00:00:00.0"), // 
quarter-2
-            formatTimeStamp.parseDateTime("2011-07-01 00:00:00.0"), // 
quarter-3
-            formatTimeStamp.parseDateTime("2010-01-01 00:00:00.0"), // decade-1
-            formatTimeStamp.parseDateTime("2070-01-01 00:00:00.0"), // decade-2
-            formatTimeStamp.parseDateTime("1970-01-01 00:00:00.0")) // decade-3
+            new Timestamp(formatTimeStamp.parseDateTime("2011-02-03 
10:11:12.0").getMillis()), // seconds
+            new Timestamp(formatTimeStamp.parseDateTime("2011-02-03 
10:11:00.0").getMillis()), // minute
+            new Timestamp(formatTimeStamp.parseDateTime("2011-02-03 
10:00:00.0").getMillis()), // hour
+            new Timestamp(formatTimeStamp.parseDateTime("2011-02-03 
00:00:00.0").getMillis()), // day
+            new Timestamp(formatTimeStamp.parseDateTime("2011-02-01 
00:00:00.0").getMillis()), // month
+            new Timestamp(formatTimeStamp.parseDateTime("2011-01-31 
00:00:00.0").getMillis()), // week
+            new Timestamp(formatTimeStamp.parseDateTime("2011-01-01 
00:00:00.0").getMillis()), // year
+            new Timestamp(formatTimeStamp.parseDateTime("2011-04-01 
00:00:00.0").getMillis()), // quarter-1
+            new Timestamp(formatTimeStamp.parseDateTime("2011-07-01 
00:00:00.0").getMillis()), // quarter-2
+            new Timestamp(formatTimeStamp.parseDateTime("2011-07-01 
00:00:00.0").getMillis()), // quarter-3
+            new Timestamp(formatTimeStamp.parseDateTime("2010-01-01 
00:00:00.0").getMillis()), // decade-1
+            new Timestamp(formatTimeStamp.parseDateTime("2070-01-01 
00:00:00.0").getMillis()), // decade-2
+            new Timestamp(formatTimeStamp.parseDateTime("1970-01-01 
00:00:00.0").getMillis())) // decade-3
         .go();
   }
 
@@ -217,11 +220,11 @@ public void dateTruncOnTimeStampCentury() throws 
Exception {
         .unOrdered()
         .baselineColumns("c1", "c2", "c3", "c4", "c5")
         .baselineValues(
-            formatTimeStamp.parseDateTime("2001-01-01 00:00:00.0"), // c1
-            formatTimeStamp.parseDateTime("1901-01-01 00:00:00.0"), // c2
-            formatTimeStamp.parseDateTime("1901-01-01 00:00:00.0"), // c3
-            formatTimeStamp.parseDateTime("0801-01-01 00:00:00.0"), // c4
-            formatTimeStamp.parseDateTime("0001-01-01 00:00:00.0")) // c5
+            new Timestamp(formatTimeStamp.parseDateTime("2001-01-01 
00:00:00.0").getMillis()), // c1
+            new Timestamp(formatTimeStamp.parseDateTime("1901-01-01 
00:00:00.0").getMillis()), // c2
+            new Timestamp(formatTimeStamp.parseDateTime("1901-01-01 
00:00:00.0").getMillis()), // c3
+            new Timestamp(formatTimeStamp.parseDateTime("0801-01-01 
00:00:00.0").getMillis()), // c4
+            new Timestamp(formatTimeStamp.parseDateTime("0001-01-01 
00:00:00.0").getMillis())) // c5
         .go();
   }
 
@@ -242,11 +245,11 @@ public void dateTruncOnTimeStampMillennium() throws 
Exception {
         .unOrdered()
         .baselineColumns("m1", "m2", "m3", "m4", "m5")
         .baselineValues(
-            formatTimeStamp.parseDateTime("2001-01-01 00:00:00.0"), // m1
-            formatTimeStamp.parseDateTime("1001-01-01 00:00:00.0"), // m2
-            formatTimeStamp.parseDateTime("1001-01-01 00:00:00.0"), // m3
-            formatTimeStamp.parseDateTime("0001-01-01 00:00:00.0"), // m4
-            formatTimeStamp.parseDateTime("0001-01-01 00:00:00.0")) // m5
+            new Timestamp(formatTimeStamp.parseDateTime("2001-01-01 
00:00:00.0").getMillis()), // m1
+            new Timestamp(formatTimeStamp.parseDateTime("1001-01-01 
00:00:00.0").getMillis()), // m2
+            new Timestamp(formatTimeStamp.parseDateTime("1001-01-01 
00:00:00.0").getMillis()), // m3
+            new Timestamp(formatTimeStamp.parseDateTime("0001-01-01 
00:00:00.0").getMillis()), // m4
+            new Timestamp(formatTimeStamp.parseDateTime("0001-01-01 
00:00:00.0").getMillis())) // m5
         .go();
   }
 
@@ -327,12 +330,12 @@ public void testDateTrunc() throws Exception {
         + "date_trunc('YEAR', date '2011-2-2') as DATE2 "
         + "from cp.`employee.json` where employee_id < 2";
 
-    DateTime time1 = formatTime.parseDateTime("2:30:00.0");
-    DateTime time2 = formatTime.parseDateTime("2:30:21.0");
-    DateTime ts1 = formatTimeStamp.parseDateTime("1991-05-05 10:00:00.0");
-    DateTime ts2 = formatTimeStamp.parseDateTime("1991-05-05 10:11:12.0");
-    DateTime date1 = formatDate.parseDateTime("2011-02-01");
-    DateTime date2 = formatDate.parseDateTime("2011-01-01");
+    Time time1 = new Time(formatTime.parseDateTime("2:30:00.0").getMillis());
+    Time time2 = new Time(formatTime.parseDateTime("2:30:21.0").getMillis());
+    Timestamp ts1 = new Timestamp(formatTimeStamp.parseDateTime("1991-05-05 
10:00:00.0").getMillis());
+    Timestamp ts2 = new Timestamp(formatTimeStamp.parseDateTime("1991-05-05 
10:11:12.0").getMillis());
+    Date date1 = new Date(formatDate.parseDateTime("2011-02-01").getMillis());
+    Date date2 = new Date(formatDate.parseDateTime("2011-01-01").getMillis());
 
     testBuilder()
         .sqlQuery(query)
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewDateFunctions.java
 
b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewDateFunctions.java
index a546a502b1..16517fb63f 100644
--- 
a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewDateFunctions.java
+++ 
b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNewDateFunctions.java
@@ -17,9 +17,11 @@
  */
 package org.apache.drill.exec.fn.impl;
 
-import org.apache.drill.test.BaseTestQuery;
+import java.sql.Date;
+
 import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.categories.UnlikelyTest;
+import org.apache.drill.test.BaseTestQuery;
 import org.joda.time.DateTime;
 import org.joda.time.format.DateTimeFormat;
 import org.joda.time.format.DateTimeFormatter;
@@ -53,9 +55,9 @@ public void testIsDate() throws Exception {
         .sqlQuery("select case when isdate(date1) then cast(date1 as date) 
else null end res1 from " + dateValues)
         .unOrdered()
         .baselineColumns("res1")
-        .baselineValues(new DateTime(1900, 1, 1, 0, 0))
-        .baselineValues(new DateTime(3500, 1, 1, 0, 0))
-        .baselineValues(new DateTime(2000, 12, 31, 0, 0))
+        .baselineValues(new Date(new DateTime(1900, 1, 1, 0, 0).getMillis()))
+        .baselineValues(new Date(new DateTime(3500, 1, 1, 0, 0).getMillis()))
+        .baselineValues(new Date(new DateTime(2000, 12, 31, 0, 0).getMillis()))
         .baselineValues(new Object[] {null})
         .baselineValues(new Object[] {null})
         .baselineValues(new Object[] {null})
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java
 
b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java
index 493a3b66f7..6e28b68139 100644
--- 
a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java
+++ 
b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java
@@ -20,6 +20,7 @@
 import static org.junit.Assert.assertEquals;
 
 import java.nio.ByteBuffer;
+import java.sql.Timestamp;
 import java.util.List;
 
 import org.apache.drill.categories.SlowTest;
@@ -48,12 +49,11 @@
 import org.apache.drill.exec.store.mock.MockSubScanPOP;
 import org.apache.drill.exec.store.mock.MockTableDef;
 import org.apache.drill.exec.vector.ValueVector;
-import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
 import com.google.common.collect.Lists;
-import org.junit.experimental.categories.Category;
 
 @Category({SlowTest.class, SqlTest.class})
 public class ExpressionInterpreterTest  extends PopUnitTestBase {
@@ -141,7 +141,7 @@ public void interpreterDateTest() throws Exception {
     final ByteBuffer buffer = ByteBuffer.allocate(12);
     buffer.putLong(out.value);
     final long l = buffer.getLong(0);
-    final DateTime t = new DateTime(l);
+    final Timestamp t = new Timestamp(l);
 
     final String[] expectedFirstTwoValues = {t.toString(), t.toString()};
 
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestConvertFunctions.java
 
b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestConvertFunctions.java
index 06d761f6db..6c007daa45 100644
--- 
a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestConvertFunctions.java
+++ 
b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestConvertFunctions.java
@@ -25,11 +25,11 @@
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
+import java.sql.Date;
+import java.sql.Time;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.drill.test.BaseTestQuery;
-import org.apache.drill.test.QueryTestUtil;
 import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.compile.ClassTransformer.ScalarReplacementOption;
@@ -44,16 +44,18 @@
 import org.apache.drill.exec.util.VectorUtil;
 import org.apache.drill.exec.vector.ValueVector;
 import org.apache.drill.exec.vector.VarCharVector;
+import org.apache.drill.test.BaseTestQuery;
+import org.apache.drill.test.QueryTestUtil;
 import org.joda.time.DateTime;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
 import com.google.common.base.Charsets;
 import com.google.common.io.Resources;
 
 import io.netty.buffer.DrillBuf;
-import org.junit.experimental.categories.Category;
 
 @Category(UnlikelyTest.class)
 public class TestConvertFunctions extends BaseTestQuery {
@@ -66,8 +68,8 @@
   private static final String DATE_TIME_BE = 
"\\x00\\x00\\x00\\x49\\x77\\x85\\x1f\\x8e";
   private static final String DATE_TIME_LE = 
"\\x8e\\x1f\\x85\\x77\\x49\\x00\\x00\\x00";
 
-  private static DateTime time = DateTime.parse("01:23:45.678", 
DateUtility.getTimeFormatter());
-  private static DateTime date = DateTime.parse("1980-01-01", 
DateUtility.getDateTimeFormatter());
+  private static Time time = new Time(DateTime.parse("01:23:45.678", 
DateUtility.getTimeFormatter()).getMillis());
+  private static Date date = new Date(DateTime.parse("1980-01-01", 
DateUtility.getDateTimeFormatter()).getMillis());
 
   String textFileContent;
 
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestNestedDateTimeTimestamp.java
 
b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestNestedDateTimeTimestamp.java
new file mode 100644
index 0000000000..63bd9662b1
--- /dev/null
+++ 
b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestNestedDateTimeTimestamp.java
@@ -0,0 +1,212 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.physical.impl;
+
+import java.sql.Date;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.apache.drill.exec.rpc.user.QueryDataBatch;
+import org.apache.drill.test.BaseTestQuery;
+import org.joda.time.DateTime;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * For DRILL-6242, output for Date, Time, Timestamp should use different 
classes
+ */
+public class TestNestedDateTimeTimestamp extends BaseTestQuery {
+    private static final String DATAFILE = "cp.`datetime.parquet`";
+    private static final Map<String,Object> expectedRecord = new 
TreeMap<String,Object>();
+
+    static {
+        /**
+         * Data in the parquet file represents this equivalent JSON, but with 
typed date, time, and timestamp values:
+         * {
+         *    "date" : "1970-01-11",
+         *    "time" : "00:00:03.600",
+         *    "timestamp" : "2018-03-23T17:40:52.123Z",
+         *    "date_list" : [ "1970-01-11" ],
+         *    "time_list" : [ "00:00:03.600" ],
+         *    "timestamp_list" : [ "2018-03-23T17:40:52.123Z" ],
+         *    "time_map" : {
+         *      "date" : "1970-01-11",
+         *      "time" : "00:00:03.600",
+         *      "timestamp" : "2018-03-23T17:40:52.123Z"
+         *    }
+         *  }
+         *
+         * Note that when the above data is read into Drill, Drill adjusts 
the timestamp
+         * to the local time zone while preserving the <date> and <time> 
values.  This effectively
+         * changes the timestamp, if the time zone is not UTC.
+         */
+
+        Date date = Date.valueOf("1970-01-11");
+        Time time = new Time(Timestamp.valueOf("1970-01-01 
00:00:03.600").getTime());
+        Timestamp timestamp = Timestamp.valueOf("2018-03-23 17:40:52.123");
+        expectedRecord.put("`date`", date);
+        expectedRecord.put("`time`", time);
+        expectedRecord.put("`timestamp`", timestamp);
+        expectedRecord.put("`date_list`", Arrays.asList(date));
+        expectedRecord.put("`time_list`", Arrays.asList(time));
+        expectedRecord.put("`timestamp_list`", Arrays.asList(timestamp));
+        Map<String,Object> nestedMap = new TreeMap<String,Object>();
+        nestedMap.put("date", date);
+        nestedMap.put("time", time);
+        nestedMap.put("timestamp", timestamp);
+
+        expectedRecord.put("`time_map`", nestedMap);
+    }
+
+
+    /**
+     * Test reading of from the parquet file that contains nested time, date, 
and timestamp
+     */
+    @Test
+    public void testNested() throws Exception {
+      String query = String.format("select * from %s limit 1", DATAFILE);
+      testBuilder()
+              .sqlQuery(query)
+              .ordered()
+              .baselineRecords(Arrays.asList(expectedRecord))
+              .build()
+              .run();
+    }
+
+    /**
+     * Test the textual display to make sure it is consistent with actual JSON 
output
+     */
+    @Test
+    public void testNestedDateTimePrint() throws Exception {
+        List<QueryDataBatch> resultList = 
testSqlWithResults(String.format("select * from %s limit 1", DATAFILE));
+        String actual = getResultString(resultList, " | ");
+
+        final String expected =
+                "date | time | timestamp | date_list | time_list | 
timestamp_list | time_map\n" +
+                "1970-01-11 | 00:00:03 | 2018-03-23 17:40:52.123 | 
[\"1970-01-11\"] | [\"00:00:03.600\"] | [\"2018-03-23 17:40:52.123\"] | 
{\"date\":\"1970-01-11\",\"time\":\"00:00:03.600\",\"timestamp\":\"2018-03-23 
17:40:52.123\"}";
+
+        Assert.assertEquals(expected.trim(), actual.trim());
+    }
+
+    /**
+     * Test that the JSON output remains consistent with previous behavior
+     */
+    @Test
+    public void testNestedDateTimeCTASJson() throws Exception {
+        String query = String.format("select * from %s limit 1", DATAFILE);
+        String testName = "ctas_nested_datetime";
+        try {
+            test("alter session set store.format = 'json'");
+            test("alter session set store.json.extended_types = false");
+            test("use dfs.tmp");
+            test("create table " + testName + "_json as " + query);
+
+            final String readQuery = "select * from `" + testName + "_json` t1 
";
+
+            testBuilder()
+                .sqlQuery(readQuery)
+                .ordered()
+                .jsonBaselineFile("baseline_nested_datetime.json")
+                .build()
+                .run();
+        } finally {
+          test("drop table " + testName + "_json");
+          test("alter session reset store.format ");
+          test("alter session reset store.json.extended_types ");
+        }
+    }
+
+    /**
+     * Test that the extended JSON output remains consistent with previous behavior
+     */
+    @Test
+    public void testNestedDateTimeCTASExtendedJson() throws Exception {
+        String query = String.format("select * from %s limit 1", DATAFILE);
+        String testName = "ctas_nested_datetime_extended";
+        try {
+            test("alter session set store.format = 'json'");
+            test("alter session set store.json.extended_types = true");
+            test("use dfs.tmp");
+            test("create table " + testName + "_json as " + query);
+
+            final String readQuery = "select * from `" + testName + "_json` t1 
";
+
+            testBuilder()
+                .sqlQuery(readQuery)
+                .ordered()
+                .jsonBaselineFile("datetime.parquet")
+                .build()
+                .run();
+        } finally {
+          test("drop table " + testName + "_json");
+          test("alter session reset store.format ");
+          test("alter session reset store.json.extended_types ");
+        }
+    }
+
+    /**
+     * Test that the Parquet output remains consistent with previous behavior
+     */
+    @Test
+    public void testNestedDateTimeCTASParquet() throws Exception {
+        String query = String.format("select * from %s limit 1", DATAFILE);
+        String testName = "ctas_nested_datetime_extended";
+        try {
+            test("alter session set store.format = 'parquet'");
+            test("use dfs.tmp");
+            test("create table " + testName + "_parquet as " + query);
+
+            final String readQuery = "select * from `" + testName + "_parquet` 
t1 ";
+
+            testBuilder()
+                .sqlQuery(readQuery)
+                .ordered()
+                .jsonBaselineFile("datetime.parquet")
+                .build()
+                .run();
+        } finally {
+          test("drop table " + testName + "_parquet");
+          test("alter session reset store.format ");
+        }
+    }
+
+    /**
+     * Testing time zone change and revert
+     */
+    @Test
+    public void testTimeZoneChangeAndReverse() throws Exception {
+        long timeMillis[] = new long[]{864000000L, 3600L, 1521826852123L};
+
+        for (int i = 0 ; i < timeMillis.length ; i++) {
+            DateTime time1 = new org.joda.time.DateTime(timeMillis[i], 
org.joda.time.DateTimeZone.UTC);
+            DateTime time2 = new DateTime(timeMillis[i], 
org.joda.time.DateTimeZone.UTC).withZoneRetainFields(org.joda.time.DateTimeZone.getDefault());
+            DateTime time3 = new 
DateTime(time2.getMillis()).withZoneRetainFields(org.joda.time.DateTimeZone.UTC);
+
+            Assert.assertEquals(time1.toString(), time3.toString());
+            Assert.assertEquals(time1.toString().substring(0,23), 
time2.toString().substring(0,23));
+
+            System.out.println("time1 = " + time1 + ", time2 = " + time2 + ", 
time3 = " + time3);
+            System.out.println("  time1 = " + time1.toString().substring(0,23) 
+ "\n  time2 = " + time2.toString().substring(0,23) + "\n  time3 = " + 
time3.toString().substring(0,23));
+
+        }
+    }
+}
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestEarlyLimit0Optimization.java
 
b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestEarlyLimit0Optimization.java
index 8729b69592..85f33b3ccf 100644
--- 
a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestEarlyLimit0Optimization.java
+++ 
b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestEarlyLimit0Optimization.java
@@ -110,8 +110,8 @@ public void simpleSelect() throws Exception {
         .ordered()
         .baselineColumns("employee_id", "full_name", "position_id", 
"department_id", "birth_date", "hire_date",
             "salary", "fsalary", "single", "education_level", "gender")
-        .baselineValues(1, "Sheri Nowmer", 1, 1L, new 
DateTime(Date.valueOf("1961-08-26").getTime()),
-            new DateTime(Date.valueOf("1994-12-01").getTime()), 80000.0D, 
80000.0F, true, "Graduate Degree", "F")
+        .baselineValues(1, "Sheri Nowmer", 1, 1L, Date.valueOf("1961-08-26"),
+            Date.valueOf("1994-12-01"), 80000.0D, 80000.0F, true, "Graduate 
Degree", "F")
         .go();
   }
 
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestCorruptParquetDateCorrection.java
 
b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestCorruptParquetDateCorrection.java
index 95820f24d5..c68fa27ac2 100644
--- 
a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestCorruptParquetDateCorrection.java
+++ 
b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestCorruptParquetDateCorrection.java
@@ -35,6 +35,7 @@
 import java.io.File;
 import java.nio.file.Path;
 import java.nio.file.Paths;
+import java.sql.Date;
 
 /**
  * Tests for compatibility reading old parquet files after date corruption
@@ -144,7 +145,7 @@ public void testReadPartitionedOnCorrectDates() throws 
Exception {
             .sqlQuery(query)
             .unOrdered()
             .baselineColumns("date_col")
-            .baselineValues(new DateTime(1970, 1, 1, 0, 0))
+            .baselineValues(new Date(new DateTime(1970, 1, 1, 0, 
0).getMillis()))
             .go();
       }
     } finally {
@@ -158,8 +159,8 @@ public void testVarcharPartitionedReadWithCorruption() 
throws Exception {
         .sqlQuery("select date_col from dfs.`%s` where length(varchar_col) = 
12", VARCHAR_PARTITIONED)
         .baselineColumns("date_col")
         .unOrdered()
-        .baselineValues(new DateTime(2039, 4, 9, 0, 0))
-        .baselineValues(new DateTime(1999, 1, 8, 0, 0))
+        .baselineValues(new Date(new DateTime(2039, 4, 9, 0, 0).getMillis()))
+        .baselineValues(new Date(new DateTime(1999, 1, 8, 0, 0).getMillis()))
         .go();
   }
 
@@ -169,7 +170,7 @@ public void testDatePartitionedReadWithCorruption() throws 
Exception {
         .sqlQuery("select date_col from dfs.`%s` where date_col = 
'1999-04-08'", DATE_PARTITIONED)
         .baselineColumns("date_col")
         .unOrdered()
-        .baselineValues(new DateTime(1999, 4, 8, 0, 0))
+        .baselineValues(new Date(new DateTime(1999, 4, 8, 0, 0).getMillis()))
         .go();
 
     String query = format("select date_col from dfs.`%s` where date_col > 
'1999-04-08'", DATE_PARTITIONED);
@@ -183,10 +184,10 @@ public void 
testCorrectDatesAndExceptionWhileParsingCreatedBy() throws Exception
             EXCEPTION_WHILE_PARSING_CREATED_BY_META)
         .baselineColumns("date_col")
         .unOrdered()
-        .baselineValues(new DateTime(1996, 1, 29, 0, 0))
-        .baselineValues(new DateTime(1996, 3, 1, 0, 0))
-        .baselineValues(new DateTime(1996, 3, 2, 0, 0))
-        .baselineValues(new DateTime(1997, 3, 1, 0, 0))
+        .baselineValues(new Date(new DateTime(1996, 1, 29, 0, 0).getMillis()))
+        .baselineValues(new Date(new DateTime(1996, 3, 1, 0, 0).getMillis()))
+        .baselineValues(new Date(new DateTime(1996, 3, 2, 0, 0).getMillis()))
+        .baselineValues(new Date(new DateTime(1997, 3, 1, 0, 0).getMillis()))
         .go();
   }
 
@@ -238,7 +239,7 @@ public void testCorruptValueDetectionDuringPruning() throws 
Exception {
               .sqlQuery(query)
               .unOrdered()
               .baselineColumns("date_col")
-              .baselineValues(new DateTime(1970, 1, 1, 0, 0))
+              .baselineValues(new Date(new DateTime(1970, 1, 1, 0, 
0).getMillis()))
               .go();
         }
       }
@@ -265,12 +266,12 @@ public void testReadCorruptDatesWithNullFilledColumns() 
throws Exception {
             PARQUET_DATE_FILE_WITH_NULL_FILLED_COLS)
         .unOrdered()
         .baselineColumns("null_dates_1", "null_dates_2", "non_existent_field", 
"date_col")
-        .baselineValues(null, null, null, new DateTime(1970, 1, 1, 0, 0))
-        .baselineValues(null, null, null, new DateTime(1970, 1, 2, 0, 0))
-        .baselineValues(null, null, null, new DateTime(1969, 12, 31, 0, 0))
-        .baselineValues(null, null, null, new DateTime(1969, 12, 30, 0, 0))
-        .baselineValues(null, null, null, new DateTime(1900, 1, 1, 0, 0))
-        .baselineValues(null, null, null, new DateTime(2015, 1, 1, 0, 0))
+        .baselineValues(null, null, null, new Date(new DateTime(1970, 1, 1, 0, 
0).getMillis()))
+        .baselineValues(null, null, null, new Date(new DateTime(1970, 1, 2, 0, 
0).getMillis()))
+        .baselineValues(null, null, null, new Date(new DateTime(1969, 12, 31, 
0, 0).getMillis()))
+        .baselineValues(null, null, null, new Date(new DateTime(1969, 12, 30, 
0, 0).getMillis()))
+        .baselineValues(null, null, null, new Date(new DateTime(1900, 1, 1, 0, 
0).getMillis()))
+        .baselineValues(null, null, null, new Date(new DateTime(2015, 1, 1, 0, 
0).getMillis()))
         .go();
   }
 
@@ -336,7 +337,7 @@ public void testReadOldMetadataCacheFileWithPruning() 
throws Exception {
         .sqlQuery(query)
         .unOrdered()
         .baselineColumns("date_col")
-        .baselineValues(new DateTime(1970, 1, 1, 0, 0))
+        .baselineValues(new Date(new DateTime(1970, 1, 1, 0, 0).getMillis()))
         .go();
   }
 
@@ -365,9 +366,9 @@ public void 
testReadNewMetadataCacheFileOverOldAndNewFiles() throws Exception {
         .sqlQuery(query)
         .unOrdered()
         .baselineColumns("date_col")
-        .baselineValues(new DateTime(1970, 1, 1, 0, 0))
-        .baselineValues(new DateTime(1970, 1, 1, 0, 0))
-        .baselineValues(new DateTime(1970, 1, 1, 0, 0))
+        .baselineValues(new Date(new DateTime(1970, 1, 1, 0, 0).getMillis()))
+        .baselineValues(new Date(new DateTime(1970, 1, 1, 0, 0).getMillis()))
+        .baselineValues(new Date(new DateTime(1970, 1, 1, 0, 0).getMillis()))
         .go();
   }
 
@@ -377,7 +378,7 @@ public void 
testCorrectDateValuesGeneratedByOldVersionOfDrill() throws Exception
         .sqlQuery("select i_rec_end_date from dfs.`%s` limit 1", 
CORRECT_DATES_1_6_0_PATH)
         .baselineColumns("i_rec_end_date")
         .unOrdered()
-        .baselineValues(new DateTime(2000, 10, 26, 0, 0))
+        .baselineValues(new Date(new DateTime(2000, 10, 26, 0, 0).getMillis()))
         .go();
   }
 
@@ -403,12 +404,12 @@ private void readMixedCorruptedAndCorrectDates() throws 
Exception {
 
   private void addDateBaselineValues(TestBuilder builder) {
     builder
-        .baselineValues(new DateTime(1970, 1, 1, 0, 0))
-        .baselineValues(new DateTime(1970, 1, 2, 0, 0))
-        .baselineValues(new DateTime(1969, 12, 31, 0, 0))
-        .baselineValues(new DateTime(1969, 12, 30, 0, 0))
-        .baselineValues(new DateTime(1900, 1, 1, 0, 0))
-        .baselineValues(new DateTime(2015, 1, 1, 0, 0));
+        .baselineValues(new Date(new DateTime(1970, 1, 1, 0, 0).getMillis()))
+        .baselineValues(new Date(new DateTime(1970, 1, 2, 0, 0).getMillis()))
+        .baselineValues(new Date(new DateTime(1969, 12, 31, 0, 0).getMillis()))
+        .baselineValues(new Date(new DateTime(1969, 12, 30, 0, 0).getMillis()))
+        .baselineValues(new Date(new DateTime(1900, 1, 1, 0, 0).getMillis()))
+        .baselineValues(new Date(new DateTime(2015, 1, 1, 0, 0).getMillis()));
   }
 
   /**
@@ -416,12 +417,12 @@ private void addDateBaselineValues(TestBuilder builder) {
    */
   private void addCorruptedDateBaselineValues(TestBuilder builder) {
     builder
-        .baselineValues(new DateTime(15334, 3, 17, 0, 0))
-        .baselineValues(new DateTime(15334, 3, 18, 0, 0))
-        .baselineValues(new DateTime(15334, 3, 15, 0, 0))
-        .baselineValues(new DateTime(15334, 3, 16, 0, 0))
-        .baselineValues(new DateTime(15264, 3, 16, 0, 0))
-        .baselineValues(new DateTime(15379, 3, 17, 0, 0));
+        .baselineValues(new Date(new DateTime(15334, 3, 17, 0, 0).getMillis()))
+        .baselineValues(new Date(new DateTime(15334, 3, 18, 0, 0).getMillis()))
+        .baselineValues(new Date(new DateTime(15334, 3, 15, 0, 0).getMillis()))
+        .baselineValues(new Date(new DateTime(15334, 3, 16, 0, 0).getMillis()))
+        .baselineValues(new Date(new DateTime(15264, 3, 16, 0, 0).getMillis()))
+        .baselineValues(new Date(new DateTime(15379, 3, 17, 0, 
0).getMillis()));
   }
 
   private void readFilesWithUserDisabledAutoCorrection() throws Exception {
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
 
b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
index c359e69b6d..34ab7e311b 100644
--- 
a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
+++ 
b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
@@ -49,7 +49,6 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.parquet.hadoop.ParquetFileReader;
 import org.apache.parquet.hadoop.metadata.ParquetMetadata;
-import org.joda.time.DateTime;
 import org.joda.time.Period;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -612,7 +611,7 @@ public void 
tableSchemaWhenSelectFieldsInDef_SelectFieldsInView() throws Excepti
           .unOrdered()
           .sqlQuery("SELECT * FROM dfs.tmp.`%s`", newTblName)
           .baselineColumns("id", "name", "bday")
-          .baselineValues(1, "Sheri Nowmer", new 
DateTime(Date.valueOf("1961-08-26").getTime()))
+          .baselineValues(1, "Sheri Nowmer", Date.valueOf("1961-08-26"))
           .go();
     } finally {
       deleteTableIfExists(newTblName);
@@ -787,7 +786,7 @@ public void testImpalaParquetBinaryAsTimeStamp_DictChange() 
throws Exception {
               "alter session set `%s` = true", 
ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP)
           .ordered()
           .baselineColumns("date_value")
-          .baselineValues(new DateTime(convertToLocalTimestamp("1970-01-01 
00:00:01.000")))
+          .baselineValues(convertToLocalTimestamp("1970-01-01 00:00:01.000"))
           .build().run();
     } finally {
       resetSessionOption(ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP);
@@ -803,7 +802,7 @@ public void testSparkParquetBinaryAsTimeStamp_DictChange() 
throws Exception {
                "alter session set `%s` = true", 
ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP)
           .ordered()
           .baselineColumns("run_date")
-          .baselineValues(new DateTime(convertToLocalTimestamp("2017-12-06 
16:38:43.988")))
+          .baselineValues(convertToLocalTimestamp("2017-12-06 16:38:43.988"))
           .build().run();
     } finally {
       resetSessionOption(ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP);
@@ -861,7 +860,7 @@ public void testHiveParquetTimestampAsInt96_basic() throws 
Exception {
         .sqlQuery("SELECT convert_from(timestamp_field, 'TIMESTAMP_IMPALA')  
as timestamp_field "
              + "from cp.`parquet/part1/hive_all_types.parquet` ")
         .baselineColumns("timestamp_field")
-        .baselineValues(new DateTime(convertToLocalTimestamp("2013-07-06 
00:01:00")))
+        .baselineValues(convertToLocalTimestamp("2013-07-06 00:01:00"))
         .baselineValues((Object)null)
         .go();
   }
@@ -992,10 +991,10 @@ public void testInt96TimeStampValueWidth() throws 
Exception {
           .optionSettingQueriesForTestQuery(
               "alter session set `%s` = true", 
ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP)
           .baselineColumns("c", "d")
-          .baselineValues(new DateTime(Date.valueOf("2012-12-15").getTime()),
-              new DateTime(convertToLocalTimestamp("2016-04-24 20:06:28")))
-          .baselineValues(new DateTime(Date.valueOf("2011-07-09").getTime()),
-              new DateTime(convertToLocalTimestamp("2015-04-15 22:35:49")))
+          .baselineValues(Date.valueOf("2012-12-15"),
+              convertToLocalTimestamp("2016-04-24 20:06:28"))
+          .baselineValues(Date.valueOf("2011-07-09"),
+              convertToLocalTimestamp("2015-04-15 22:35:49"))
           .build()
           .run();
     } finally {
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/exec/planner/logical/TestCaseNullableTypes.java
 
b/exec/java-exec/src/test/java/org/apache/drill/exec/planner/logical/TestCaseNullableTypes.java
index f18821bd0e..2444192d92 100644
--- 
a/exec/java-exec/src/test/java/org/apache/drill/exec/planner/logical/TestCaseNullableTypes.java
+++ 
b/exec/java-exec/src/test/java/org/apache/drill/exec/planner/logical/TestCaseNullableTypes.java
@@ -133,8 +133,8 @@ public void testMultipleCasesNullableTypes() throws 
Exception {
 
   @Test //DRILL-5048
   public void testCaseNullableTimestamp() throws Exception {
-    DateTime date = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss")
-      .parseDateTime("2016-11-17 14:43:23");
+    java.sql.Date date = new 
java.sql.Date(DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss")
+      .parseDateTime("2016-11-17 14:43:23").getMillis());
 
     testBuilder()
       .sqlQuery("SELECT (CASE WHEN (false) THEN null ELSE CAST('2016-11-17 
14:43:23' AS TIMESTAMP) END) res FROM (values(1)) foo")
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/exec/store/bson/TestBsonRecordReader.java
 
b/exec/java-exec/src/test/java/org/apache/drill/exec/store/bson/TestBsonRecordReader.java
index 129cfd2508..d5b06c7b73 100644
--- 
a/exec/java-exec/src/test/java/org/apache/drill/exec/store/bson/TestBsonRecordReader.java
+++ 
b/exec/java-exec/src/test/java/org/apache/drill/exec/store/bson/TestBsonRecordReader.java
@@ -79,7 +79,7 @@ public void testTimeStampType() throws IOException {
     writer.reset();
     bsonReader.write(writer, new BsonDocumentReader(bsonDoc));
     SingleMapReaderImpl mapReader = (SingleMapReaderImpl) 
writer.getMapVector().getReader();
-    assertEquals(1000l, mapReader.reader("ts").readDateTime().getMillis());
+    assertEquals(1000000l, mapReader.reader("ts").readTimestamp().getTime());
   }
 
   @Test
@@ -202,7 +202,7 @@ public void testDateTimeType() throws IOException {
     writer.reset();
     bsonReader.write(writer, new BsonDocumentReader(bsonDoc));
     SingleMapReaderImpl mapReader = (SingleMapReaderImpl) 
writer.getMapVector().getReader();
-    assertEquals(5262729712L, 
mapReader.reader("dateTimeKey").readDateTime().getMillis());
+    assertEquals(5262729712L, 
mapReader.reader("dateTimeKey").readTimestamp().getTime());
   }
 
   @Test
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetComplex.java
 
b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetComplex.java
index 8252834d2f..f209de32e4 100644
--- 
a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetComplex.java
+++ 
b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetComplex.java
@@ -17,16 +17,19 @@
  */
 package org.apache.drill.exec.store.parquet;
 
+import static org.apache.drill.test.TestBuilder.mapOf;
+
+import java.sql.Date;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.util.Arrays;
+
 import org.apache.drill.test.BaseTestQuery;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.joda.time.Period;
 import org.junit.Test;
 
-import java.util.Arrays;
-
-import static org.apache.drill.test.TestBuilder.mapOf;
-
 public class TestParquetComplex extends BaseTestQuery {
 
   private static final String DATAFILE = 
"cp.`store/parquet/complex/complex.parquet`";
@@ -275,21 +278,21 @@ public void testComplexLogicalIntTypes2() throws 
Exception {
         .ordered()
         .baselineColumns(columns)
         .baselineValues(1, "UTF8 string1", "RANDOM_VALUE", 1234567, 123, 
12345, 1234567, 123, 1234, 1234567,
-            1234567890123456L, 1234567890123456L, 1234567890123456L, new 
DateTime("5350-02-17"),
-            new DateTime(1234567, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()),
-            new DateTime("1973-11-29T21:33:09.012"),
+            1234567890123456L, 1234567890123456L, 1234567890123456L, new 
Date(new DateTime("5350-02-17").getMillis()),
+            new Time(new DateTime(1234567, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()).getMillis()),
+            new Timestamp(new DateTime("1973-11-29T21:33:09.012").getMillis()),
             new 
Period().plusMonths(875770417).plusDays(943142453).plusMillis(1650536505),
             bytes12)
         .baselineValues(2, "UTF8 string2", "MAX_VALUE", 2147483647, 127, 
32767, 2147483647, 255, 65535, -1,
-            9223372036854775807L, 9223372036854775807L, -1L, new 
DateTime("1969-12-31"),
-            new DateTime(0xFFFFFFFF, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()),
-            new DateTime("2038-01-19T03:14:07.999"),
+            9223372036854775807L, 9223372036854775807L, -1L, new Date(new 
DateTime("1969-12-31").getMillis()),
+            new Time(new DateTime(0xFFFFFFFF, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()).getMillis()),
+            new Timestamp(new DateTime("2038-01-19T03:14:07.999").getMillis()),
             new 
Period().plusMonths(16843009).plusDays(16843009).plusMillis(16843009),
             bytesOnes)
         .baselineValues(3, "UTF8 string3", "MIN_VALUE", -2147483648, -128, 
-32768, -2147483648, 0, 0, 0,
-            -9223372036854775808L, -9223372036854775808L, 0L, new 
DateTime("1970-01-01"),
-            new DateTime(0, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()),
-            new DateTime("1970-01-01T00:00:00.0"), new Period("PT0S"), 
bytesZeros)
+            -9223372036854775808L, -9223372036854775808L, 0L, new Date(new 
DateTime("1970-01-01").getMillis()),
+            new Time(new DateTime(0, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()).getMillis()),
+            new Timestamp(new DateTime("1970-01-01T00:00:00.0").getMillis()), 
new Period("PT0S"), bytesZeros)
         .build()
         .run();
   }
@@ -350,21 +353,21 @@ public void testComplexLogicalIntTypes3() throws 
Exception {
         .ordered()
         .baselineColumns(columns)
         .baselineValues(1, "UTF8 string1", "RANDOM_VALUE", 1234567, 123, 
12345, 1234567, 123, 1234, 1234567,
-            1234567890123456L, 1234567890123456L, 1234567890123456L, new 
DateTime("5350-02-17"),
-            new DateTime(1234567, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()),
-            new DateTime("1973-11-29T21:33:09.012"),
+            1234567890123456L, 1234567890123456L, 1234567890123456L, new 
Date(new DateTime("5350-02-17").getMillis()),
+            new Time(new DateTime(1234567, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()).getMillis()),
+            new Timestamp(new DateTime("1973-11-29T21:33:09.012").getMillis()),
             new 
Period().plusMonths(875770417).plusDays(943142453).plusMillis(1650536505),
             bytes12)
         .baselineValues(2, "UTF8 string2", "MAX_VALUE", 2147483647, 127, 
32767, 2147483647, 255, 65535, -1,
-            9223372036854775807L, 9223372036854775807L, -1L, new 
DateTime("1969-12-31"),
-            new DateTime(0xFFFFFFFF, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()),
-            new DateTime("2038-01-19T03:14:07.999"),
+            9223372036854775807L, 9223372036854775807L, -1L, new Date(new 
DateTime("1969-12-31").getMillis()),
+            new Time(new DateTime(0xFFFFFFFF, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()).getMillis()),
+            new Timestamp(new DateTime("2038-01-19T03:14:07.999").getMillis()),
             new 
Period().plusMonths(16843009).plusDays(16843009).plusMillis(16843009),
             bytesOnes)
         .baselineValues(3, "UTF8 string3", "MIN_VALUE", -2147483648, -128, 
-32768, -2147483648, 0, 0, 0,
-            -9223372036854775808L, -9223372036854775808L, 0L, new 
DateTime("1970-01-01"),
-            new DateTime(0, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()),
-            new DateTime("1970-01-01T00:00:00.0"), new Period("PT0S"), 
bytesZeros)
+            -9223372036854775808L, -9223372036854775808L, 0L, new Date(new 
DateTime("1970-01-01").getMillis()),
+            new Time(new DateTime(0, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()).getMillis()),
+            new Timestamp(new DateTime("1970-01-01T00:00:00.0").getMillis()), 
new Period("PT0S"), bytesZeros)
         .baselineValues(4, null, null, null, null, null, null, null, null, 
null, null, null, null, null,
             null, null, null, null)
         .build().run();
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet2/TestDrillParquetReader.java
 
b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet2/TestDrillParquetReader.java
index 6bd8dd66a4..ca96317017 100644
--- 
a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet2/TestDrillParquetReader.java
+++ 
b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet2/TestDrillParquetReader.java
@@ -17,8 +17,14 @@
  */
 package org.apache.drill.exec.store.parquet2;
 
-import org.apache.drill.test.BaseTestQuery;
+import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.util.Arrays;
+
 import org.apache.drill.exec.planner.physical.PlannerSettings;
+import org.apache.drill.test.BaseTestQuery;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.joda.time.Period;
@@ -26,9 +32,6 @@
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import java.math.BigDecimal;
-import java.util.Arrays;
-
 public class TestDrillParquetReader extends BaseTestQuery {
   // enable decimal data type
   @BeforeClass
@@ -177,21 +180,21 @@ public void testLogicalIntTypes2() throws Exception {
         .ordered()
         .baselineColumns(columns)
         .baselineValues(1, "UTF8 string1", "RANDOM_VALUE", 1234567, 123, 
12345, 1234567, 123, 1234, 1234567,
-            1234567890123456L, 1234567890123456L, 1234567890123456L, new 
DateTime("5350-02-17"),
-            new DateTime(1234567, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()),
-            new DateTime("1973-11-29T21:33:09.012"),
+            1234567890123456L, 1234567890123456L, 1234567890123456L, new 
Date(new DateTime("5350-02-17").getMillis()),
+            new Time(new DateTime(1234567, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()).getMillis()),
+            new Timestamp(new DateTime("1973-11-29T21:33:09.012").getMillis()),
             new 
Period().plusMonths(875770417).plusDays(943142453).plusMillis(1650536505),
             bytes12)
         .baselineValues(2, "UTF8 string2", "MAX_VALUE", 2147483647, 127, 
32767, 2147483647, 255, 65535, -1,
-            9223372036854775807L, 9223372036854775807L, -1L, new 
DateTime("1969-12-31"),
-            new DateTime(0xFFFFFFFF, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()),
-            new DateTime("2038-01-19T03:14:07.999"),
+            9223372036854775807L, 9223372036854775807L, -1L, new Date(new 
DateTime("1969-12-31").getMillis()),
+            new Time(new DateTime(0xFFFFFFFF, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()).getMillis()),
+            new Timestamp(new DateTime("2038-01-19T03:14:07.999").getMillis()),
             new 
Period().plusMonths(16843009).plusDays(16843009).plusMillis(16843009),
             bytesOnes)
         .baselineValues(3, "UTF8 string3", "MIN_VALUE", -2147483648, -128, 
-32768, -2147483648, 0, 0, 0,
-            -9223372036854775808L, -9223372036854775808L, 0L, new 
DateTime("1970-01-01"),
-            new DateTime(0, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()),
-            new DateTime("1970-01-01T00:00:00.0"), new Period("PT0S"), 
bytesZeros)
+            -9223372036854775808L, -9223372036854775808L, 0L, new Date(new 
DateTime("1970-01-01").getMillis()),
+            new Time(new DateTime(0, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()).getMillis()),
+            new Timestamp(new DateTime("1970-01-01T00:00:00.0").getMillis()), 
new Period("PT0S"), bytesZeros)
         .build()
         .run();
   }
@@ -251,21 +254,21 @@ public void testLogicalIntTypes3() throws Exception {
         .ordered()
         .baselineColumns(columns)
         .baselineValues(1, "UTF8 string1", "RANDOM_VALUE", 1234567, 123, 
12345, 1234567, 123, 1234, 1234567,
-            1234567890123456L, 1234567890123456L, 1234567890123456L, new 
DateTime("5350-02-17"),
-            new DateTime(1234567, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()),
-            new DateTime("1973-11-29T21:33:09.012"),
+            1234567890123456L, 1234567890123456L, 1234567890123456L, new 
Date(new DateTime("5350-02-17").getMillis()),
+            new Time(new DateTime(1234567, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()).getMillis()),
+            new Timestamp(new DateTime("1973-11-29T21:33:09.012").getMillis()),
             new 
Period().plusMonths(875770417).plusDays(943142453).plusMillis(1650536505),
             bytes12)
         .baselineValues(2, "UTF8 string2", "MAX_VALUE", 2147483647, 127, 
32767, 2147483647, 255, 65535, -1,
-            9223372036854775807L, 9223372036854775807L, -1L, new 
DateTime("1969-12-31"),
-            new DateTime(0xFFFFFFFF, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()),
-            new DateTime("2038-01-19T03:14:07.999"),
+            9223372036854775807L, 9223372036854775807L, -1L, new Date(new 
DateTime("1969-12-31").getMillis()),
+            new Time(new DateTime(0xFFFFFFFF, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()).getMillis()),
+            new Timestamp(new DateTime("2038-01-19T03:14:07.999").getMillis()),
             new 
Period().plusMonths(16843009).plusDays(16843009).plusMillis(16843009),
             bytesOnes)
         .baselineValues(3, "UTF8 string3", "MIN_VALUE", -2147483648, -128, 
-32768, -2147483648, 0, 0, 0,
-            -9223372036854775808L, -9223372036854775808L, 0L, new 
DateTime("1970-01-01"),
-            new DateTime(0, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()),
-            new DateTime("1970-01-01T00:00:00.0"), new Period("PT0S"), 
bytesZeros)
+            -9223372036854775808L, -9223372036854775808L, 0L, new Date(new 
DateTime("1970-01-01").getMillis()),
+            new Time(new DateTime(0, 
DateTimeZone.UTC).withZoneRetainFields(DateTimeZone.getDefault()).getMillis()),
+            new Timestamp(new DateTime("1970-01-01T00:00:00.0").getMillis()), 
new Period("PT0S"), bytesZeros)
         .baselineValues(4, null, null, null, null, null, null, null, null, 
null, null, null, null, null,
             null, null, null, null)
         .build().run();
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java
 
b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java
index 8c419ce94e..c2d3f9aa01 100644
--- 
a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java
+++ 
b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java
@@ -80,7 +80,7 @@ public void testMongoExtendedTypes() throws Exception {
               1, actualRecordCount), 1, actualRecordCount);
       List<QueryDataBatch> resultList = 
testSqlWithResults(String.format("select * from dfs.`%s`", originalFile));
       String actual = getResultString(resultList, ",");
-      String expected = 
"drill_timestamp_millies,bin,bin1\n2015-07-07T03:59:43.488,drill,drill\n";
+      String expected = "drill_timestamp_millies,bin,bin1\n2015-07-07 
03:59:43.488,drill,drill\n";
       Assert.assertEquals(expected, actual);
     } finally {
       
resetSessionOption(ExecConstants.OUTPUT_FORMAT_VALIDATOR.getOptionName());
diff --git a/exec/java-exec/src/test/resources/baseline_nested_datetime.json 
b/exec/java-exec/src/test/resources/baseline_nested_datetime.json
new file mode 100644
index 0000000000..a055e9cbf9
--- /dev/null
+++ b/exec/java-exec/src/test/resources/baseline_nested_datetime.json
@@ -0,0 +1,13 @@
+{
+  "date" : "1970-01-11",
+  "time" : "00:00:03.600",
+  "timestamp" : "2018-03-23 17:40:52.123",
+  "date_list" : [ "1970-01-11" ],
+  "time_list" : [ "00:00:03.600" ],
+  "timestamp_list" : [ "2018-03-23 17:40:52.123" ],
+  "time_map" : {
+    "date" : "1970-01-11",
+    "time" : "00:00:03.600",
+    "timestamp" : "2018-03-23 17:40:52.123"
+  }
+}
\ No newline at end of file
diff --git a/exec/java-exec/src/test/resources/datetime.parquet 
b/exec/java-exec/src/test/resources/datetime.parquet
new file mode 100644
index 0000000000..8e5b877f1e
Binary files /dev/null and b/exec/java-exec/src/test/resources/datetime.parquet 
differ
diff --git a/exec/vector/src/main/codegen/data/ValueVectorTypes.tdd 
b/exec/vector/src/main/codegen/data/ValueVectorTypes.tdd
index 4d719b4f53..fe63e308af 100644
--- a/exec/vector/src/main/codegen/data/ValueVectorTypes.tdd
+++ b/exec/vector/src/main/codegen/data/ValueVectorTypes.tdd
@@ -68,7 +68,7 @@
         { class: "UInt4", valueHolder: "UInt4Holder" },
         { class: "Float4", javaType: "float" , boxedType: "Float", 
accessorType: "double", accessorCast: "set",
           fields: [{name: "value", type: "float"}]},
-        { class: "Time", javaType: "int", friendlyType: "DateTime", 
accessorType: "int" },
+        { class: "Time", javaType: "int", friendlyType: "Time", accessorType: 
"int" },
         { class: "IntervalYear", javaType: "int", friendlyType: "Period" }
         { class: "Decimal9", maxPrecisionDigits: 9, friendlyType: "BigDecimal",
           fields: [{name:"value", type:"int"}, {name: "scale", type: "int", 
include: false},
@@ -85,8 +85,8 @@
         { class: "BigInt"},
         { class: "UInt8" },
         { class: "Float8", javaType: "double" , boxedType: "Double", fields: 
[{name: "value", type: "double"}], },
-        { class: "Date", javaType: "long", friendlyType: "DateTime", 
accessorType: "long" },
-        { class: "TimeStamp", javaType: "long", friendlyType: "DateTime", 
accessorType: "long" }
+        { class: "Date", javaType: "long", friendlyType: "Date", accessorType: 
"long" },
+        { class: "TimeStamp", javaType: "long", friendlyType: "Timestamp", 
accessorType: "long" }
         { class: "Decimal18", maxPrecisionDigits: 18, friendlyType: 
"BigDecimal",
           fields: [{name:"value", type:"long"}, {name: "scale", type: "int", 
include: false},
                    {name: "precision", type: "int", include: false}] },
diff --git a/exec/vector/src/main/codegen/templates/AbstractFieldReader.java 
b/exec/vector/src/main/codegen/templates/AbstractFieldReader.java
index f2e1eb0dd8..6ad879c0b4 100644
--- a/exec/vector/src/main/codegen/templates/AbstractFieldReader.java
+++ b/exec/vector/src/main/codegen/templates/AbstractFieldReader.java
@@ -44,7 +44,7 @@ public boolean isSet() {
   }
 
   <#list ["Object", "BigDecimal", "Integer", "Long", "Boolean",
-          "Character", "DateTime", "Period", "Double", "Float",
+          "Character", "Date", "Time", "Timestamp", "Period", "Double", 
"Float",
           "Text", "String", "Byte", "Short", "byte[]"] as friendlyType>
   <#assign safeType=friendlyType />
   <#if safeType=="byte[]"><#assign safeType="ByteArray" /></#if>
diff --git a/exec/vector/src/main/codegen/templates/FixedValueVectors.java 
b/exec/vector/src/main/codegen/templates/FixedValueVectors.java
index ddd69253ae..6f8119a309 100644
--- a/exec/vector/src/main/codegen/templates/FixedValueVectors.java
+++ b/exec/vector/src/main/codegen/templates/FixedValueVectors.java
@@ -508,7 +508,7 @@ public long getTwoAsLong(int index) {
     public ${friendlyType} getObject(int index) {
       org.joda.time.DateTime date = new org.joda.time.DateTime(get(index), 
org.joda.time.DateTimeZone.UTC);
       date = 
date.withZoneRetainFields(org.joda.time.DateTimeZone.getDefault());
-      return date;
+      return new java.sql.Date(date.getMillis());
     }
     <#elseif minor.class == "TimeStamp">
 
@@ -516,7 +516,7 @@ public long getTwoAsLong(int index) {
     public ${friendlyType} getObject(int index) {
       org.joda.time.DateTime date = new org.joda.time.DateTime(get(index), 
org.joda.time.DateTimeZone.UTC);
       date = 
date.withZoneRetainFields(org.joda.time.DateTimeZone.getDefault());
-      return date;
+      return new java.sql.Timestamp(date.getMillis());
     }
     <#elseif minor.class == "IntervalYear">
 
@@ -531,10 +531,10 @@ public StringBuilder getAsStringBuilder(int index) {
     <#elseif minor.class == "Time">
 
     @Override
-    public DateTime getObject(int index) {
+    public java.sql.Time getObject(int index) {
       org.joda.time.DateTime time = new org.joda.time.DateTime(get(index), 
org.joda.time.DateTimeZone.UTC);
       time = 
time.withZoneRetainFields(org.joda.time.DateTimeZone.getDefault());
-      return time;
+      return new java.sql.Time(time.getMillis());
     }
     <#elseif minor.class == "Decimal9" || minor.class == "Decimal18">
 
diff --git a/exec/vector/src/main/codegen/templates/NullReader.java 
b/exec/vector/src/main/codegen/templates/NullReader.java
index 32ee9b9b9a..b4a3fc48cc 100644
--- a/exec/vector/src/main/codegen/templates/NullReader.java
+++ b/exec/vector/src/main/codegen/templates/NullReader.java
@@ -121,7 +121,7 @@ private void fail(String name){
   }
   
   <#list ["Object", "BigDecimal", "Integer", "Long", "Boolean", 
-          "Character", "DateTime", "Period", "Double", "Float",
+          "Character", "Date", "Time", "Timestamp", "Period", "Double", 
"Float",
           "Text", "String", "Byte", "Short", "byte[]"] as friendlyType>
   <#assign safeType=friendlyType />
   <#if safeType=="byte[]"><#assign safeType="ByteArray" /></#if>
diff --git a/exec/vector/src/main/codegen/templates/UnionReader.java 
b/exec/vector/src/main/codegen/templates/UnionReader.java
index 40ad89b82a..d4657add06 100644
--- a/exec/vector/src/main/codegen/templates/UnionReader.java
+++ b/exec/vector/src/main/codegen/templates/UnionReader.java
@@ -124,7 +124,7 @@ public void copyAsValue(UnionWriter writer) {
   }
 
   <#list ["Object", "BigDecimal", "Integer", "Long", "Boolean",
-          "Character", "DateTime", "Period", "Double", "Float",
+          "Character", "Date", "Time", "Timestamp", "Period", "Double", 
"Float",
           "Text", "String", "Byte", "Short", "byte[]"] as friendlyType>
   <#assign safeType=friendlyType />
   <#if safeType=="byte[]"><#assign safeType="ByteArray" /></#if>
diff --git 
a/exec/vector/src/main/java/org/apache/drill/exec/util/JsonStringArrayList.java 
b/exec/vector/src/main/java/org/apache/drill/exec/util/JsonStringArrayList.java
index 216c5cc736..695befdb85 100644
--- 
a/exec/vector/src/main/java/org/apache/drill/exec/util/JsonStringArrayList.java
+++ 
b/exec/vector/src/main/java/org/apache/drill/exec/util/JsonStringArrayList.java
@@ -29,6 +29,7 @@
 
   static {
     mapper = new ObjectMapper();
+    mapper.registerModule(SerializationModule.getModule());
   }
 
   @Override
diff --git 
a/exec/vector/src/main/java/org/apache/drill/exec/util/JsonStringHashMap.java 
b/exec/vector/src/main/java/org/apache/drill/exec/util/JsonStringHashMap.java
index e4de6d8844..f4352d1144 100644
--- 
a/exec/vector/src/main/java/org/apache/drill/exec/util/JsonStringHashMap.java
+++ 
b/exec/vector/src/main/java/org/apache/drill/exec/util/JsonStringHashMap.java
@@ -34,6 +34,7 @@
 
   static {
     mapper = new ObjectMapper();
+    mapper.registerModule(SerializationModule.getModule());
   }
 
   @Override
diff --git 
a/exec/vector/src/main/java/org/apache/drill/exec/util/SerializationModule.java 
b/exec/vector/src/main/java/org/apache/drill/exec/util/SerializationModule.java
new file mode 100644
index 0000000000..990a58bf81
--- /dev/null
+++ 
b/exec/vector/src/main/java/org/apache/drill/exec/util/SerializationModule.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.util;
+
+import java.io.IOException;
+import java.sql.Date;
+import java.sql.Time;
+import java.sql.Timestamp;
+
+import org.joda.time.format.DateTimeFormat;
+import org.joda.time.format.DateTimeFormatter;
+
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonSerializer;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.module.SimpleModule;
+
+/**
+ * This helper class holds any custom Jackson serializers used when outputing
+ * the data in JSON format.
+ */
+public class SerializationModule {
+
+    // copied from DateUtility.  Added here for inclusion into drill-jdbc-all
+    public static final DateTimeFormatter formatDate        = 
DateTimeFormat.forPattern("yyyy-MM-dd");
+    public static final DateTimeFormatter formatTimeStamp    = 
DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS");
+    public static final DateTimeFormatter formatTimeStampTZ = 
DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS ZZZ");
+    public static final DateTimeFormatter formatTime        = 
DateTimeFormat.forPattern("HH:mm:ss.SSS");
+
+    public static final SimpleModule drillModule = new 
SimpleModule("DrillModule");
+
+    static {
+        drillModule.addSerializer(Time.class, new JsonSerializer<Time>() {
+            @Override
+            public void serialize(Time value, JsonGenerator gen, 
SerializerProvider serializers)
+                    throws IOException, JsonProcessingException {
+                gen.writeString(formatTime.print(value.getTime()));
+            }
+        });
+
+        drillModule.addSerializer(Date.class, new JsonSerializer<Date>() {
+            @Override
+            public void serialize(Date value, JsonGenerator gen, 
SerializerProvider serializers)
+                    throws IOException, JsonProcessingException {
+                gen.writeString(formatDate.print(value.getTime()));
+            }
+        });
+
+        drillModule.addSerializer(Timestamp.class, new 
JsonSerializer<Timestamp>() {
+            @Override
+            public void serialize(Timestamp value, JsonGenerator gen, 
SerializerProvider serializers)
+                    throws IOException, JsonProcessingException {
+                gen.writeString(formatTimeStamp.print(value.getTime()));
+            }
+        });
+    }
+
+    public static final SimpleModule getModule() {
+        return drillModule;
+    }
+}


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


> Output format for nested date, time, timestamp values in an object hierarchy
> ----------------------------------------------------------------------------
>
>                 Key: DRILL-6242
>                 URL: https://issues.apache.org/jira/browse/DRILL-6242
>             Project: Apache Drill
>          Issue Type: Bug
>          Components: Execution - Data Types
>    Affects Versions: 1.12.0
>            Reporter: Jiang Wu
>            Assignee: Jiang Wu
>            Priority: Major
>              Labels: ready-to-commit
>             Fix For: 1.14.0
>
>
> Some storages (mapr db, mongo db, etc.) have hierarchical objects that 
> contain nested fields of date, time, timestamp types.  When a query returns 
> these objects, the output format for the nested date, time, timestamp, are 
> showing the internal object (org.joda.time.DateTime), rather than the logical 
> data value.
> For example.  Suppose in MongoDB, we have a single object that looks like 
> this:
> {code:java}
> > db.test.findOne();
> {
>     "_id" : ObjectId("5aa8487d470dd39a635a12f5"),
>     "name" : "orange",
>     "context" : {
>         "date" : ISODate("2018-03-13T21:52:54.940Z"),
>         "user" : "jack"
>     }
> }
> {code}
> Then connect Drill to the above MongoDB storage, and run the following query 
> within Drill:
> {code:java}
> > select t.context.`date`, t.context from test t; 
> +--------+---------+ 
> | EXPR$0 | context | 
> +--------+---------+ 
> | 2018-03-13 | 
> {"date":{"dayOfYear":72,"year":2018,"dayOfMonth":13,"dayOfWeek":2,"era":1,"millisOfDay":78774940,"weekOfWeekyear":11,"weekyear":2018,"monthOfYear":3,"yearOfEra":2018,"yearOfCentury":18,"centuryOfEra":20,"millisOfSecond":940,"secondOfMinute":54,"secondOfDay":78774,"minuteOfHour":52,"minuteOfDay":1312,"hourOfDay":21,"zone":{"fixed":true,"id":"UTC"},"millis":1520977974940,"chronology":{"zone":{"fixed":true,"id":"UTC"}},"afterNow":false,"beforeNow":true,"equalNow":false},"user":"jack"}
>  |
> {code}
> We can see that from the above output, when the date field is retrieved as a 
> top level column, Drill outputs a logical date value.  But when the same 
> field is within an object hierarchy, Drill outputs the internal object used 
> to hold the date value.
> The expected output is the same display for whether the date field is shown 
> as a top level column or when it is within an object hierarchy:
> {code:java}
> > select t.context.`date`, t.context from test t; 
> +--------+---------+ 
> | EXPR$0 | context | 
> +--------+---------+ 
> | 2018-03-13 | {"date":"2018-03-13","user":"jack"} |
> {code}



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)

Reply via email to