Author: hashutosh
Date: Mon Feb 2 17:04:41 2015
New Revision: 1656519
URL: http://svn.apache.org/r1656519
Log:
HIVE-9298 : Support reading alternate timestamp formats (Jason Dere via
Ashutosh Chauhan)
Added:
hive/trunk/common/src/java/org/apache/hive/common/util/TimestampParser.java
hive/trunk/common/src/test/org/apache/hive/common/util/TestHiveStringUtils.java
hive/trunk/common/src/test/org/apache/hive/common/util/TestTimestampParser.java
hive/trunk/data/files/ts_formats.txt
hive/trunk/hbase-handler/src/test/queries/positive/hbase_timestamp_format.q
hive/trunk/hbase-handler/src/test/results/positive/hbase_timestamp_format.q.out
hive/trunk/ql/src/test/queries/clientpositive/timestamp_formats.q
hive/trunk/ql/src/test/results/clientpositive/timestamp_formats.q.out
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyObjectInspectorParameters.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyObjectInspectorParametersImpl.java
Modified:
hive/trunk/common/pom.xml
hive/trunk/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/DefaultHBaseKeyFactory.java
hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseLazyObjectFactory.java
hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java
hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/DefaultHBaseValueFactory.java
hive/trunk/pom.xml
hive/trunk/ql/pom.xml
hive/trunk/serde/if/serde.thrift
hive/trunk/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
hive/trunk/serde/src/gen/thrift/gen-cpp/serde_constants.h
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/PropValueUnion.java
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/SetIntString.java
hive/trunk/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
hive/trunk/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
hive/trunk/serde/src/gen/thrift/gen-rb/serde_constants.rb
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroLazyObjectInspector.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyListObjectInspector.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyMapObjectInspector.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyObjectInspectorFactory.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazySimpleStructObjectInspector.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyUnionObjectInspector.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java
Modified: hive/trunk/common/pom.xml
URL:
http://svn.apache.org/viewvc/hive/trunk/common/pom.xml?rev=1656519&r1=1656518&r2=1656519&view=diff
==============================================================================
--- hive/trunk/common/pom.xml (original)
+++ hive/trunk/common/pom.xml Mon Feb 2 17:04:41 2015
@@ -56,6 +56,11 @@
<version>${commons-logging.version}</version>
</dependency>
<dependency>
+ <groupId>joda-time</groupId>
+ <artifactId>joda-time</artifactId>
+ <version>${joda.version}</version>
+ </dependency>
+ <dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>${log4j.version}</version>
Modified:
hive/trunk/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
URL:
http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hive/common/util/HiveStringUtils.java?rev=1656519&r1=1656518&r2=1656519&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
(original)
+++ hive/trunk/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
Mon Feb 2 17:04:41 2015
@@ -45,6 +45,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.classification.InterfaceAudience;
import org.apache.hadoop.hive.common.classification.InterfaceStability;
import org.apache.hadoop.io.Text;
+import org.apache.hadoop.util.StringUtils;
/**
* HiveStringUtils
@@ -492,6 +493,34 @@ public class HiveStringUtils {
}
/**
+ * Split a string using the default separator/escape character,
+ * then unescape the resulting array of strings
+ * @param str
+ * @return an array of unescaped strings
+ */
+ public static String[] splitAndUnEscape(String str) {
+ return splitAndUnEscape(str, ESCAPE_CHAR, COMMA);
+ }
+
+ /**
+ * Split a string using the specified separator/escape character,
+ * then unescape the resulting array of strings using the same
escape/separator.
+ * @param str a string that may have escaped separator
+ * @param escapeChar a char that be used to escape the separator
+ * @param separator a separator char
+ * @return an array of unescaped strings
+ */
+ public static String[] splitAndUnEscape(String str, char escapeChar, char
separator) {
+ String[] result = split(str, escapeChar, separator);
+ if (result != null) {
+ for (int idx = 0; idx < result.length; ++idx) {
+ result[idx] = unEscapeString(result[idx], escapeChar, separator);
+ }
+ }
+ return result;
+ }
+
+ /**
* Finds the first occurrence of the separator character ignoring the escaped
* separators starting from the index. Note the substring between the index
* and the position of the separator is passed.
Added:
hive/trunk/common/src/java/org/apache/hive/common/util/TimestampParser.java
URL:
http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hive/common/util/TimestampParser.java?rev=1656519&view=auto
==============================================================================
--- hive/trunk/common/src/java/org/apache/hive/common/util/TimestampParser.java
(added)
+++ hive/trunk/common/src/java/org/apache/hive/common/util/TimestampParser.java
Mon Feb 2 17:04:41 2015
@@ -0,0 +1,151 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.common.util;
+
+import java.math.BigDecimal;
+import java.sql.Timestamp;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.joda.time.DateTime;
+import org.joda.time.MutableDateTime;
+import org.joda.time.DateTimeFieldType;
+import org.joda.time.format.DateTimeFormat;
+import org.joda.time.format.DateTimeFormatter;
+import org.joda.time.format.DateTimeFormatterBuilder;
+import org.joda.time.format.DateTimeParser;
+import org.joda.time.format.DateTimeParserBucket;
+
+/**
+ * Timestamp parser using Joda DateTimeFormatter. Parser accepts 0 or more
date time format
+ * patterns. If no format patterns are provided it will default to the normal
Timestamp parsing.
+ * Datetime formats are compatible with Java SimpleDateFormat. Also added
special case pattern
+ * "millis" to parse the string as milliseconds since Unix epoch.
+ * Since this uses Joda DateTimeFormatter, this parser should be thread safe.
+ */
+public class TimestampParser {
+
+ protected final static String[] stringArray = new String[] {};
+ protected final static String millisFormatString = "millis";
+ protected final static DateTime startingDateValue = new DateTime(1970, 1, 1,
0, 0, 0, 0);
+
+ protected String[] formatStrings = null;
+ protected DateTimeFormatter fmt = null;
+
+ public TimestampParser() {
+ }
+
+ public TimestampParser(TimestampParser tsParser) {
+ this(tsParser.formatStrings == null ?
+ null : Arrays.copyOf(tsParser.formatStrings,
tsParser.formatStrings.length));
+ }
+
+ public TimestampParser(List<String> formatStrings) {
+ this(formatStrings == null ? null : formatStrings.toArray(stringArray));
+ }
+
+ public TimestampParser(String[] formatStrings) {
+ this.formatStrings = formatStrings;
+
+ // create formatter that includes all of the input patterns
+ if (formatStrings != null && formatStrings.length > 0) {
+ DateTimeParser[] parsers = new DateTimeParser[formatStrings.length];
+ for (int idx = 0; idx < formatStrings.length; ++idx) {
+ String formatString = formatStrings[idx];
+ if (formatString.equalsIgnoreCase(millisFormatString)) {
+ // Use milliseconds parser if pattern matches our special-case
millis pattern string
+ parsers[idx] = new MillisDateFormatParser();
+ } else {
+ parsers[idx] = DateTimeFormat.forPattern(formatString).getParser();
+ }
+ }
+ fmt = new DateTimeFormatterBuilder().append(null, parsers).toFormatter();
+ }
+ }
+
+ /**
+ * Parse the input string and return a timestamp value
+ * @param strValue
+ * @return
+ * @throws IllegalArgumentException if input string cannot be parsed into
timestamp
+ */
+ public Timestamp parseTimestamp(String strValue) throws
IllegalArgumentException {
+ if (fmt != null) {
+ // reset value in case any date fields are missing from the date pattern
+ MutableDateTime mdt = new MutableDateTime(startingDateValue);
+
+ // Using parseInto() avoids throwing exception when parsing,
+ // allowing fallback to default timestamp parsing if custom patterns
fail.
+ int ret = fmt.parseInto(mdt, strValue, 0);
+ // Only accept parse results if we parsed the entire string
+ if (ret == strValue.length()) {
+ return new Timestamp(mdt.getMillis());
+ }
+ }
+
+ // Otherwise try default timestamp parsing
+ return Timestamp.valueOf(strValue);
+ }
+
+ /**
+ * DateTimeParser to parse the date string as the millis since Unix epoch
+ */
+ public static class MillisDateFormatParser implements DateTimeParser {
+ private static final ThreadLocal<Matcher> numericMatcher = new
ThreadLocal<Matcher>() {
+ @Override
+ protected Matcher initialValue() {
+ return Pattern.compile("(-?\\d+)(\\.\\d+)?$").matcher("");
+ }
+ };
+
+ private final static DateTimeFieldType[] dateTimeFields = {
+ DateTimeFieldType.year(),
+ DateTimeFieldType.monthOfYear(),
+ DateTimeFieldType.dayOfMonth(),
+ DateTimeFieldType.hourOfDay(),
+ DateTimeFieldType.minuteOfHour(),
+ DateTimeFieldType.secondOfMinute(),
+ DateTimeFieldType.millisOfSecond()
+ };
+
+ public int estimateParsedLength() {
+ return 13; // Shouldn't hit 14 digits until year 2286
+ }
+
+ public int parseInto(DateTimeParserBucket bucket, String text, int
position) {
+ String substr = text.substring(position);
+ Matcher matcher = numericMatcher.get();
+ matcher.reset(substr);
+ if (!matcher.matches()) {
+ return -1;
+ }
+
+ // Joda DateTime only has precision to millis, cut off any fractional
portion
+ long millis = Long.parseLong(matcher.group(1));
+ DateTime dt = new DateTime(millis);
+ for (DateTimeFieldType field : dateTimeFields) {
+ bucket.saveField(field, dt.get(field));
+ }
+ return substr.length();
+ }
+ }
+}
Added:
hive/trunk/common/src/test/org/apache/hive/common/util/TestHiveStringUtils.java
URL:
http://svn.apache.org/viewvc/hive/trunk/common/src/test/org/apache/hive/common/util/TestHiveStringUtils.java?rev=1656519&view=auto
==============================================================================
---
hive/trunk/common/src/test/org/apache/hive/common/util/TestHiveStringUtils.java
(added)
+++
hive/trunk/common/src/test/org/apache/hive/common/util/TestHiveStringUtils.java
Mon Feb 2 17:04:41 2015
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.common.util;
+
+import static org.junit.Assert.*;
+
+import java.util.Arrays;
+
+import org.junit.Test;
+
+public class TestHiveStringUtils {
+ @Test
+ public void testSplitAndUnEscape() throws Exception {
+ splitAndUnEscapeTestCase(
+ null, null);
+
+ splitAndUnEscapeTestCase(
+ "'single element'",
+ new String[] {
+ "'single element'"
+ });
+
+ splitAndUnEscapeTestCase(
+ "yyyy-MM-dd'T'HH:mm:ss,yyyy-MM-dd'T'HH:mm:ss.S",
+ new String[] {
+ "yyyy-MM-dd'T'HH:mm:ss",
+ "yyyy-MM-dd'T'HH:mm:ss.S"
+ });
+
+ splitAndUnEscapeTestCase(
+ "single\\,element",
+ new String[] {
+ "single,element"
+ });
+ splitAndUnEscapeTestCase(
+ "element\\,one\\\\,element\\\\two\\\\\\,",
+ new String[] {
+ "element,one\\",
+ "element\\two\\,"
+ });
+ }
+
+ public void splitAndUnEscapeTestCase(String testValue, String[]
expectedResults) throws Exception {
+ String[] testResults = HiveStringUtils.splitAndUnEscape(testValue);
+ assertTrue(Arrays.toString(expectedResults) + " == " +
Arrays.toString(testResults),
+ Arrays.equals(expectedResults, testResults));
+ }
+}
Added:
hive/trunk/common/src/test/org/apache/hive/common/util/TestTimestampParser.java
URL:
http://svn.apache.org/viewvc/hive/trunk/common/src/test/org/apache/hive/common/util/TestTimestampParser.java?rev=1656519&view=auto
==============================================================================
---
hive/trunk/common/src/test/org/apache/hive/common/util/TestTimestampParser.java
(added)
+++
hive/trunk/common/src/test/org/apache/hive/common/util/TestTimestampParser.java
Mon Feb 2 17:04:41 2015
@@ -0,0 +1,181 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.common.util;
+
+import java.sql.Timestamp;
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.Assert.*;
+import org.junit.Test;
+
+public class TestTimestampParser {
+ public static class ValidTimestampCase {
+ String strValue;
+ Timestamp expectedValue;
+
+ public ValidTimestampCase(String strValue, Timestamp expectedValue) {
+ this.strValue = strValue;
+ this.expectedValue = expectedValue;
+ }
+ }
+
+ static void testValidCases(TimestampParser tp, ValidTimestampCase[]
validCases) {
+ for (ValidTimestampCase validCase : validCases) {
+ Timestamp ts = tp.parseTimestamp(validCase.strValue);
+ assertEquals("Parsing " + validCase.strValue, validCase.expectedValue,
ts);
+ }
+ }
+
+ static void testInvalidCases(TimestampParser tp, String[] invalidCases) {
+ for (String invalidString : invalidCases) {
+ try {
+ Timestamp ts = tp.parseTimestamp(invalidString);
+ fail("Expected exception parsing " + invalidString + ", but parsed
value to " + ts);
+ } catch (IllegalArgumentException err) {
+ // Exception expected
+ }
+ }
+ }
+
+ @Test
+ public void testDefault() {
+ // No timestamp patterns, should default to normal timestamp format
+ TimestampParser tp = new TimestampParser();
+ ValidTimestampCase[] validCases = {
+ new ValidTimestampCase("1945-12-31 23:59:59.0",
+ Timestamp.valueOf("1945-12-31 23:59:59.0")),
+ new ValidTimestampCase("1945-12-31 23:59:59.1234",
+ Timestamp.valueOf("1945-12-31 23:59:59.1234")),
+ new ValidTimestampCase("1970-01-01 00:00:00",
+ Timestamp.valueOf("1970-01-01 00:00:00")),
+ };
+
+ String[] invalidCases = {
+ "1945-12-31T23:59:59",
+ "12345",
+ };
+
+ testValidCases(tp, validCases);
+ testInvalidCases(tp, invalidCases);
+ }
+
+ @Test
+ public void testPattern1() {
+ // Joda pattern matching expects fractional seconds length to match
+ // the number of 'S' in the pattern. So if you want to match .1, .12, .123,
+ // you need 3 different patterns with .S, .SS, .SSS
+ String[] patterns = {
+ // ISO-8601 timestamps
+ "yyyy-MM-dd'T'HH:mm:ss",
+ "yyyy-MM-dd'T'HH:mm:ss.S",
+ "yyyy-MM-dd'T'HH:mm:ss.SS",
+ "yyyy-MM-dd'T'HH:mm:ss.SSS",
+ "yyyy-MM-dd'T'HH:mm:ss.SSSS",
+ };
+ TimestampParser tp = new TimestampParser(patterns);
+
+ ValidTimestampCase[] validCases = {
+ new ValidTimestampCase("1945-12-31T23:59:59.0",
+ Timestamp.valueOf("1945-12-31 23:59:59.0")),
+ new ValidTimestampCase("2001-01-01 00:00:00.100",
+ Timestamp.valueOf("2001-01-01 00:00:00.100")),
+ new ValidTimestampCase("2001-01-01 00:00:00.001",
+ Timestamp.valueOf("2001-01-01 00:00:00.001")),
+ // Joda parsing only supports up to millisecond precision
+ new ValidTimestampCase("1945-12-31T23:59:59.1234",
+ Timestamp.valueOf("1945-12-31 23:59:59.123")),
+ new ValidTimestampCase("1970-01-01T00:00:00",
+ Timestamp.valueOf("1970-01-01 00:00:00")),
+ new ValidTimestampCase("1970-4-5T6:7:8",
+ Timestamp.valueOf("1970-04-05 06:07:08")),
+
+ // Default timestamp format still works?
+ new ValidTimestampCase("2001-01-01 00:00:00",
+ Timestamp.valueOf("2001-01-01 00:00:00")),
+ new ValidTimestampCase("1945-12-31 23:59:59.1234",
+ Timestamp.valueOf("1945-12-31 23:59:59.1234")),
+ };
+
+ String[] invalidCases = {
+ "1945-12-31-23:59:59",
+ "1945-12-31T23:59:59.12345", // our pattern didn't specify 5 decimal
places
+ "12345",
+ };
+
+ testValidCases(tp, validCases);
+ testInvalidCases(tp, invalidCases);
+ }
+
+ @Test
+ public void testMillisParser() {
+ String[] patterns = {
+ "millis",
+ // Also try other patterns
+ "yyyy-MM-dd'T'HH:mm:ss",
+ };
+ TimestampParser tp = new TimestampParser(patterns);
+
+ ValidTimestampCase[] validCases = {
+ new ValidTimestampCase("0", new Timestamp(0)),
+ new ValidTimestampCase("-1000000", new Timestamp(-1000000)),
+ new ValidTimestampCase("1420509274123", new Timestamp(1420509274123L)),
+ new ValidTimestampCase("1420509274123.456789", new
Timestamp(1420509274123L)),
+
+ // Other format pattern should also work
+ new ValidTimestampCase("1945-12-31T23:59:59",
+ Timestamp.valueOf("1945-12-31 23:59:59")),
+ };
+
+ String[] invalidCases = {
+ "1945-12-31-23:59:59",
+ "1945-12-31T23:59:59.12345", // our pattern didn't specify 5 decimal
places
+ "1420509274123-",
+ };
+
+ testValidCases(tp, validCases);
+ testInvalidCases(tp, invalidCases);
+ }
+
+ @Test
+ public void testPattern2() {
+ // Pattern does not contain all date fields
+ String[] patterns = {
+ "HH:mm",
+ "MM:dd:ss",
+ };
+ TimestampParser tp = new TimestampParser(patterns);
+
+ ValidTimestampCase[] validCases = {
+ new ValidTimestampCase("05:06",
+ Timestamp.valueOf("1970-01-01 05:06:00")),
+ new ValidTimestampCase("05:06:07",
+ Timestamp.valueOf("1970-05-06 00:00:07")),
+ };
+
+ String[] invalidCases = {
+ "1945-12-31T23:59:59",
+ "1945:12:31-",
+ "12345",
+ };
+
+ testValidCases(tp, validCases);
+ testInvalidCases(tp, invalidCases);
+ }
+}
Added: hive/trunk/data/files/ts_formats.txt
URL:
http://svn.apache.org/viewvc/hive/trunk/data/files/ts_formats.txt?rev=1656519&view=auto
==============================================================================
--- hive/trunk/data/files/ts_formats.txt (added)
+++ hive/trunk/data/files/ts_formats.txt Mon Feb 2 17:04:41 2015
@@ -0,0 +1,21 @@
+2011-01-01 01:01:01.1111111112011-01-01
01:01:01.1111111112011-01-01T01:01:01.1111111112011-01-01T01:01:01.1111111112011-01-01T01:01:012011-01-01T01:01:01
+2012-02-02 02:02:02.2222222222012-02-02
02:02:02.2222222222012-02-02T02:02:02.2222222222012-02-02T02:02:02.2222222222012-02-02T02:02:022012-02-02T02:02:02
+2013-03-03 03:03:03.3333333332013-03-03
03:03:03.3333333332013-03-03T03:03:03.3333333332013-03-03T03:03:03.3333333332013-03-03T03:03:032013-03-03T03:03:03
+2014-04-04 04:04:04.4444444442014-04-04
04:04:04.4444444442014-04-04T04:04:04.4444444442014-04-04T04:04:04.4444444442014-04-04T04:04:042014-04-04T04:04:04
+2015-05-05 05:05:05.5555555552015-05-05
05:05:05.5555555552015-05-05T05:05:05.5555555552015-05-05T05:05:05.5555555552015-05-05T05:05:052015-05-05T05:05:05
+2016-06-06 06:06:06.6666666662016-06-06
06:06:06.6666666662016-06-06T06:06:06.6666666662016-06-06T06:06:06.6666666662016-06-06T06:06:062016-06-06T06:06:06
+2017-07-07 07:07:07.7777777772017-07-07
07:07:07.7777777772017-07-07T07:07:07.7777777772017-07-07T07:07:07.7777777772017-07-07T07:07:072017-07-07T07:07:07
+2018-08-08 08:08:08.8888888882018-08-08
08:08:08.8888888882018-08-08T08:08:08.8888888882018-08-08T08:08:08.8888888882018-08-08T08:08:082018-08-08T08:08:08
+2019-09-09 09:09:09.9999999992019-09-09
09:09:09.9999999992019-09-09T09:09:09.9999999992019-09-09T09:09:09.9999999992019-09-09T09:09:092019-09-09T09:09:09
+2020-10-10 10:10:10.1010101012020-10-10
10:10:10.1010101012020-10-10T10:10:10.1010101012020-10-10T10:10:10.1010101012020-10-10T10:10:102020-10-10T10:10:10
+2021-11-11 11:11:11.1111111112021-11-11
11:11:11.1111111112021-11-11T11:11:11.1111111112021-11-11T11:11:11.1111111112021-11-11T11:11:112021-11-11T11:11:11
+2022-12-12 12:12:12.1212121212022-12-12
12:12:12.1212121212022-12-12T12:12:12.1212121212022-12-12T12:12:12.1212121212022-12-12T12:12:122022-12-12T12:12:12
+2023-01-02 13:13:13.1313131312023-01-02
13:13:13.1313131312023-01-02T13:13:13.1313131312023-01-02T13:13:13.1313131312023-01-02T13:13:132023-01-02T13:13:13
+2024-02-02 14:14:14.1414141412024-02-02
14:14:14.1414141412024-02-02T14:14:14.1414141412024-02-02T14:14:14.1414141412024-02-02T14:14:142024-02-02T14:14:14
+2025-03-03 15:15:15.1515151512025-03-03
15:15:15.1515151512025-03-03T15:15:15.1515151512025-03-03T15:15:15.1515151512025-03-03T15:15:152025-03-03T15:15:15
+2026-04-04 16:16:16.1616161612026-04-04
16:16:16.1616161612026-04-04T16:16:16.1616161612026-04-04T16:16:16.1616161612026-04-04T16:16:162026-04-04T16:16:16
+2027-05-05 17:17:17.1717171712027-05-05
17:17:17.1717171712027-05-05T17:17:17.1717171712027-05-05T17:17:17.1717171712027-05-05T17:17:172027-05-05T17:17:17
+2028-06-06 18:18:18.1818181812028-06-06
18:18:18.1818181812028-06-06T18:18:18.1818181812028-06-06T18:18:18.1818181812028-06-06T18:18:182028-06-06T18:18:18
+2029-07-07 19:19:19.1919191912029-07-07
19:19:19.1919191912029-07-07T19:19:19.1919191912029-07-07T19:19:19.1919191912029-07-07T19:19:192029-07-07T19:19:19
+2030-08-08 20:20:20.2020202022030-08-08
20:20:20.2020202022030-08-08T20:20:20.2020202022030-08-08T20:20:20.2020202022030-08-08T20:20:202030-08-08T20:20:20
+2031-09-09 21:21:21.2121212122031-09-09
21:21:21.2121212122031-09-09T21:21:21.2121212122031-09-09T21:21:21.2121212122031-09-09T21:21:212031-09-09T21:21:21
Modified:
hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/DefaultHBaseKeyFactory.java
URL:
http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/DefaultHBaseKeyFactory.java?rev=1656519&r1=1656518&r2=1656519&view=diff
==============================================================================
---
hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/DefaultHBaseKeyFactory.java
(original)
+++
hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/DefaultHBaseKeyFactory.java
Mon Feb 2 17:04:41 2015
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.serde2.laz
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import
org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
public class DefaultHBaseKeyFactory extends AbstractHBaseKeyFactory implements
HBaseKeyFactory {
@@ -44,8 +45,7 @@ public class DefaultHBaseKeyFactory exte
@Override
public ObjectInspector createKeyObjectInspector(TypeInfo type) throws
SerDeException {
- return LazyFactory.createLazyObjectInspector(type,
serdeParams.getSeparators(), 1,
- serdeParams.getNullSequence(), serdeParams.isEscaped(),
serdeParams.getEscapeChar());
+ return LazyFactory.createLazyObjectInspector(type, 1, serdeParams,
ObjectInspectorOptions.JAVA);
}
@Override
Modified:
hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseLazyObjectFactory.java
URL:
http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseLazyObjectFactory.java?rev=1656519&r1=1656518&r2=1656519&view=diff
==============================================================================
---
hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseLazyObjectFactory.java
(original)
+++
hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseLazyObjectFactory.java
Mon Feb 2 17:04:41 2015
@@ -25,8 +25,12 @@ import org.apache.hadoop.hive.hbase.stru
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
import
org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory;
+import
org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParameters;
+import
org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParametersImpl;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import
org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.io.Text;
// Does same thing with LazyFactory#createLazyObjectInspector except that this
replaces
// original keyOI with OI which is create by HBaseKeyFactory provided by serde
property for hbase
@@ -46,8 +50,7 @@ public class HBaseLazyObjectFactory {
}
}
return LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(
- serdeParams.getColumnNames(), columnObjectInspectors,
serdeParams.getSeparators()[0],
- serdeParams.getNullSequence(), serdeParams.isLastColumnTakesRest(),
- serdeParams.isEscaped(), serdeParams.getEscapeChar());
+ serdeParams.getColumnNames(), columnObjectInspectors, null,
serdeParams.getSeparators()[0],
+ serdeParams, ObjectInspectorOptions.JAVA);
}
}
\ No newline at end of file
Modified:
hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java
URL:
http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java?rev=1656519&r1=1656518&r2=1656519&view=diff
==============================================================================
---
hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java
(original)
+++
hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java
Mon Feb 2 17:04:41 2015
@@ -74,9 +74,7 @@ public class AvroHBaseValueFactory exten
@Override
public ObjectInspector createValueObjectInspector(TypeInfo type) throws
SerDeException {
ObjectInspector oi =
- LazyFactory.createLazyObjectInspector(type,
serdeParams.getSeparators(), 1,
- serdeParams.getNullSequence(), serdeParams.isEscaped(),
serdeParams.getEscapeChar(),
- ObjectInspectorOptions.AVRO);
+ LazyFactory.createLazyObjectInspector(type, 1, serdeParams,
ObjectInspectorOptions.AVRO);
// initialize the object inspectors
initInternalObjectInspectors(oi);
Modified:
hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/DefaultHBaseValueFactory.java
URL:
http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/DefaultHBaseValueFactory.java?rev=1656519&r1=1656518&r2=1656519&view=diff
==============================================================================
---
hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/DefaultHBaseValueFactory.java
(original)
+++
hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/DefaultHBaseValueFactory.java
Mon Feb 2 17:04:41 2015
@@ -29,6 +29,7 @@ import org.apache.hadoop.hive.serde2.laz
import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import
org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -62,8 +63,8 @@ public class DefaultHBaseValueFactory im
@Override
public ObjectInspector createValueObjectInspector(TypeInfo type)
throws SerDeException {
- return LazyFactory.createLazyObjectInspector(type,
serdeParams.getSeparators(),
- 1, serdeParams.getNullSequence(), serdeParams.isEscaped(),
serdeParams.getEscapeChar());
+ return LazyFactory.createLazyObjectInspector(type,
+ 1, serdeParams, ObjectInspectorOptions.JAVA);
}
@Override
Added:
hive/trunk/hbase-handler/src/test/queries/positive/hbase_timestamp_format.q
URL:
http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/test/queries/positive/hbase_timestamp_format.q?rev=1656519&view=auto
==============================================================================
--- hive/trunk/hbase-handler/src/test/queries/positive/hbase_timestamp_format.q
(added)
+++ hive/trunk/hbase-handler/src/test/queries/positive/hbase_timestamp_format.q
Mon Feb 2 17:04:41 2015
@@ -0,0 +1,21 @@
+
+create table hbase_str(rowkey string,mytime string,mystr string)
+ STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+ WITH SERDEPROPERTIES ('hbase.columns.mapping' = 'm:mytime,m:mystr')
+ TBLPROPERTIES ('hbase.table.name' = 'hbase_ts');
+
+describe hbase_str;
+insert overwrite table hbase_str select key, '2001-02-03-04.05.06.123456', value from src limit 3;
+select * from hbase_str;
+
+-- Timestamp string does not match the default timestamp format, specify a custom timestamp format
+create external table hbase_ts(rowkey string,mytime timestamp,mystr string)
+ STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+  WITH SERDEPROPERTIES ('hbase.columns.mapping' = 'm:mytime,m:mystr', 'timestamp.formats' = 'yyyy-MM-dd-HH.mm.ss.SSSSSS')
+ TBLPROPERTIES ('hbase.table.name' = 'hbase_ts');
+
+describe hbase_ts;
+select * from hbase_ts;
+
+drop table hbase_str;
+drop table hbase_ts;
Added:
hive/trunk/hbase-handler/src/test/results/positive/hbase_timestamp_format.q.out
URL:
http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/test/results/positive/hbase_timestamp_format.q.out?rev=1656519&view=auto
==============================================================================
---
hive/trunk/hbase-handler/src/test/results/positive/hbase_timestamp_format.q.out
(added)
+++
hive/trunk/hbase-handler/src/test/results/positive/hbase_timestamp_format.q.out
Mon Feb 2 17:04:41 2015
@@ -0,0 +1,94 @@
+PREHOOK: query: create table hbase_str(rowkey string,mytime string,mystr
string)
+ STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+ WITH SERDEPROPERTIES ('hbase.columns.mapping' = 'm:mytime,m:mystr')
+ TBLPROPERTIES ('hbase.table.name' = 'hbase_ts')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@hbase_str
+POSTHOOK: query: create table hbase_str(rowkey string,mytime string,mystr
string)
+ STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+ WITH SERDEPROPERTIES ('hbase.columns.mapping' = 'm:mytime,m:mystr')
+ TBLPROPERTIES ('hbase.table.name' = 'hbase_ts')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@hbase_str
+PREHOOK: query: describe hbase_str
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@hbase_str
+POSTHOOK: query: describe hbase_str
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@hbase_str
+rowkey string from deserializer
+mytime string from deserializer
+mystr string from deserializer
+PREHOOK: query: insert overwrite table hbase_str select key,
'2001-02-03-04.05.06.123456', value from src limit 3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@hbase_str
+POSTHOOK: query: insert overwrite table hbase_str select key,
'2001-02-03-04.05.06.123456', value from src limit 3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@hbase_str
+PREHOOK: query: select * from hbase_str
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hbase_str
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hbase_str
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hbase_str
+#### A masked pattern was here ####
+238 2001-02-03-04.05.06.123456 val_238
+311 2001-02-03-04.05.06.123456 val_311
+86 2001-02-03-04.05.06.123456 val_86
+PREHOOK: query: -- Timestamp string does not match the default timestamp
format, specify a custom timestamp format
+create external table hbase_ts(rowkey string,mytime timestamp,mystr string)
+ STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+ WITH SERDEPROPERTIES ('hbase.columns.mapping' = 'm:mytime,m:mystr',
'timestamp.formats' = 'yyyy-MM-dd-HH.mm.ss.SSSSSS')
+ TBLPROPERTIES ('hbase.table.name' = 'hbase_ts')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@hbase_ts
+POSTHOOK: query: -- Timestamp string does not match the default timestamp
format, specify a custom timestamp format
+create external table hbase_ts(rowkey string,mytime timestamp,mystr string)
+ STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+ WITH SERDEPROPERTIES ('hbase.columns.mapping' = 'm:mytime,m:mystr',
'timestamp.formats' = 'yyyy-MM-dd-HH.mm.ss.SSSSSS')
+ TBLPROPERTIES ('hbase.table.name' = 'hbase_ts')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@hbase_ts
+PREHOOK: query: describe hbase_ts
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@hbase_ts
+POSTHOOK: query: describe hbase_ts
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@hbase_ts
+rowkey string from deserializer
+mytime timestamp from deserializer
+mystr string from deserializer
+PREHOOK: query: select * from hbase_ts
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hbase_ts
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hbase_ts
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hbase_ts
+#### A masked pattern was here ####
+238 2001-02-03 04:05:06.123 val_238
+311 2001-02-03 04:05:06.123 val_311
+86 2001-02-03 04:05:06.123 val_86
+PREHOOK: query: drop table hbase_str
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@hbase_str
+PREHOOK: Output: default@hbase_str
+POSTHOOK: query: drop table hbase_str
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@hbase_str
+POSTHOOK: Output: default@hbase_str
+PREHOOK: query: drop table hbase_ts
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@hbase_ts
+PREHOOK: Output: default@hbase_ts
+POSTHOOK: query: drop table hbase_ts
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@hbase_ts
+POSTHOOK: Output: default@hbase_ts
Modified: hive/trunk/pom.xml
URL:
http://svn.apache.org/viewvc/hive/trunk/pom.xml?rev=1656519&r1=1656518&r2=1656519&view=diff
==============================================================================
--- hive/trunk/pom.xml (original)
+++ hive/trunk/pom.xml Mon Feb 2 17:04:41 2015
@@ -134,6 +134,7 @@
<jersey.version>1.14</jersey.version>
<jline.version>2.12</jline.version>
<jms.version>1.1</jms.version>
+ <joda.version>2.5</joda.version>
<jodd.version>3.5.2</jodd.version>
<json.version>20090211</json.version>
<junit.version>4.11</junit.version>
Modified: hive/trunk/ql/pom.xml
URL:
http://svn.apache.org/viewvc/hive/trunk/ql/pom.xml?rev=1656519&r1=1656518&r2=1656519&view=diff
==============================================================================
--- hive/trunk/ql/pom.xml (original)
+++ hive/trunk/ql/pom.xml Mon Feb 2 17:04:41 2015
@@ -689,6 +689,7 @@
<include>com.google.guava:guava</include>
<include>net.sf.opencsv:opencsv</include>
<include>org.apache.hive:spark-client</include>
+ <include>joda-time:joda-time</include>
</includes>
</artifactSet>
<relocations>
Added: hive/trunk/ql/src/test/queries/clientpositive/timestamp_formats.q
URL:
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/timestamp_formats.q?rev=1656519&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/timestamp_formats.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/timestamp_formats.q Mon Feb
2 17:04:41 2015
@@ -0,0 +1,23 @@
+
+CREATE TABLE timestamp_formats (
+ c1 string,
+ c1_ts timestamp,
+ c2 string,
+ c2_ts timestamp,
+ c3 string,
+ c3_ts timestamp
+);
+
+LOAD DATA LOCAL INPATH '../../data/files/ts_formats.txt' overwrite into table timestamp_formats;
+
+SELECT * FROM timestamp_formats;
+
+-- Add single timestamp format. This should allow c3_ts to parse
+ALTER TABLE timestamp_formats SET SERDEPROPERTIES ("timestamp.formats"="yyyy-MM-dd'T'HH:mm:ss");
+SELECT * FROM timestamp_formats;
+
+-- Add another format, to allow c2_ts to parse
+ALTER TABLE timestamp_formats SET SERDEPROPERTIES ("timestamp.formats"="yyyy-MM-dd'T'HH:mm:ss,yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS");
+SELECT * FROM timestamp_formats;
+
+DROP TABLE timestamp_formats;
Added: hive/trunk/ql/src/test/results/clientpositive/timestamp_formats.q.out
URL:
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/timestamp_formats.q.out?rev=1656519&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/timestamp_formats.q.out
(added)
+++ hive/trunk/ql/src/test/results/clientpositive/timestamp_formats.q.out Mon
Feb 2 17:04:41 2015
@@ -0,0 +1,145 @@
+PREHOOK: query: CREATE TABLE timestamp_formats (
+ c1 string,
+ c1_ts timestamp,
+ c2 string,
+ c2_ts timestamp,
+ c3 string,
+ c3_ts timestamp
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@timestamp_formats
+POSTHOOK: query: CREATE TABLE timestamp_formats (
+ c1 string,
+ c1_ts timestamp,
+ c2 string,
+ c2_ts timestamp,
+ c3 string,
+ c3_ts timestamp
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@timestamp_formats
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/ts_formats.txt'
overwrite into table timestamp_formats
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@timestamp_formats
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/ts_formats.txt'
overwrite into table timestamp_formats
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@timestamp_formats
+PREHOOK: query: SELECT * FROM timestamp_formats
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_formats
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM timestamp_formats
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_formats
+#### A masked pattern was here ####
+2011-01-01 01:01:01.111111111 2011-01-01 01:01:01.111111111
2011-01-01T01:01:01.111111111 NULL 2011-01-01T01:01:01 NULL
+2012-02-02 02:02:02.222222222 2012-02-02 02:02:02.222222222
2012-02-02T02:02:02.222222222 NULL 2012-02-02T02:02:02 NULL
+2013-03-03 03:03:03.333333333 2013-03-03 03:03:03.333333333
2013-03-03T03:03:03.333333333 NULL 2013-03-03T03:03:03 NULL
+2014-04-04 04:04:04.444444444 2014-04-04 04:04:04.444444444
2014-04-04T04:04:04.444444444 NULL 2014-04-04T04:04:04 NULL
+2015-05-05 05:05:05.555555555 2015-05-05 05:05:05.555555555
2015-05-05T05:05:05.555555555 NULL 2015-05-05T05:05:05 NULL
+2016-06-06 06:06:06.666666666 2016-06-06 06:06:06.666666666
2016-06-06T06:06:06.666666666 NULL 2016-06-06T06:06:06 NULL
+2017-07-07 07:07:07.777777777 2017-07-07 07:07:07.777777777
2017-07-07T07:07:07.777777777 NULL 2017-07-07T07:07:07 NULL
+2018-08-08 08:08:08.888888888 2018-08-08 08:08:08.888888888
2018-08-08T08:08:08.888888888 NULL 2018-08-08T08:08:08 NULL
+2019-09-09 09:09:09.999999999 2019-09-09 09:09:09.999999999
2019-09-09T09:09:09.999999999 NULL 2019-09-09T09:09:09 NULL
+2020-10-10 10:10:10.101010101 2020-10-10 10:10:10.101010101
2020-10-10T10:10:10.101010101 NULL 2020-10-10T10:10:10 NULL
+2021-11-11 11:11:11.111111111 2021-11-11 11:11:11.111111111
2021-11-11T11:11:11.111111111 NULL 2021-11-11T11:11:11 NULL
+2022-12-12 12:12:12.121212121 2022-12-12 12:12:12.121212121
2022-12-12T12:12:12.121212121 NULL 2022-12-12T12:12:12 NULL
+2023-01-02 13:13:13.131313131 2023-01-02 13:13:13.131313131
2023-01-02T13:13:13.131313131 NULL 2023-01-02T13:13:13 NULL
+2024-02-02 14:14:14.141414141 2024-02-02 14:14:14.141414141
2024-02-02T14:14:14.141414141 NULL 2024-02-02T14:14:14 NULL
+2025-03-03 15:15:15.151515151 2025-03-03 15:15:15.151515151
2025-03-03T15:15:15.151515151 NULL 2025-03-03T15:15:15 NULL
+2026-04-04 16:16:16.161616161 2026-04-04 16:16:16.161616161
2026-04-04T16:16:16.161616161 NULL 2026-04-04T16:16:16 NULL
+2027-05-05 17:17:17.171717171 2027-05-05 17:17:17.171717171
2027-05-05T17:17:17.171717171 NULL 2027-05-05T17:17:17 NULL
+2028-06-06 18:18:18.181818181 2028-06-06 18:18:18.181818181
2028-06-06T18:18:18.181818181 NULL 2028-06-06T18:18:18 NULL
+2029-07-07 19:19:19.191919191 2029-07-07 19:19:19.191919191
2029-07-07T19:19:19.191919191 NULL 2029-07-07T19:19:19 NULL
+2030-08-08 20:20:20.202020202 2030-08-08 20:20:20.202020202
2030-08-08T20:20:20.202020202 NULL 2030-08-08T20:20:20 NULL
+2031-09-09 21:21:21.212121212 2031-09-09 21:21:21.212121212
2031-09-09T21:21:21.212121212 NULL 2031-09-09T21:21:21 NULL
+PREHOOK: query: -- Add single timestamp format. This should allow c3_ts to
parse
+ALTER TABLE timestamp_formats SET SERDEPROPERTIES
("timestamp.formats"="yyyy-MM-dd'T'HH:mm:ss")
+PREHOOK: type: ALTERTABLE_SERDEPROPERTIES
+PREHOOK: Input: default@timestamp_formats
+PREHOOK: Output: default@timestamp_formats
+POSTHOOK: query: -- Add single timestamp format. This should allow c3_ts to
parse
+ALTER TABLE timestamp_formats SET SERDEPROPERTIES
("timestamp.formats"="yyyy-MM-dd'T'HH:mm:ss")
+POSTHOOK: type: ALTERTABLE_SERDEPROPERTIES
+POSTHOOK: Input: default@timestamp_formats
+POSTHOOK: Output: default@timestamp_formats
+PREHOOK: query: SELECT * FROM timestamp_formats
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_formats
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM timestamp_formats
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_formats
+#### A masked pattern was here ####
+2011-01-01 01:01:01.111111111 2011-01-01 01:01:01.111111111
2011-01-01T01:01:01.111111111 NULL 2011-01-01T01:01:01 2011-01-01
01:01:01
+2012-02-02 02:02:02.222222222 2012-02-02 02:02:02.222222222
2012-02-02T02:02:02.222222222 NULL 2012-02-02T02:02:02 2012-02-02
02:02:02
+2013-03-03 03:03:03.333333333 2013-03-03 03:03:03.333333333
2013-03-03T03:03:03.333333333 NULL 2013-03-03T03:03:03 2013-03-03
03:03:03
+2014-04-04 04:04:04.444444444 2014-04-04 04:04:04.444444444
2014-04-04T04:04:04.444444444 NULL 2014-04-04T04:04:04 2014-04-04
04:04:04
+2015-05-05 05:05:05.555555555 2015-05-05 05:05:05.555555555
2015-05-05T05:05:05.555555555 NULL 2015-05-05T05:05:05 2015-05-05
05:05:05
+2016-06-06 06:06:06.666666666 2016-06-06 06:06:06.666666666
2016-06-06T06:06:06.666666666 NULL 2016-06-06T06:06:06 2016-06-06
06:06:06
+2017-07-07 07:07:07.777777777 2017-07-07 07:07:07.777777777
2017-07-07T07:07:07.777777777 NULL 2017-07-07T07:07:07 2017-07-07
07:07:07
+2018-08-08 08:08:08.888888888 2018-08-08 08:08:08.888888888
2018-08-08T08:08:08.888888888 NULL 2018-08-08T08:08:08 2018-08-08
08:08:08
+2019-09-09 09:09:09.999999999 2019-09-09 09:09:09.999999999
2019-09-09T09:09:09.999999999 NULL 2019-09-09T09:09:09 2019-09-09
09:09:09
+2020-10-10 10:10:10.101010101 2020-10-10 10:10:10.101010101
2020-10-10T10:10:10.101010101 NULL 2020-10-10T10:10:10 2020-10-10
10:10:10
+2021-11-11 11:11:11.111111111 2021-11-11 11:11:11.111111111
2021-11-11T11:11:11.111111111 NULL 2021-11-11T11:11:11 2021-11-11
11:11:11
+2022-12-12 12:12:12.121212121 2022-12-12 12:12:12.121212121
2022-12-12T12:12:12.121212121 NULL 2022-12-12T12:12:12 2022-12-12
12:12:12
+2023-01-02 13:13:13.131313131 2023-01-02 13:13:13.131313131
2023-01-02T13:13:13.131313131 NULL 2023-01-02T13:13:13 2023-01-02
13:13:13
+2024-02-02 14:14:14.141414141 2024-02-02 14:14:14.141414141
2024-02-02T14:14:14.141414141 NULL 2024-02-02T14:14:14 2024-02-02
14:14:14
+2025-03-03 15:15:15.151515151 2025-03-03 15:15:15.151515151
2025-03-03T15:15:15.151515151 NULL 2025-03-03T15:15:15 2025-03-03
15:15:15
+2026-04-04 16:16:16.161616161 2026-04-04 16:16:16.161616161
2026-04-04T16:16:16.161616161 NULL 2026-04-04T16:16:16 2026-04-04
16:16:16
+2027-05-05 17:17:17.171717171 2027-05-05 17:17:17.171717171
2027-05-05T17:17:17.171717171 NULL 2027-05-05T17:17:17 2027-05-05
17:17:17
+2028-06-06 18:18:18.181818181 2028-06-06 18:18:18.181818181
2028-06-06T18:18:18.181818181 NULL 2028-06-06T18:18:18 2028-06-06
18:18:18
+2029-07-07 19:19:19.191919191 2029-07-07 19:19:19.191919191
2029-07-07T19:19:19.191919191 NULL 2029-07-07T19:19:19 2029-07-07
19:19:19
+2030-08-08 20:20:20.202020202 2030-08-08 20:20:20.202020202
2030-08-08T20:20:20.202020202 NULL 2030-08-08T20:20:20 2030-08-08
20:20:20
+2031-09-09 21:21:21.212121212 2031-09-09 21:21:21.212121212
2031-09-09T21:21:21.212121212 NULL 2031-09-09T21:21:21 2031-09-09
21:21:21
+PREHOOK: query: -- Add another format, to allow c2_ts to parse
+ALTER TABLE timestamp_formats SET SERDEPROPERTIES
("timestamp.formats"="yyyy-MM-dd'T'HH:mm:ss,yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS")
+PREHOOK: type: ALTERTABLE_SERDEPROPERTIES
+PREHOOK: Input: default@timestamp_formats
+PREHOOK: Output: default@timestamp_formats
+POSTHOOK: query: -- Add another format, to allow c2_ts to parse
+ALTER TABLE timestamp_formats SET SERDEPROPERTIES
("timestamp.formats"="yyyy-MM-dd'T'HH:mm:ss,yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS")
+POSTHOOK: type: ALTERTABLE_SERDEPROPERTIES
+POSTHOOK: Input: default@timestamp_formats
+POSTHOOK: Output: default@timestamp_formats
+PREHOOK: query: SELECT * FROM timestamp_formats
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_formats
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM timestamp_formats
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_formats
+#### A masked pattern was here ####
+2011-01-01 01:01:01.111111111 2011-01-01 01:01:01.111111111
2011-01-01T01:01:01.111111111 2011-01-01 01:01:01.111 2011-01-01T01:01:01
2011-01-01 01:01:01
+2012-02-02 02:02:02.222222222 2012-02-02 02:02:02.222222222
2012-02-02T02:02:02.222222222 2012-02-02 02:02:02.222 2012-02-02T02:02:02
2012-02-02 02:02:02
+2013-03-03 03:03:03.333333333 2013-03-03 03:03:03.333333333
2013-03-03T03:03:03.333333333 2013-03-03 03:03:03.333 2013-03-03T03:03:03
2013-03-03 03:03:03
+2014-04-04 04:04:04.444444444 2014-04-04 04:04:04.444444444
2014-04-04T04:04:04.444444444 2014-04-04 04:04:04.444 2014-04-04T04:04:04
2014-04-04 04:04:04
+2015-05-05 05:05:05.555555555 2015-05-05 05:05:05.555555555
2015-05-05T05:05:05.555555555 2015-05-05 05:05:05.555 2015-05-05T05:05:05
2015-05-05 05:05:05
+2016-06-06 06:06:06.666666666 2016-06-06 06:06:06.666666666
2016-06-06T06:06:06.666666666 2016-06-06 06:06:06.666 2016-06-06T06:06:06
2016-06-06 06:06:06
+2017-07-07 07:07:07.777777777 2017-07-07 07:07:07.777777777
2017-07-07T07:07:07.777777777 2017-07-07 07:07:07.777 2017-07-07T07:07:07
2017-07-07 07:07:07
+2018-08-08 08:08:08.888888888 2018-08-08 08:08:08.888888888
2018-08-08T08:08:08.888888888 2018-08-08 08:08:08.888 2018-08-08T08:08:08
2018-08-08 08:08:08
+2019-09-09 09:09:09.999999999 2019-09-09 09:09:09.999999999
2019-09-09T09:09:09.999999999 2019-09-09 09:09:09.999 2019-09-09T09:09:09
2019-09-09 09:09:09
+2020-10-10 10:10:10.101010101 2020-10-10 10:10:10.101010101
2020-10-10T10:10:10.101010101 2020-10-10 10:10:10.101 2020-10-10T10:10:10
2020-10-10 10:10:10
+2021-11-11 11:11:11.111111111 2021-11-11 11:11:11.111111111
2021-11-11T11:11:11.111111111 2021-11-11 11:11:11.111 2021-11-11T11:11:11
2021-11-11 11:11:11
+2022-12-12 12:12:12.121212121 2022-12-12 12:12:12.121212121
2022-12-12T12:12:12.121212121 2022-12-12 12:12:12.121 2022-12-12T12:12:12
2022-12-12 12:12:12
+2023-01-02 13:13:13.131313131 2023-01-02 13:13:13.131313131
2023-01-02T13:13:13.131313131 2023-01-02 13:13:13.131 2023-01-02T13:13:13
2023-01-02 13:13:13
+2024-02-02 14:14:14.141414141 2024-02-02 14:14:14.141414141
2024-02-02T14:14:14.141414141 2024-02-02 14:14:14.141 2024-02-02T14:14:14
2024-02-02 14:14:14
+2025-03-03 15:15:15.151515151 2025-03-03 15:15:15.151515151
2025-03-03T15:15:15.151515151 2025-03-03 15:15:15.151 2025-03-03T15:15:15
2025-03-03 15:15:15
+2026-04-04 16:16:16.161616161 2026-04-04 16:16:16.161616161
2026-04-04T16:16:16.161616161 2026-04-04 16:16:16.161 2026-04-04T16:16:16
2026-04-04 16:16:16
+2027-05-05 17:17:17.171717171 2027-05-05 17:17:17.171717171
2027-05-05T17:17:17.171717171 2027-05-05 17:17:17.171 2027-05-05T17:17:17
2027-05-05 17:17:17
+2028-06-06 18:18:18.181818181 2028-06-06 18:18:18.181818181
2028-06-06T18:18:18.181818181 2028-06-06 18:18:18.181 2028-06-06T18:18:18
2028-06-06 18:18:18
+2029-07-07 19:19:19.191919191 2029-07-07 19:19:19.191919191
2029-07-07T19:19:19.191919191 2029-07-07 19:19:19.191 2029-07-07T19:19:19
2029-07-07 19:19:19
+2030-08-08 20:20:20.202020202 2030-08-08 20:20:20.202020202
2030-08-08T20:20:20.202020202 2030-08-08 20:20:20.202 2030-08-08T20:20:20
2030-08-08 20:20:20
+2031-09-09 21:21:21.212121212 2031-09-09 21:21:21.212121212
2031-09-09T21:21:21.212121212 2031-09-09 21:21:21.212 2031-09-09T21:21:21
2031-09-09 21:21:21
+PREHOOK: query: DROP TABLE timestamp_formats
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@timestamp_formats
+PREHOOK: Output: default@timestamp_formats
+POSTHOOK: query: DROP TABLE timestamp_formats
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@timestamp_formats
+POSTHOOK: Output: default@timestamp_formats
Modified: hive/trunk/serde/if/serde.thrift
URL:
http://svn.apache.org/viewvc/hive/trunk/serde/if/serde.thrift?rev=1656519&r1=1656518&r2=1656519&view=diff
==============================================================================
--- hive/trunk/serde/if/serde.thrift (original)
+++ hive/trunk/serde/if/serde.thrift Mon Feb 2 17:04:41 2015
@@ -69,6 +69,8 @@ const string UNION_TYPE_NAME = "unionty
const string LIST_COLUMNS = "columns";
const string LIST_COLUMN_TYPES = "columns.types";
+const string TIMESTAMP_FORMATS = "timestamp.formats";
+
const set<string> PrimitiveTypes = [ VOID_TYPE_NAME BOOLEAN_TYPE_NAME
TINYINT_TYPE_NAME SMALLINT_TYPE_NAME INT_TYPE_NAME BIGINT_TYPE_NAME
FLOAT_TYPE_NAME DOUBLE_TYPE_NAME STRING_TYPE_NAME VARCHAR_TYPE_NAME
CHAR_TYPE_NAME DATE_TYPE_NAME DATETIME_TYPE_NAME TIMESTAMP_TYPE_NAME
DECIMAL_TYPE_NAME BINARY_TYPE_NAME],
const set<string> CollectionTypes = [ LIST_TYPE_NAME MAP_TYPE_NAME ],
const set<string> IntegralTypes = [ TINYINT_TYPE_NAME SMALLINT_TYPE_NAME
INT_TYPE_NAME BIGINT_TYPE_NAME ],
Modified: hive/trunk/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
URL:
http://svn.apache.org/viewvc/hive/trunk/serde/src/gen/thrift/gen-cpp/serde_constants.cpp?rev=1656519&r1=1656518&r2=1656519&view=diff
==============================================================================
--- hive/trunk/serde/src/gen/thrift/gen-cpp/serde_constants.cpp (original)
+++ hive/trunk/serde/src/gen/thrift/gen-cpp/serde_constants.cpp Mon Feb 2
17:04:41 2015
@@ -89,6 +89,8 @@ serdeConstants::serdeConstants() {
LIST_COLUMN_TYPES = "columns.types";
+ TIMESTAMP_FORMATS = "timestamp.formats";
+
PrimitiveTypes.insert("void");
PrimitiveTypes.insert("boolean");
PrimitiveTypes.insert("tinyint");
Modified: hive/trunk/serde/src/gen/thrift/gen-cpp/serde_constants.h
URL:
http://svn.apache.org/viewvc/hive/trunk/serde/src/gen/thrift/gen-cpp/serde_constants.h?rev=1656519&r1=1656518&r2=1656519&view=diff
==============================================================================
--- hive/trunk/serde/src/gen/thrift/gen-cpp/serde_constants.h (original)
+++ hive/trunk/serde/src/gen/thrift/gen-cpp/serde_constants.h Mon Feb 2
17:04:41 2015
@@ -54,6 +54,7 @@ class serdeConstants {
std::string UNION_TYPE_NAME;
std::string LIST_COLUMNS;
std::string LIST_COLUMN_TYPES;
+ std::string TIMESTAMP_FORMATS;
std::set<std::string> PrimitiveTypes;
std::set<std::string> CollectionTypes;
std::set<std::string> IntegralTypes;
Modified:
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
URL:
http://svn.apache.org/viewvc/hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java?rev=1656519&r1=1656518&r2=1656519&view=diff
==============================================================================
---
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
(original)
+++
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
Mon Feb 2 17:04:41 2015
@@ -111,6 +111,8 @@ public class serdeConstants {
public static final String LIST_COLUMN_TYPES = "columns.types";
+ public static final String TIMESTAMP_FORMATS = "timestamp.formats";
+
public static final Set<String> PrimitiveTypes = new HashSet<String>();
static {
PrimitiveTypes.add("void");
Modified:
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java
URL:
http://svn.apache.org/viewvc/hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java?rev=1656519&r1=1656518&r2=1656519&view=diff
==============================================================================
---
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java
(original)
+++
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java
Mon Feb 2 17:04:41 2015
@@ -528,7 +528,7 @@ public class ThriftTestObj implements or
struct.field3 = new ArrayList<InnerStruct>(_list0.size);
for (int _i1 = 0; _i1 < _list0.size; ++_i1)
{
- InnerStruct _elem2; // required
+ InnerStruct _elem2; // optional
_elem2 = new InnerStruct();
_elem2.read(iprot);
struct.field3.add(_elem2);
@@ -636,7 +636,7 @@ public class ThriftTestObj implements or
struct.field3 = new ArrayList<InnerStruct>(_list5.size);
for (int _i6 = 0; _i6 < _list5.size; ++_i6)
{
- InnerStruct _elem7; // required
+ InnerStruct _elem7; // optional
_elem7 = new InnerStruct();
_elem7.read(iprot);
struct.field3.add(_elem7);
Modified:
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
URL:
http://svn.apache.org/viewvc/hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java?rev=1656519&r1=1656518&r2=1656519&view=diff
==============================================================================
---
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
(original)
+++
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
Mon Feb 2 17:04:41 2015
@@ -1211,7 +1211,7 @@ public class Complex implements org.apac
struct.lint = new ArrayList<Integer>(_list18.size);
for (int _i19 = 0; _i19 < _list18.size; ++_i19)
{
- int _elem20; // required
+ int _elem20; // optional
_elem20 = iprot.readI32();
struct.lint.add(_elem20);
}
@@ -1229,7 +1229,7 @@ public class Complex implements org.apac
struct.lString = new ArrayList<String>(_list21.size);
for (int _i22 = 0; _i22 < _list21.size; ++_i22)
{
- String _elem23; // required
+ String _elem23; // optional
_elem23 = iprot.readString();
struct.lString.add(_elem23);
}
@@ -1247,7 +1247,7 @@ public class Complex implements org.apac
struct.lintString = new ArrayList<IntString>(_list24.size);
for (int _i25 = 0; _i25 < _list24.size; ++_i25)
{
- IntString _elem26; // required
+ IntString _elem26; // optional
_elem26 = new IntString();
_elem26.read(iprot);
struct.lintString.add(_elem26);
@@ -1610,7 +1610,7 @@ public class Complex implements org.apac
struct.lint = new ArrayList<Integer>(_list57.size);
for (int _i58 = 0; _i58 < _list57.size; ++_i58)
{
- int _elem59; // required
+ int _elem59; // optional
_elem59 = iprot.readI32();
struct.lint.add(_elem59);
}
@@ -1623,7 +1623,7 @@ public class Complex implements org.apac
struct.lString = new ArrayList<String>(_list60.size);
for (int _i61 = 0; _i61 < _list60.size; ++_i61)
{
- String _elem62; // required
+ String _elem62; // optional
_elem62 = iprot.readString();
struct.lString.add(_elem62);
}
@@ -1636,7 +1636,7 @@ public class Complex implements org.apac
struct.lintString = new ArrayList<IntString>(_list63.size);
for (int _i64 = 0; _i64 < _list63.size; ++_i64)
{
- IntString _elem65; // required
+ IntString _elem65; // optional
_elem65 = new IntString();
_elem65.read(iprot);
struct.lintString.add(_elem65);
Modified:
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java
URL:
http://svn.apache.org/viewvc/hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java?rev=1656519&r1=1656518&r2=1656519&view=diff
==============================================================================
---
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java
(original)
+++
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java
Mon Feb 2 17:04:41 2015
@@ -2280,7 +2280,7 @@ public class MegaStruct implements org.a
_val19 = new ArrayList<String>(_list20.size);
for (int _i21 = 0; _i21 < _list20.size; ++_i21)
{
- String _elem22; // required
+ String _elem22; // optional
_elem22 = iprot.readString();
_val19.add(_elem22);
}
@@ -2310,7 +2310,7 @@ public class MegaStruct implements org.a
_val26 = new ArrayList<MiniStruct>(_list27.size);
for (int _i28 = 0; _i28 < _list27.size; ++_i28)
{
- MiniStruct _elem29; // required
+ MiniStruct _elem29; // optional
_elem29 = new MiniStruct();
_elem29.read(iprot);
_val26.add(_elem29);
@@ -2333,7 +2333,7 @@ public class MegaStruct implements org.a
struct.my_stringlist = new ArrayList<String>(_list30.size);
for (int _i31 = 0; _i31 < _list30.size; ++_i31)
{
- String _elem32; // required
+ String _elem32; // optional
_elem32 = iprot.readString();
struct.my_stringlist.add(_elem32);
}
@@ -2351,7 +2351,7 @@ public class MegaStruct implements org.a
struct.my_structlist = new ArrayList<MiniStruct>(_list33.size);
for (int _i34 = 0; _i34 < _list33.size; ++_i34)
{
- MiniStruct _elem35; // required
+ MiniStruct _elem35; // optional
_elem35 = new MiniStruct();
_elem35.read(iprot);
struct.my_structlist.add(_elem35);
@@ -2370,7 +2370,7 @@ public class MegaStruct implements org.a
struct.my_enumlist = new ArrayList<MyEnum>(_list36.size);
for (int _i37 = 0; _i37 < _list36.size; ++_i37)
{
- MyEnum _elem38; // required
+ MyEnum _elem38; // optional
_elem38 = MyEnum.findByValue(iprot.readI32());
struct.my_enumlist.add(_elem38);
}
@@ -2388,7 +2388,7 @@ public class MegaStruct implements org.a
struct.my_stringset = new HashSet<String>(2*_set39.size);
for (int _i40 = 0; _i40 < _set39.size; ++_i40)
{
- String _elem41; // required
+ String _elem41; // optional
_elem41 = iprot.readString();
struct.my_stringset.add(_elem41);
}
@@ -2406,7 +2406,7 @@ public class MegaStruct implements org.a
struct.my_enumset = new HashSet<MyEnum>(2*_set42.size);
for (int _i43 = 0; _i43 < _set42.size; ++_i43)
{
- MyEnum _elem44; // required
+ MyEnum _elem44; // optional
_elem44 = MyEnum.findByValue(iprot.readI32());
struct.my_enumset.add(_elem44);
}
@@ -2424,7 +2424,7 @@ public class MegaStruct implements org.a
struct.my_structset = new HashSet<MiniStruct>(2*_set45.size);
for (int _i46 = 0; _i46 < _set45.size; ++_i46)
{
- MiniStruct _elem47; // required
+ MiniStruct _elem47; // optional
_elem47 = new MiniStruct();
_elem47.read(iprot);
struct.my_structset.add(_elem47);
@@ -3023,7 +3023,7 @@ public class MegaStruct implements org.a
_val95 = new ArrayList<String>(_list96.size);
for (int _i97 = 0; _i97 < _list96.size; ++_i97)
{
- String _elem98; // required
+ String _elem98; // optional
_elem98 = iprot.readString();
_val95.add(_elem98);
}
@@ -3047,7 +3047,7 @@ public class MegaStruct implements org.a
_val102 = new ArrayList<MiniStruct>(_list103.size);
for (int _i104 = 0; _i104 < _list103.size; ++_i104)
{
- MiniStruct _elem105; // required
+ MiniStruct _elem105; // optional
_elem105 = new MiniStruct();
_elem105.read(iprot);
_val102.add(_elem105);
@@ -3064,7 +3064,7 @@ public class MegaStruct implements org.a
struct.my_stringlist = new ArrayList<String>(_list106.size);
for (int _i107 = 0; _i107 < _list106.size; ++_i107)
{
- String _elem108; // required
+ String _elem108; // optional
_elem108 = iprot.readString();
struct.my_stringlist.add(_elem108);
}
@@ -3077,7 +3077,7 @@ public class MegaStruct implements org.a
struct.my_structlist = new ArrayList<MiniStruct>(_list109.size);
for (int _i110 = 0; _i110 < _list109.size; ++_i110)
{
- MiniStruct _elem111; // required
+ MiniStruct _elem111; // optional
_elem111 = new MiniStruct();
_elem111.read(iprot);
struct.my_structlist.add(_elem111);
@@ -3091,7 +3091,7 @@ public class MegaStruct implements org.a
struct.my_enumlist = new ArrayList<MyEnum>(_list112.size);
for (int _i113 = 0; _i113 < _list112.size; ++_i113)
{
- MyEnum _elem114; // required
+ MyEnum _elem114; // optional
_elem114 = MyEnum.findByValue(iprot.readI32());
struct.my_enumlist.add(_elem114);
}
@@ -3104,7 +3104,7 @@ public class MegaStruct implements org.a
struct.my_stringset = new HashSet<String>(2*_set115.size);
for (int _i116 = 0; _i116 < _set115.size; ++_i116)
{
- String _elem117; // required
+ String _elem117; // optional
_elem117 = iprot.readString();
struct.my_stringset.add(_elem117);
}
@@ -3117,7 +3117,7 @@ public class MegaStruct implements org.a
struct.my_enumset = new HashSet<MyEnum>(2*_set118.size);
for (int _i119 = 0; _i119 < _set118.size; ++_i119)
{
- MyEnum _elem120; // required
+ MyEnum _elem120; // optional
_elem120 = MyEnum.findByValue(iprot.readI32());
struct.my_enumset.add(_elem120);
}
@@ -3130,7 +3130,7 @@ public class MegaStruct implements org.a
struct.my_structset = new HashSet<MiniStruct>(2*_set121.size);
for (int _i122 = 0; _i122 < _set121.size; ++_i122)
{
- MiniStruct _elem123; // required
+ MiniStruct _elem123; // optional
_elem123 = new MiniStruct();
_elem123.read(iprot);
struct.my_structset.add(_elem123);
Modified:
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/PropValueUnion.java
URL:
http://svn.apache.org/viewvc/hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/PropValueUnion.java?rev=1656519&r1=1656518&r2=1656519&view=diff
==============================================================================
---
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/PropValueUnion.java
(original)
+++
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/PropValueUnion.java
Mon Feb 2 17:04:41 2015
@@ -300,7 +300,7 @@ public class PropValueUnion extends org.
lString = new ArrayList<String>(_list0.size);
for (int _i1 = 0; _i1 < _list0.size; ++_i1)
{
- String _elem2; // required
+ String _elem2; // optional
_elem2 = iprot.readString();
lString.add(_elem2);
}
@@ -423,7 +423,7 @@ public class PropValueUnion extends org.
lString = new ArrayList<String>(_list9.size);
for (int _i10 = 0; _i10 < _list9.size; ++_i10)
{
- String _elem11; // required
+ String _elem11; // optional
_elem11 = iprot.readString();
lString.add(_elem11);
}
Modified:
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/SetIntString.java
URL:
http://svn.apache.org/viewvc/hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/SetIntString.java?rev=1656519&r1=1656518&r2=1656519&view=diff
==============================================================================
---
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/SetIntString.java
(original)
+++
hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/SetIntString.java
Mon Feb 2 17:04:41 2015
@@ -431,7 +431,7 @@ public class SetIntString implements org
struct.sIntString = new HashSet<IntString>(2*_set82.size);
for (int _i83 = 0; _i83 < _set82.size; ++_i83)
{
- IntString _elem84; // required
+ IntString _elem84; // optional
_elem84 = new IntString();
_elem84.read(iprot);
struct.sIntString.add(_elem84);
@@ -530,7 +530,7 @@ public class SetIntString implements org
struct.sIntString = new HashSet<IntString>(2*_set87.size);
for (int _i88 = 0; _i88 < _set87.size; ++_i88)
{
- IntString _elem89; // required
+ IntString _elem89; // optional
_elem89 = new IntString();
_elem89.read(iprot);
struct.sIntString.add(_elem89);
Modified:
hive/trunk/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
URL:
http://svn.apache.org/viewvc/hive/trunk/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php?rev=1656519&r1=1656518&r2=1656519&view=diff
==============================================================================
---
hive/trunk/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
(original)
+++
hive/trunk/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
Mon Feb 2 17:04:41 2015
@@ -94,6 +94,8 @@ $GLOBALS['serde_CONSTANTS']['LIST_COLUMN
$GLOBALS['serde_CONSTANTS']['LIST_COLUMN_TYPES'] = "columns.types";
+$GLOBALS['serde_CONSTANTS']['TIMESTAMP_FORMATS'] = "timestamp.formats";
+
$GLOBALS['serde_CONSTANTS']['PrimitiveTypes'] = array(
"void" => true,
"boolean" => true,
Modified:
hive/trunk/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
URL:
http://svn.apache.org/viewvc/hive/trunk/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py?rev=1656519&r1=1656518&r2=1656519&view=diff
==============================================================================
---
hive/trunk/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
(original)
+++
hive/trunk/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
Mon Feb 2 17:04:41 2015
@@ -48,6 +48,7 @@ STRUCT_TYPE_NAME = "struct"
UNION_TYPE_NAME = "uniontype"
LIST_COLUMNS = "columns"
LIST_COLUMN_TYPES = "columns.types"
+TIMESTAMP_FORMATS = "timestamp.formats"
PrimitiveTypes = set([
"void",
"boolean",
Modified: hive/trunk/serde/src/gen/thrift/gen-rb/serde_constants.rb
URL:
http://svn.apache.org/viewvc/hive/trunk/serde/src/gen/thrift/gen-rb/serde_constants.rb?rev=1656519&r1=1656518&r2=1656519&view=diff
==============================================================================
--- hive/trunk/serde/src/gen/thrift/gen-rb/serde_constants.rb (original)
+++ hive/trunk/serde/src/gen/thrift/gen-rb/serde_constants.rb Mon Feb 2
17:04:41 2015
@@ -85,6 +85,8 @@ LIST_COLUMNS = %q"columns"
LIST_COLUMN_TYPES = %q"columns.types"
+TIMESTAMP_FORMATS = %q"timestamp.formats"
+
PrimitiveTypes = Set.new([
%q"void",
%q"boolean",
Modified:
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroLazyObjectInspector.java
URL:
http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroLazyObjectInspector.java?rev=1656519&r1=1656518&r2=1656519&view=diff
==============================================================================
---
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroLazyObjectInspector.java
(original)
+++
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroLazyObjectInspector.java
Mon Feb 2 17:04:41 2015
@@ -44,6 +44,7 @@ import org.apache.hadoop.hive.serde2.laz
import
org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector;
import
org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
import
org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyUnionObjectInspector;
+import
org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParameters;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -84,6 +85,7 @@ public class AvroLazyObjectInspector ext
* @param escaped whether the data is escaped or not
* @param escapeChar if escaped is true, the escape character
* */
+ @Deprecated
public AvroLazyObjectInspector(List<String> structFieldNames,
List<ObjectInspector> structFieldObjectInspectors, List<String>
structFieldComments,
byte separator, Text nullSequence, boolean lastColumnTakesRest, boolean
escaped,
@@ -92,6 +94,12 @@ public class AvroLazyObjectInspector ext
nullSequence, lastColumnTakesRest, escaped, escapeChar);
}
+ public AvroLazyObjectInspector(List<String> structFieldNames,
+ List<ObjectInspector> structFieldObjectInspectors, List<String>
structFieldComments,
+ byte separator, LazyObjectInspectorParameters lazyParams) {
+ super(structFieldNames, structFieldObjectInspectors, structFieldComments,
separator, lazyParams);
+ }
+
/**
* Set the reader schema for the {@link AvroLazyObjectInspector} to the
given schema
* */
Modified:
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java
URL:
http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java?rev=1656519&r1=1656518&r2=1656519&view=diff
==============================================================================
---
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java
(original)
+++
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java
Mon Feb 2 17:04:41 2015
@@ -36,6 +36,7 @@ import org.apache.hadoop.hive.serde2.Ser
import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
+import
org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParametersImpl;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
@@ -96,9 +97,7 @@ public class ColumnarSerDe extends Colum
// Create the ObjectInspectors for the fields. Note: Currently
// ColumnarObject uses same ObjectInpector as LazyStruct
cachedObjectInspector = LazyFactory.createColumnarStructInspector(
- serdeParams.getColumnNames(), serdeParams.getColumnTypes(), serdeParams
- .getSeparators(), serdeParams.getNullSequence(), serdeParams
- .isEscaped(), serdeParams.getEscapeChar());
+ serdeParams.getColumnNames(), serdeParams.getColumnTypes(),
serdeParams);
int size = serdeParams.getColumnTypes().size();
List<Integer> notSkipIDs = new ArrayList<Integer>();