ashish-kumar-sharma commented on a change in pull request #2550:
URL: https://github.com/apache/hive/pull/2550#discussion_r681825649
##########
File path:
ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUnixTime.java
##########
@@ -87,89 +75,58 @@ public ObjectInspector initialize(ObjectInspector[]
arguments) throws UDFArgumen
inputLongOI = (LongObjectInspector) arguments[0];
break;
default:
- throw new UDFArgumentException("The function " +
getName().toUpperCase()
- + " takes only int/long types for first argument. Got Type:" +
arg0OI.getPrimitiveCategory().name());
+ throw new UDFArgumentException("The function from_unixtime takes only
int/long types for first argument. Got Type:"
+ + arg0OI.getPrimitiveCategory().name());
}
if (arguments.length == 2) {
- PrimitiveObjectInspector arg1OI = (PrimitiveObjectInspector)
arguments[1];
- switch (arg1OI.getPrimitiveCategory()) {
- case CHAR:
- case VARCHAR:
- case STRING:
- inputTextConverter = ObjectInspectorConverters.getConverter(arg1OI,
- PrimitiveObjectInspectorFactory.javaStringObjectInspector);
- break;
- default:
- throw new UDFArgumentException("The function " +
getName().toUpperCase()
- + " takes only string type for second argument. Got Type:" +
arg1OI.getPrimitiveCategory().name());
- }
+ checkArgGroups(arguments, 1, inputTypes, STRING_GROUP);
+ obtainStringConverter(arguments, 1, inputTypes, converters);
}
- if (timeZone == null) {
- timeZone = SessionState.get() == null ? new
HiveConf().getLocalTimeZone() : SessionState.get().getConf()
- .getLocalTimeZone();
- formatter.setTimeZone(TimeZone.getTimeZone(timeZone));
- }
+ timeZone = SessionState.get() == null ? new HiveConf().getLocalTimeZone()
: SessionState.get().getConf()
+ .getLocalTimeZone();
+ FORMATTER.withZone(timeZone);
return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
}
- @Override
- public void configure(MapredContext context) {
- if (context != null) {
- String timeZoneStr = HiveConf.getVar(context.getJobConf(),
HiveConf.ConfVars.HIVE_LOCAL_TIME_ZONE);
- timeZone = TimestampTZUtil.parseTimeZone(timeZoneStr);
- formatter.setTimeZone(TimeZone.getTimeZone(timeZone));
- }
- }
-
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
if (arguments[0].get() == null) {
return null;
}
- if (inputTextConverter != null) {
- if (arguments[1].get() == null) {
- return null;
- }
- String format = (String) inputTextConverter.convert(arguments[1].get());
+ if(arguments.length == 2) {
+ String format = getStringValue(arguments, 1, converters);
if (format == null) {
return null;
}
if (!format.equals(lastFormat)) {
- formatter = new SimpleDateFormat(format);
- formatter.setTimeZone(TimeZone.getTimeZone(timeZone));
+ FORMATTER = DateTimeFormatter.ofPattern(format);
lastFormat = format;
}
}
// convert seconds to milliseconds
long unixtime;
+ Instant i;
Review comment:
https://cwiki.apache.org/confluence/display/hive/languagemanual+udf
As per the documentation, it only accepts seconds as input. So let's
remove Instant.ofEpochMilli() and use only Instant.ofEpochSecond(). For
converting inputIntOI & inputLongOI, use the conditional ternary operator (? :).
##########
File path:
ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUnixTime.java
##########
@@ -54,28 +51,19 @@
private transient IntObjectInspector inputIntOI;
private transient LongObjectInspector inputLongOI;
- private transient Converter inputTextConverter;
private transient ZoneId timeZone;
private transient final Text result = new Text();
-
- private transient SimpleDateFormat formatter = new
SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
private transient String lastFormat = null;
Review comment:
private transient String lastFormat = "uuuu-MM-dd HH:mm:ss";
private transient DateTimeFormatter FORMATTER =
DateTimeFormatter.ofPattern(lastFormat);
##########
File path:
ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUnixTime.java
##########
@@ -87,89 +75,58 @@ public ObjectInspector initialize(ObjectInspector[]
arguments) throws UDFArgumen
inputLongOI = (LongObjectInspector) arguments[0];
break;
default:
- throw new UDFArgumentException("The function " +
getName().toUpperCase()
- + " takes only int/long types for first argument. Got Type:" +
arg0OI.getPrimitiveCategory().name());
+ throw new UDFArgumentException("The function from_unixtime takes only
int/long types for first argument. Got Type:"
+ + arg0OI.getPrimitiveCategory().name());
}
if (arguments.length == 2) {
- PrimitiveObjectInspector arg1OI = (PrimitiveObjectInspector)
arguments[1];
- switch (arg1OI.getPrimitiveCategory()) {
- case CHAR:
- case VARCHAR:
- case STRING:
- inputTextConverter = ObjectInspectorConverters.getConverter(arg1OI,
- PrimitiveObjectInspectorFactory.javaStringObjectInspector);
- break;
- default:
- throw new UDFArgumentException("The function " +
getName().toUpperCase()
- + " takes only string type for second argument. Got Type:" +
arg1OI.getPrimitiveCategory().name());
- }
+ checkArgGroups(arguments, 1, inputTypes, STRING_GROUP);
+ obtainStringConverter(arguments, 1, inputTypes, converters);
}
- if (timeZone == null) {
- timeZone = SessionState.get() == null ? new
HiveConf().getLocalTimeZone() : SessionState.get().getConf()
- .getLocalTimeZone();
- formatter.setTimeZone(TimeZone.getTimeZone(timeZone));
- }
+ timeZone = SessionState.get() == null ? new HiveConf().getLocalTimeZone()
: SessionState.get().getConf()
+ .getLocalTimeZone();
+ FORMATTER.withZone(timeZone);
return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
}
- @Override
- public void configure(MapredContext context) {
- if (context != null) {
- String timeZoneStr = HiveConf.getVar(context.getJobConf(),
HiveConf.ConfVars.HIVE_LOCAL_TIME_ZONE);
- timeZone = TimestampTZUtil.parseTimeZone(timeZoneStr);
- formatter.setTimeZone(TimeZone.getTimeZone(timeZone));
- }
- }
-
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
if (arguments[0].get() == null) {
return null;
}
- if (inputTextConverter != null) {
- if (arguments[1].get() == null) {
- return null;
- }
- String format = (String) inputTextConverter.convert(arguments[1].get());
+ if(arguments.length == 2) {
+ String format = getStringValue(arguments, 1, converters);
if (format == null) {
return null;
}
if (!format.equals(lastFormat)) {
- formatter = new SimpleDateFormat(format);
- formatter.setTimeZone(TimeZone.getTimeZone(timeZone));
+ FORMATTER = DateTimeFormatter.ofPattern(format);
lastFormat = format;
}
}
// convert seconds to milliseconds
long unixtime;
+ Instant i;
if (inputIntOI != null) {
unixtime = inputIntOI.get(arguments[0].get());
+ i = Instant.ofEpochSecond(unixtime);
} else {
unixtime = inputLongOI.get(arguments[0].get());
+ i = Instant.ofEpochMilli(unixtime * 1000L);
}
- Date date = new Date(unixtime * 1000L);
- result.set(formatter.format(date));
+ ZonedDateTime z = ZonedDateTime.ofInstant(i, timeZone);
Review comment:
nit: naming convention — prefer descriptive names over single-letter locals like `i` and `z` (e.g. `instant`, `zonedDateTime`).
##########
File path:
ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFromUnixTime.java
##########
@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.time.ZoneId;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.common.type.TimestampTZ;
+import org.apache.hadoop.hive.common.type.TimestampTZUtil;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.MapredContext;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.when;
+
+import org.apache.hadoop.mapred.JobConf;
+import org.joda.time.format.DateTimeFormatter;
+import org.joda.time.format.DateTimeFormatterBuilder;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+/**
+ * TestGenericUDFFromUnixTime.
+ */
+public class TestGenericUDFFromUnixTime {
+
+ public static void runAndVerify(GenericUDFFromUnixTime udf,
+ Object arg, Object expected) throws HiveException {
+ DeferredObject[] args = { new DeferredJavaObject(arg) };
+ Object result = udf.evaluate(args);
+ if (expected == null) {
+ assertNull(result);
+ } else {
+ assertEquals(expected.toString(), result.toString());
+ }
+ }
+
+ public static void runAndVerify(GenericUDFFromUnixTime udf,
+ Object arg1, Object arg2, Object expected) throws HiveException {
+ DeferredObject[] args = { new DeferredJavaObject(arg1), new
DeferredJavaObject(arg2) };
+ Object result = udf.evaluate(args);
+
+ if (expected == null) {
+ assertNull(result);
+ } else {
+ assertEquals(expected.toString(), result.toString());
+ }
+ }
+
+ @Test
+ public void testTimestampDefaultTimezone() throws HiveException {
+ ObjectInspector valueLongOI =
PrimitiveObjectInspectorFactory.writableLongObjectInspector;
+ GenericUDFFromUnixTime udf = new GenericUDFFromUnixTime();
+ ObjectInspector args[] = {valueLongOI};
+ udf.initialize(args);
+
+ Timestamp ts = Timestamp.valueOf("1470-01-01 00:00:00");
+ TimestampTZ tstz = TimestampTZUtil.convert(ts, ZoneId.systemDefault());
+
+ runAndVerify(udf,
+ new LongWritable(tstz.getEpochSecond()), new Text("1470-01-01
00:00:00"));
+
+ // test null values
+ runAndVerify(udf, null, null);
+ }
+
+ @Test
+ public void testTimestampOtherTimezone() throws HiveException {
+ ObjectInspector valueLongOI =
PrimitiveObjectInspectorFactory.writableLongObjectInspector;
+ GenericUDFFromUnixTime udf = new GenericUDFFromUnixTime();
+ ObjectInspector args[] = {valueLongOI};
+ udf.initialize(args);
+
+ Timestamp ts = Timestamp.valueOf("2010-01-13 11:57:40");
+ TimestampTZ tstz1 = TimestampTZUtil.convert(ts,
ZoneId.of("America/Los_Angeles"));
+ TimestampTZ tstz2 = TimestampTZUtil.convert(ts,
ZoneId.of("America/New_York"));
+ TimestampTZ tstz3 = TimestampTZUtil.convert(ts,
ZoneId.of("Europe/London"));
+
+ runAndVerify(udf,
+ new LongWritable(tstz1.getEpochSecond()), new Text("2010-01-13
11:57:40"));
+ runAndVerify(udf,
+ new LongWritable(tstz2.getEpochSecond()), new Text("2010-01-13
08:57:40"));
+ runAndVerify(udf,
+ new LongWritable(tstz3.getEpochSecond()), new Text("2010-01-13
03:57:40"));
+ }
+
+ @Test
+ public void testTimestampWithArg2() throws HiveException {
+ ObjectInspector valueLongOI =
PrimitiveObjectInspectorFactory.writableLongObjectInspector;
Review comment:
Can we have the same test with
PrimitiveObjectInspectorFactory.writableIntObjectInspector as well?
##########
File path:
ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFromUnixTime.java
##########
@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.time.ZoneId;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.common.type.TimestampTZ;
+import org.apache.hadoop.hive.common.type.TimestampTZUtil;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.MapredContext;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.when;
+
+import org.apache.hadoop.mapred.JobConf;
+import org.joda.time.format.DateTimeFormatter;
+import org.joda.time.format.DateTimeFormatterBuilder;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+/**
+ * TestGenericUDFFromUnixTime.
+ */
+public class TestGenericUDFFromUnixTime {
+
+ public static void runAndVerify(GenericUDFFromUnixTime udf,
+ Object arg, Object expected) throws HiveException {
+ DeferredObject[] args = { new DeferredJavaObject(arg) };
+ Object result = udf.evaluate(args);
+ if (expected == null) {
+ assertNull(result);
+ } else {
+ assertEquals(expected.toString(), result.toString());
+ }
+ }
+
+ public static void runAndVerify(GenericUDFFromUnixTime udf,
+ Object arg1, Object arg2, Object expected) throws HiveException {
+ DeferredObject[] args = { new DeferredJavaObject(arg1), new
DeferredJavaObject(arg2) };
+ Object result = udf.evaluate(args);
+
+ if (expected == null) {
+ assertNull(result);
+ } else {
+ assertEquals(expected.toString(), result.toString());
+ }
+ }
+
+ @Test
+ public void testTimestampDefaultTimezone() throws HiveException {
+ ObjectInspector valueLongOI =
PrimitiveObjectInspectorFactory.writableLongObjectInspector;
Review comment:
Can we have the same test with
PrimitiveObjectInspectorFactory.writableIntObjectInspector as well?
##########
File path:
ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFromUnixTime.java
##########
@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.time.ZoneId;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.common.type.TimestampTZ;
+import org.apache.hadoop.hive.common.type.TimestampTZUtil;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.MapredContext;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.when;
+
+import org.apache.hadoop.mapred.JobConf;
+import org.joda.time.format.DateTimeFormatter;
+import org.joda.time.format.DateTimeFormatterBuilder;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+/**
+ * TestGenericUDFFromUnixTime.
+ */
+public class TestGenericUDFFromUnixTime {
+
+ public static void runAndVerify(GenericUDFFromUnixTime udf,
+ Object arg, Object expected) throws HiveException {
+ DeferredObject[] args = { new DeferredJavaObject(arg) };
+ Object result = udf.evaluate(args);
+ if (expected == null) {
+ assertNull(result);
+ } else {
+ assertEquals(expected.toString(), result.toString());
+ }
+ }
+
+ public static void runAndVerify(GenericUDFFromUnixTime udf,
+ Object arg1, Object arg2, Object expected) throws HiveException {
+ DeferredObject[] args = { new DeferredJavaObject(arg1), new
DeferredJavaObject(arg2) };
+ Object result = udf.evaluate(args);
+
+ if (expected == null) {
+ assertNull(result);
+ } else {
+ assertEquals(expected.toString(), result.toString());
+ }
+ }
+
+ @Test
+ public void testTimestampDefaultTimezone() throws HiveException {
+ ObjectInspector valueLongOI =
PrimitiveObjectInspectorFactory.writableLongObjectInspector;
+ GenericUDFFromUnixTime udf = new GenericUDFFromUnixTime();
+ ObjectInspector args[] = {valueLongOI};
+ udf.initialize(args);
+
+ Timestamp ts = Timestamp.valueOf("1470-01-01 00:00:00");
+ TimestampTZ tstz = TimestampTZUtil.convert(ts, ZoneId.systemDefault());
+
+ runAndVerify(udf,
+ new LongWritable(tstz.getEpochSecond()), new Text("1470-01-01
00:00:00"));
+
+ // test null values
+ runAndVerify(udf, null, null);
+ }
+
+ @Test
+ public void testTimestampOtherTimezone() throws HiveException {
+ ObjectInspector valueLongOI =
PrimitiveObjectInspectorFactory.writableLongObjectInspector;
Review comment:
Can we have the same test with
PrimitiveObjectInspectorFactory.writableIntObjectInspector as well?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]