sankarh commented on a change in pull request #2550:
URL: https://github.com/apache/hive/pull/2550#discussion_r684301709
##########
File path: ql/src/test/queries/clientpositive/foldts.q
##########
@@ -23,14 +23,14 @@ select from_unixtime(unix_timestamp(ctimestamp1), 'EEEE')
from alltypesorc limit
select from_unixtime(to_unix_timestamp(ctimestamp1)) from alltypesorc limit 1;
-select from_unixtime(unix_timestamp(ctimestamp1) ,"yyyy-MM-dd'T'HH:mm:ssXXX")
from alltypesorc limit 1;
+select from_unixtime(unix_timestamp(ctimestamp1) ,"uuuu-MM-dd'T'HH:mm:ssXXX")
from alltypesorc limit 1;
Review comment:
We should allow the format "yyyy" too, as many existing users might have
this. Instead of overwriting the existing test, we can add a new one with "uuuu".
##########
File path: ql/src/test/queries/clientpositive/udf5.q
##########
@@ -13,6 +13,14 @@ SELECT from_unixtime(unix_timestamp('2010-01-13 11:57:40',
'yyyy-MM-dd HH:mm:ss'
SELECT from_unixtime(unix_timestamp('2010-01-13 11:57:40', 'yyyy-MM-dd
HH:mm:ss'), 'MM/dd/yy HH:mm:ss'), from_unixtime(unix_timestamp('2010-01-13
11:57:40')) from dest1_n14;
+SELECT from_unixtime(unix_timestamp(cast('2010-01-13' as date)));
+
+SELECT from_unixtime(unix_timestamp('2010-01-13 11:57:40', 'yyyy-MM-dd
HH:mm:ss'), 'MM/dd/yy HH:mm:ss');
+
+select from_unixtime(unix_timestamp('2010-01-13 11:57:40'), 'EEEE');
+
+select from_unixtime(unix_timestamp(cast('2010-01-13 11:57:40' || '
America/Los_Angeles' as timestamp with local time zone))
,"yyyy-MM-dd'T'HH:mm:ssXXX") ;
+
Review comment:
Can you add all the test cases listed in the JIRA description to confirm
that we get output matching Hive 1.2?
##########
File path:
ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUnixTime.java
##########
@@ -18,158 +18,101 @@
package org.apache.hadoop.hive.ql.udf.generic;
-import java.text.SimpleDateFormat;
+import java.time.Instant;
import java.time.ZoneId;
-import java.util.Date;
-import java.util.TimeZone;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.hive.common.type.TimestampTZUtil;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.MapredContext;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import
org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import
org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
import
org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
import
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
+import static
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP;
/**
* GenericUDFFromUnixTime.
*
*/
@Description(name = "from_unixtime",
- value = "_FUNC_(unix_time, format) - returns unix_time in the specified
format",
+ value = "_FUNC_(unix_time_in_seconds, format) - returns unix_time in the
specified format",
extended = "Example:\n"
- + " > SELECT _FUNC_(0, 'yyyy-MM-dd HH:mm:ss') FROM src LIMIT 1;\n"
+ + " > SELECT _FUNC_(0, 'uuuu-MM-dd HH:mm:ss') FROM src LIMIT 1;\n"
+ " '1970-01-01 00:00:00'")
public class GenericUDFFromUnixTime extends GenericUDF {
- private transient IntObjectInspector inputIntOI;
private transient LongObjectInspector inputLongOI;
- private transient Converter inputTextConverter;
private transient ZoneId timeZone;
private transient final Text result = new Text();
-
- private transient SimpleDateFormat formatter = new
SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
- private transient String lastFormat = null;
-
+ private transient String lastFormat ="uuuu-MM-dd HH:mm:ss";
+ private transient DateTimeFormatter FORMATTER =
DateTimeFormatter.ofPattern(lastFormat);
+ private transient Converter[] converters = new Converter[2];
+ private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes =
new PrimitiveObjectInspector.PrimitiveCategory[2];
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws
UDFArgumentException {
- if (arguments.length < 1) {
- throw new UDFArgumentLengthException("The function " +
getName().toUpperCase() +
- "requires at least one argument");
- }
- if (arguments.length > 2) {
- throw new UDFArgumentLengthException("Too many arguments for the
function " + getName().toUpperCase());
- }
- for (ObjectInspector argument : arguments) {
- if (argument.getCategory() != Category.PRIMITIVE) {
- throw new UDFArgumentException(getName().toUpperCase() +
- " only takes primitive types, got " + argument.getTypeName());
- }
+ checkArgsSize(arguments, 1, 2);
+
+ for (int i = 0; i < arguments.length; i++) {
+ checkArgPrimitive(arguments, i);
}
PrimitiveObjectInspector arg0OI = (PrimitiveObjectInspector) arguments[0];
- switch (arg0OI.getPrimitiveCategory()) {
- case INT:
- inputIntOI = (IntObjectInspector) arguments[0];
- break;
- case LONG:
- inputLongOI = (LongObjectInspector) arguments[0];
- break;
- default:
- throw new UDFArgumentException("The function " +
getName().toUpperCase()
- + " takes only int/long types for first argument. Got Type:" +
arg0OI.getPrimitiveCategory().name());
+ if(arg0OI.getPrimitiveCategory() ==
PrimitiveObjectInspector.PrimitiveCategory.LONG) {
Review comment:
The old code also allows INT values. Shouldn't we keep accepting INT for backward compatibility?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]