[ https://issues.apache.org/jira/browse/HIVE-6522?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Jason Dere updated HIVE-6522:
-----------------------------

    Description: 
The following test fails:

{code}
hive> describe dec4;
OK
key                     string                  from deserializer   
c1                      string                  from deserializer   
c2                      decimal(10,2)           from deserializer   
Time taken: 0.716 seconds, Fetched: 3 row(s)
hive> select * from dec4;
OK
484     484     484
98      NULL    NULL
278     NULL    NULL
255     255     255
409     NULL    NULL
165     165     165
27      27      27
311     NULL    NULL
86      NULL    NULL
238     NULL    NULL
Time taken: 0.262 seconds, Fetched: 10 row(s)
hive> select avg(cast(key as decimal(3,0))) from dec4;
...

Task failed!
Task ID:
  Stage-1

Logs:

/tmp/jdere/hive.log
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
{code}
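
For context: AVG over a decimal column is computed as a partial aggregate, roughly a (count, sum) pair per group, that the reduce side merges, and the trace below fails while reading the decimal field of such a partial result. A simplified, Hive-independent sketch of that shape (illustrative names only, not Hive's actual classes):

{code}
import java.math.BigDecimal;
import java.math.MathContext;

// Illustrative sketch only. Hive's GenericUDAFAverage keeps a comparable
// (count, sum) pair per group and merges serialized pairs on the reduce side.
public class AvgPartialSketch {
    long count;
    BigDecimal sum = BigDecimal.ZERO; // in Hive this is a decimal writable

    // Map side: fold one input value into the partial aggregate.
    void iterate(BigDecimal value) {
        if (value != null) {
            count++;
            sum = sum.add(value);
        }
    }

    // Reduce side: combine another partial aggregate into this one.
    // Reading the serialized decimal "sum" field is where the trace below fails.
    void merge(AvgPartialSketch other) {
        count += other.count;
        sum = sum.add(other.sum);
    }

    // Final result: null for an empty group, otherwise sum / count.
    BigDecimal terminate() {
        return count == 0 ? null : sum.divide(BigDecimal.valueOf(count), MathContext.DECIMAL128);
    }
}
{code}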

The logs show the following stack trace. 

{noformat}
java.lang.RuntimeException: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row (tag=0) [Error getting row data with exception java.lang.NumberFormatException: Zero length BigInteger
        at java.math.BigInteger.<init>(BigInteger.java:171)
        at org.apache.hadoop.hive.serde2.io.HiveDecimalWritable.getHiveDecimal(HiveDecimalWritable.java:85)
        at org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveDecimalObjectInspector.getPrimitiveJavaObject(WritableHiveDecimalObjectInspector.java:43)
        at org.apache.hadoop.hive.serde2.SerDeUtils.buildJSONString(SerDeUtils.java:322)
        at org.apache.hadoop.hive.serde2.SerDeUtils.buildJSONString(SerDeUtils.java:392)
        at org.apache.hadoop.hive.serde2.SerDeUtils.buildJSONString(SerDeUtils.java:392)
        at org.apache.hadoop.hive.serde2.SerDeUtils.buildJSONString(SerDeUtils.java:392)
        at org.apache.hadoop.hive.serde2.SerDeUtils.getJSONString(SerDeUtils.java:236)
        at org.apache.hadoop.hive.serde2.SerDeUtils.getJSONString(SerDeUtils.java:222)
        at org.apache.hadoop.hive.ql.exec.mr.ExecReducer.reduce(ExecReducer.java:265)
        at org.apache.hadoop.mapred.ReduceTask.runOldReducer(ReduceTask.java:462)
        at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:408)
        at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:443)
 ]
        at org.apache.hadoop.hive.ql.exec.mr.ExecReducer.reduce(ExecReducer.java:282)
        at org.apache.hadoop.mapred.ReduceTask.runOldReducer(ReduceTask.java:462)
        at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:408)
        at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:443)
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row (tag=0) [Error getting row data with exception java.lang.NumberFormatException: Zero length BigInteger
        at java.math.BigInteger.<init>(BigInteger.java:171)
        at org.apache.hadoop.hive.serde2.io.HiveDecimalWritable.getHiveDecimal(HiveDecimalWritable.java:85)
        at org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveDecimalObjectInspector.getPrimitiveJavaObject(WritableHiveDecimalObjectInspector.java:43)
        at org.apache.hadoop.hive.serde2.SerDeUtils.buildJSONString(SerDeUtils.java:322)
        at org.apache.hadoop.hive.serde2.SerDeUtils.buildJSONString(SerDeUtils.java:392)
        at org.apache.hadoop.hive.serde2.SerDeUtils.buildJSONString(SerDeUtils.java:392)
        at org.apache.hadoop.hive.serde2.SerDeUtils.buildJSONString(SerDeUtils.java:392)
        at org.apache.hadoop.hive.serde2.SerDeUtils.getJSONString(SerDeUtils.java:236)
        at org.apache.hadoop.hive.serde2.SerDeUtils.getJSONString(SerDeUtils.java:222)
        at org.apache.hadoop.hive.ql.exec.mr.ExecReducer.reduce(ExecReducer.java:265)
        at org.apache.hadoop.mapred.ReduceTask.runOldReducer(ReduceTask.java:462)
        at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:408)
        at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:443)
 ]
        at org.apache.hadoop.hive.ql.exec.mr.ExecReducer.reduce(ExecReducer.java:270)
        ... 3 more
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.NumberFormatException: Zero length BigInteger
        at org.apache.hadoop.hive.ql.exec.GroupByOperator.processOp(GroupByOperator.java:808)
        at org.apache.hadoop.hive.ql.exec.mr.ExecReducer.reduce(ExecReducer.java:261)
        ... 3 more
Caused by: java.lang.NumberFormatException: Zero length BigInteger
        at java.math.BigInteger.<init>(BigInteger.java:171)
        at org.apache.hadoop.hive.serde2.io.HiveDecimalWritable.getHiveDecimal(HiveDecimalWritable.java:96)
        at org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryHiveDecimal.init(LazyBinaryHiveDecimal.java:48)
        at org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryStruct.uncheckedGetField(LazyBinaryStruct.java:216)
        at org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryStruct.getField(LazyBinaryStruct.java:197)
        at org.apache.hadoop.hive.serde2.lazybinary.objectinspector.LazyBinaryStructObjectInspector.getStructFieldData(LazyBinaryStructObjectInspector.java:64)
        at org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage$AbstractGenericUDAFAverageEvaluator.merge(GenericUDAFAverage.java:353)
        at org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.aggregate(GenericUDAFEvaluator.java:186)
        at org.apache.hadoop.hive.ql.exec.GroupByOperator.updateAggregations(GroupByOperator.java:641)
        at org.apache.hadoop.hive.ql.exec.GroupByOperator.processAggr(GroupByOperator.java:905)
        at org.apache.hadoop.hive.ql.exec.GroupByOperator.processKey(GroupByOperator.java:737)
        at org.apache.hadoop.hive.ql.exec.GroupByOperator.processOp(GroupByOperator.java:803)
        ... 4 more
{noformat}
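
The "Zero length BigInteger" message is thrown by java.math.BigInteger's byte[] constructor when it is handed an empty array; the trace above shows HiveDecimalWritable.getHiveDecimal() constructing a BigInteger directly, so an empty internal buffer would reproduce exactly this exception. A minimal JDK-only demonstration (not Hive code):

{code}
import java.math.BigInteger;

public class ZeroLengthBigIntegerDemo {
    public static void main(String[] args) {
        // Stand-in for an unset/zero-length decimal buffer reaching
        // HiveDecimalWritable.getHiveDecimal() during the reduce-side merge.
        byte[] empty = new byte[0];
        try {
            new BigInteger(empty);
        } catch (NumberFormatException e) {
            System.out.println(e.getMessage()); // prints: Zero length BigInteger
        }
    }
}
{code}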



> AVG() failure with decimal type
> -------------------------------
>
>                 Key: HIVE-6522
>                 URL: https://issues.apache.org/jira/browse/HIVE-6522
>             Project: Hive
>          Issue Type: Bug
>          Components: UDF
>    Affects Versions: 0.13.0
>            Reporter: Jason Dere
>



--
This message was sent by Atlassian JIRA
(v6.1.5#6160)
