Reynold Xin created SPARK-8279:
----------------------------------
Summary: udf_round_3 test fails
Key: SPARK-8279
URL: https://issues.apache.org/jira/browse/SPARK-8279
Project: Spark
Issue Type: Sub-task
Reporter: Reynold Xin
Priority: Blocker
Query:
{code}
select round(cast(negative(pow(2, 31)) as INT)), round(cast((pow(2, 31) - 1) as INT)), round(-32769), round(32768) from src tablesample (1 rows);
{code}
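
The test fails during analysis, before the query ever runs: per the stack trace below, Catalyst evaluates the constant arguments of pow while constructing the Hive object inspectors for round, and BinaryMathExpression.eval tries to unbox the Integer literals as Doubles.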
{code}
[info] - udf_round_3 *** FAILED *** (4 seconds, 803 milliseconds)
[info] Failed to execute query using catalyst:
[info] Error: java.lang.Integer cannot be cast to java.lang.Double
[info] java.lang.ClassCastException: java.lang.Integer cannot be cast to java.lang.Double
[info] at scala.runtime.BoxesRunTime.unboxToDouble(BoxesRunTime.java:119)
[info] at org.apache.spark.sql.catalyst.expressions.BinaryMathExpression.eval(math.scala:86)
[info] at org.apache.spark.sql.hive.HiveInspectors$class.toInspector(HiveInspectors.scala:628)
[info] at org.apache.spark.sql.hive.HiveGenericUdf.toInspector(hiveUdfs.scala:148)
[info] at org.apache.spark.sql.hive.HiveGenericUdf$$anonfun$argumentInspectors$1.apply(hiveUdfs.scala:160)
[info] at org.apache.spark.sql.hive.HiveGenericUdf$$anonfun$argumentInspectors$1.apply(hiveUdfs.scala:160)
[info] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
[info] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
[info] at scala.collection.immutable.List.foreach(List.scala:318)
[info] at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
[info] at scala.collection.AbstractTraversable.map(Traversable.scala:105)
[info] at org.apache.spark.sql.hive.HiveGenericUdf.argumentInspectors$lzycompute(hiveUdfs.scala:160)
[info] at org.apache.spark.sql.hive.HiveGenericUdf.argumentInspectors(hiveUdfs.scala:160)
[info] at org.apache.spark.sql.hive.HiveGenericUdf.returnInspector$lzycompute(hiveUdfs.scala:164)
[info] at org.apache.spark.sql.hive.HiveGenericUdf.returnInspector(hiveUdfs.scala:163)
[info] at org.apache.spark.sql.hive.HiveGenericUdf.dataType$lzycompute(hiveUdfs.scala:180)
[info] at org.apache.spark.sql.hive.HiveGenericUdf.dataType(hiveUdfs.scala:180)
[info] at org.apache.spark.sql.catalyst.expressions.Cast.resolved$lzycompute(Cast.scala:31)
[info] at org.apache.spark.sql.catalyst.expressions.Cast.resolved(Cast.scala:31)
[info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$childrenResolved$1.apply(Expression.scala:121)
[info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$childrenResolved$1.apply(Expression.scala:121)
[info] at scala.collection.LinearSeqOptimized$class.forall(LinearSeqOptimized.scala:70)
[info] at scala.collection.immutable.List.forall(List.scala:84)
[info] at org.apache.spark.sql.catalyst.expressions.Expression.childrenResolved(Expression.scala:121)
[info] at org.apache.spark.sql.catalyst.expressions.Expression.resolved$lzycompute(Expression.scala:109)
[info] at org.apache.spark.sql.catalyst.expressions.Expression.resolved(Expression.scala:109)
[info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$childrenResolved$1.apply(Expression.scala:121)
[info] at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$childrenResolved$1.apply(Expression.scala:121)
[info] at scala.collection.LinearSeqOptimized$class.forall(LinearSeqOptimized.scala:70)
[info] at scala.collection.immutable.List.forall(List.scala:84)
[info] at org.apache.spark.sql.catalyst.expressions.Expression.childrenResolved(Expression.scala:121)
[info] at org.apache.spark.sql.catalyst.analysis.HiveTypeCoercion$ConvertNaNs$$anonfun$apply$2$$anonfun$applyOrElse$2.applyOrElse(HiveTypeCoercion.scala:138)
[info] at org.apache.spark.sql.catalyst.analysis.HiveTypeCoercion$ConvertNaNs$$anonfun$apply$2$$anonfun$applyOrElse$2.applyOrElse(HiveTypeCoercion.scala:136)
[info] at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:222)
[info] at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:222)
[info] at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:51)
[info] at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:221)
[info] at org.apache.spark.sql.catalyst.plans.QueryPlan.org$apache$spark$sql$catalyst$plans$QueryPlan$$transformExpressionDown$1(QueryPlan.scala:75)
[info] at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$1$$anonfun$apply$1.apply(QueryPlan.scala:90)
[info] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
[info] at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
[info] at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
[info] at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
[info] at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
[info] at scala.collection.AbstractTraversable.map(Traversable.scala:105)
[info] at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$1.apply(QueryPlan.scala:89)
[info] at scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
[info] at scala.collection.Iterator$class.foreach(Iterator.scala:727)
[info] at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
[info] at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
[info] at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
[info] at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
[info] at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
[info] at scala.collection.AbstractIterator.to(Iterator.scala:1157)
[info] at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
[info] at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
[info] at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
[info] at scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
{code}
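
Reading the trace: the analyzer eagerly evaluates the constant pow(2, 31) subexpression while building the Hive object inspectors for round (HiveInspectors.toInspector), and BinaryMathExpression.eval unboxes its inputs as java.lang.Double even though the integer literals are still boxed as java.lang.Integer. Below is a minimal standalone sketch of that unboxing failure; it is illustrative Scala only, not Spark's actual eval code, and RoundCastSketch is a made-up name:

{code}
// Hypothetical sketch (not Spark source): reproduces the exact
// ClassCastException from the trace outside of Spark.
object RoundCastSketch {
  def main(args: Array[String]): Unit = {
    // Stand-in for an IntegerType literal child of pow(2, 31).
    val boxed: Any = Integer.valueOf(2)

    // What eval effectively does today: asInstanceOf[Double] on an Any
    // compiles to BoxesRunTime.unboxToDouble, which casts the boxed value
    // to java.lang.Double and therefore throws on a java.lang.Integer.
    try {
      println(boxed.asInstanceOf[Double])
    } catch {
      case e: ClassCastException =>
        println(s"same failure as the trace: $e")
    }

    // What a numeric widening would do instead of a blind unbox.
    val widened: Double = boxed match {
      case n: java.lang.Number => n.doubleValue()
      case other               => sys.error(s"non-numeric input: $other")
    }
    println(widened) // prints 2.0
  }
}
{code}

Presumably the fix is either to have the type coercion rules insert an explicit Int-to-Double cast under pow before anything calls eval, or to make the binary math expressions widen numeric inputs rather than unboxing them directly.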