[ 
https://issues.apache.org/jira/browse/SPARK-31552?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Dongjoon Hyun updated SPARK-31552:
----------------------------------
    Affects Version/s: 2.2.3

> Fix potential ClassCastException in ScalaReflection arrayClassFor
> -----------------------------------------------------------------
>
>                 Key: SPARK-31552
>                 URL: https://issues.apache.org/jira/browse/SPARK-31552
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>    Affects Versions: 2.2.3, 2.3.4, 2.4.5, 3.0.0, 3.1.0
>            Reporter: Kent Yao
>            Priority: Major
>
> arrayClassFor and dataTypeFor in ScalaReflection call each other circularly, 
> but the cases handled in dataTypeFor are not fully handled in arrayClassFor.
> For example:
> {code:java}
> scala> import scala.reflect.runtime.universe.TypeTag
> scala> import org.apache.spark.sql._
> scala> import org.apache.spark.sql.catalyst.encoders._
> scala> import org.apache.spark.sql.types._
> scala> implicit def newArrayEncoder[T <: Array[_] : TypeTag]: Encoder[T] = 
> ExpressionEncoder()
> newArrayEncoder: [T <: Array[_]](implicit evidence$1: 
> reflect.runtime.universe.TypeTag[T])org.apache.spark.sql.Encoder[T]
> scala> val decOne = Decimal(1, 38, 18)
> decOne: org.apache.spark.sql.types.Decimal = 1E-18
> scala>     val decTwo = Decimal(2, 38, 18)
> decTwo: org.apache.spark.sql.types.Decimal = 2E-18
> scala>     val decSpark = Array(decOne, decTwo)
> decSpark: Array[org.apache.spark.sql.types.Decimal] = Array(1E-18, 2E-18)
> scala> Seq(decSpark).toDF()
> java.lang.ClassCastException: org.apache.spark.sql.types.DecimalType cannot 
> be cast to org.apache.spark.sql.types.ObjectType
>   at 
> org.apache.spark.sql.catalyst.ScalaReflection$.$anonfun$arrayClassFor$1(ScalaReflection.scala:131)
>   at 
> scala.reflect.internal.tpe.TypeConstraints$UndoLog.undo(TypeConstraints.scala:69)
>   at 
> org.apache.spark.sql.catalyst.ScalaReflection.cleanUpReflectionObjects(ScalaReflection.scala:879)
>   at 
> org.apache.spark.sql.catalyst.ScalaReflection.cleanUpReflectionObjects$(ScalaReflection.scala:878)
>   at 
> org.apache.spark.sql.catalyst.ScalaReflection$.cleanUpReflectionObjects(ScalaReflection.scala:49)
>   at 
> org.apache.spark.sql.catalyst.ScalaReflection$.arrayClassFor(ScalaReflection.scala:120)
>   at 
> org.apache.spark.sql.catalyst.ScalaReflection$.$anonfun$dataTypeFor$1(ScalaReflection.scala:105)
>   at 
> scala.reflect.internal.tpe.TypeConstraints$UndoLog.undo(TypeConstraints.scala:69)
>   at 
> org.apache.spark.sql.catalyst.ScalaReflection.cleanUpReflectionObjects(ScalaReflection.scala:879)
>   at 
> org.apache.spark.sql.catalyst.ScalaReflection.cleanUpReflectionObjects$(ScalaReflection.scala:878)
>   at 
> org.apache.spark.sql.catalyst.ScalaReflection$.cleanUpReflectionObjects(ScalaReflection.scala:49)
>   at 
> org.apache.spark.sql.catalyst.ScalaReflection$.dataTypeFor(ScalaReflection.scala:88)
>   at 
> org.apache.spark.sql.catalyst.ScalaReflection$.$anonfun$serializerForType$1(ScalaReflection.scala:399)
>   at 
> scala.reflect.internal.tpe.TypeConstraints$UndoLog.undo(TypeConstraints.scala:69)
>   at 
> org.apache.spark.sql.catalyst.ScalaReflection.cleanUpReflectionObjects(ScalaReflection.scala:879)
>   at 
> org.apache.spark.sql.catalyst.ScalaReflection.cleanUpReflectionObjects$(ScalaReflection.scala:878)
>   at 
> org.apache.spark.sql.catalyst.ScalaReflection$.cleanUpReflectionObjects(ScalaReflection.scala:49)
>   at 
> org.apache.spark.sql.catalyst.ScalaReflection$.serializerForType(ScalaReflection.scala:393)
>   at 
> org.apache.spark.sql.catalyst.encoders.ExpressionEncoder$.apply(ExpressionEncoder.scala:57)
>   at newArrayEncoder(<console>:57)
>   ... 53 elided
> scala>
> {code}



--
This message was sent by Atlassian Jira
(v8.3.4#803005)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org

Reply via email to