Github user HyukjinKwon commented on a diff in the pull request: https://github.com/apache/spark/pull/22646#discussion_r225016843 --- Diff: sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala --- @@ -1115,9 +1126,38 @@ object SQLContext { }) } } - def createConverter(cls: Class[_], dataType: DataType): Any => Any = dataType match { - case struct: StructType => createStructConverter(cls, struct.map(_.dataType)) - case _ => CatalystTypeConverters.createToCatalystConverter(dataType) + def createConverter(t: Type, dataType: DataType): Any => Any = (t, dataType) match { --- End diff -- BTW, how about we put this method in `CatalystTypeConverters`? It looks like it is a Catalyst converter for beans. A few Java types, such as `java.lang.Iterable`, `java.math.BigDecimal`, and `java.math.BigInteger`, are already being handled there.
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org