GitHub user ueshin commented on a diff in the pull request:

    https://github.com/apache/spark/pull/22527#discussion_r222520022
  
    --- Diff: sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala ---
    @@ -1098,16 +1098,26 @@ object SQLContext {
           data: Iterator[_],
           beanClass: Class[_],
           attrs: Seq[AttributeReference]): Iterator[InternalRow] = {
     -    val extractors =
     -      JavaTypeInference.getJavaBeanReadableProperties(beanClass).map(_.getReadMethod)
     -    val methodsToConverts = extractors.zip(attrs).map { case (e, attr) =>
     -      (e, CatalystTypeConverters.createToCatalystConverter(attr.dataType))
     +    def createStructConverter(cls: Class[_], fieldTypes: Seq[DataType]): Any => InternalRow = {
     +      val methodConverters =
     +        JavaTypeInference.getJavaBeanReadableProperties(cls).zip(fieldTypes)
    +          .map { case (property, fieldType) =>
    +            val method = property.getReadMethod
    +            method -> createConverter(method.getReturnType, fieldType)
    +          }
    +      value =>
    +        if (value == null) null
    +        else new GenericInternalRow(
    +          methodConverters.map { case (method, converter) =>
    +            converter(method.invoke(value))
    +          })
    --- End diff --
    
    nit: please use braces for the multi-line if-else.
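    
    For illustration only, here is a minimal standalone sketch of the same reflective
    bean-to-row pattern with the if-else written with braces, as suggested above. This
    is not the PR code: Spark's per-field createConverter and GenericInternalRow are
    stood in by an identity converter and a plain Array[Any].
    
        import java.beans.Introspector
        
        object BeanConverterSketch {
          // Reflectively build a converter from a Java bean class to a flat row.
          // (Stand-in for the PR's createStructConverter; the per-field converter
          // is replaced by identity, and GenericInternalRow by Array[Any].)
          def createStructConverter(cls: Class[_]): Any => Array[Any] = {
            val methodConverters = Introspector.getBeanInfo(cls).getPropertyDescriptors
              .filterNot(_.getName == "class")      // drop Object#getClass
              .filter(_.getReadMethod != null)
              .map { property =>
                val method = property.getReadMethod
                method -> ((v: Any) => v)           // placeholder converter
              }
            value =>
              if (value == null) {
                null
              } else {
                methodConverters.map { case (method, converter) =>
                  converter(method.invoke(value))
                }
              }
          }
        }
    
    Calling createStructConverter(classOf[SomeBean]) returns a function that maps a
    SomeBean instance (or null) to an array of its property values (SomeBean being a
    hypothetical bean class used only for this example).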

