Github user kiszk commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21021#discussion_r185378543
  
    --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala ---
    @@ -191,28 +161,202 @@ case class SortArray(base: Expression, ascendingOrder: Expression)
             if (o1 == null && o2 == null) {
               0
             } else if (o1 == null) {
    -          1
    +          -nullOrder
             } else if (o2 == null) {
    -          -1
    +          nullOrder
             } else {
               -ordering.compare(o1, o2)
             }
           }
         }
       }
     
    -  override def nullSafeEval(array: Any, ascending: Any): Any = {
    -    val elementType = base.dataType.asInstanceOf[ArrayType].elementType
    +  def elementType: DataType = arrayExpression.dataType.asInstanceOf[ArrayType].elementType
    +
    +  def sortEval(array: Any, ascending: Boolean): Any = {
         val data = array.asInstanceOf[ArrayData].toArray[AnyRef](elementType)
         if (elementType != NullType) {
    -      java.util.Arrays.sort(data, if (ascending.asInstanceOf[Boolean]) lt else gt)
    +      java.util.Arrays.sort(data, if (ascending) lt else gt)
         }
         new GenericArrayData(data.asInstanceOf[Array[Any]])
       }
     
    +  def sortCodegen(ctx: CodegenContext, ev: ExprCode, base: String, order: String): String = {
    +    val arrayData = classOf[ArrayData].getName
    +    val genericArrayData = classOf[GenericArrayData].getName
    +    val array = ctx.freshName("array")
    +    val c = ctx.freshName("c")
    +    val dataTypes = elementType match {
    +      case DecimalType.Fixed(p, s) =>
    +        s"org.apache.spark.sql.types.DataTypes.createDecimalType($p, $s)"
    +      case ArrayType(et, cn) =>
    +        val dt = s"org.apache.spark.sql.types.$et$$.MODULE$$"
    +        s"org.apache.spark.sql.types.DataTypes.createArrayType($dt, $cn)"
    +      case StructType(f) =>
    +        "org.apache.spark.sql.types.StructType$.MODULE$." +
    +          s"apply(new java.util.ArrayList(${f.length}))"
    +      case _ =>
    +        s"org.apache.spark.sql.types.$elementType$$.MODULE$$"
    +    }
    --- End diff --
    
    Definitely. I have added some complex test cases with nested element types.
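
    For illustration, here is a minimal sketch of the kind of nested case such tests need to cover (the SQL form and the expected output below are my illustration, not the actual test code added in the PR):

        // Hypothetical illustration: sort_array over an array of arrays, which
        // exercises the ArrayType branch of the dataTypes mapping shown above.
        import org.apache.spark.sql.SparkSession

        val spark = SparkSession.builder()
          .master("local[*]")
          .appName("sort_array nested sketch")
          .getOrCreate()

        spark.sql("SELECT sort_array(array(array(3, 4), array(1, 2), array(1)))")
          .show(false)
        // expected single row: [[1], [1, 2], [3, 4]]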

