zhengruifeng commented on code in PR #38867:
URL: https://github.com/apache/spark/pull/38867#discussion_r1069045751


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala:
##########
@@ -4601,6 +4601,230 @@ case class ArrayExcept(left: Expression, right: Expression) extends ArrayBinaryL
     newLeft: Expression, newRight: Expression): ArrayExcept = copy(left = newLeft, right = newRight)
 }
 
+@ExpressionDescription(
+  usage = "_FUNC_(x, pos, val) - Places val into index pos of array x (array 
indices start at 1)",

Review Comment:
   ```suggestion
     usage = "_FUNC_(x, pos, val) - Places val into index pos of array x (array 
indices start at 0)",
   ```
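   Presumably this is because of the documented examples (quoted further down): `_FUNC_(array(1, 2, 3, 4), 4, 5)` returning `[1,2,3,4,5]` only works out if the inserted element lands at a 0-based index 4, one past the last valid index 3.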



##########
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala:
##########
@@ -2250,6 +2250,77 @@ class CollectionExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper
       Seq(2d))
   }
 
+  test("Array Insert") {
+    val a1 = Literal.create(Seq(1, 2, 4), ArrayType(IntegerType))
+    val a2 = Literal.create(Seq(1, 2, null, 4, 5, null), ArrayType(IntegerType))
+    val a3 = Literal.create(Seq[Boolean](true, false, true), ArrayType(BooleanType))
+    val a4 = Literal.create(Seq[Byte](1, 2, 3, 2), ArrayType(ByteType))
+    val a7 = Literal.create(Seq[Short](1, 2, 3, 2), ArrayType(ShortType))
+    val a9 = Literal.create(Seq[Float](1.1F, 2.2F, 3.3F, 2.2F), ArrayType(FloatType))
+    val a11 = Literal.create(Seq[Double](1.1, 2.2, 3.3, 2.2), ArrayType(DoubleType))
+    val a13 = Literal.create(Seq(1L, 2L, 4L), ArrayType(LongType))
+    val a15 = Literal.create(Seq("b", "a", "c"), ArrayType(StringType, false))
+    val a16 = Literal.create(Seq("b", null, "a", "g", null), ArrayType(StringType, true))
+    val a18 = Literal.create(null, ArrayType(StringType))
+
+    val litTwoInt = Literal.create(2, IntegerType)
+    val litThreeInt = Literal.create(3, IntegerType)
+    val litOneInt = Literal.create(1, IntegerType)
+    val litFourInt = Literal.create(4, IntegerType)
+    val litNullInt = Literal.create(null, IntegerType)
+    val litZeroInt = Literal.create(0, IntegerType)
+    val litTenInt = Literal.create(10, IntegerType)
+    val litMinusTwoInt = Literal.create(-2, IntegerType)
+    val litMinusTenInt = Literal.create(-10, IntegerType)
+    val litThreeLong = Literal.create(3L, LongType)
+    val litBoolTrue = Literal.create(true, BooleanType)
+    val litThreeByte = Literal.create(5.asInstanceOf[Byte], ByteType)

Review Comment:
   `litFiveByte` ?
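   (i.e. the literal holds the value `5`, so the `Three` in the name is misleading)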



##########
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala:
##########
@@ -2250,6 +2250,77 @@ class CollectionExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper
       Seq(2d))
   }
 
+  test("Array Insert") {
+    val a1 = Literal.create(Seq(1, 2, 4), ArrayType(IntegerType))
+    val a2 = Literal.create(Seq(1, 2, null, 4, 5, null), ArrayType(IntegerType))
+    val a3 = Literal.create(Seq[Boolean](true, false, true), ArrayType(BooleanType))
+    val a4 = Literal.create(Seq[Byte](1, 2, 3, 2), ArrayType(ByteType))
+    val a7 = Literal.create(Seq[Short](1, 2, 3, 2), ArrayType(ShortType))
+    val a9 = Literal.create(Seq[Float](1.1F, 2.2F, 3.3F, 2.2F), ArrayType(FloatType))
+    val a11 = Literal.create(Seq[Double](1.1, 2.2, 3.3, 2.2), ArrayType(DoubleType))
+    val a13 = Literal.create(Seq(1L, 2L, 4L), ArrayType(LongType))
+    val a15 = Literal.create(Seq("b", "a", "c"), ArrayType(StringType, false))
+    val a16 = Literal.create(Seq("b", null, "a", "g", null), ArrayType(StringType, true))
+    val a18 = Literal.create(null, ArrayType(StringType))
+
+    val litTwoInt = Literal.create(2, IntegerType)
+    val litThreeInt = Literal.create(3, IntegerType)
+    val litOneInt = Literal.create(1, IntegerType)
+    val litFourInt = Literal.create(4, IntegerType)
+    val litNullInt = Literal.create(null, IntegerType)
+    val litZeroInt = Literal.create(0, IntegerType)
+    val litTenInt = Literal.create(10, IntegerType)
+    val litMinusTwoInt = Literal.create(-2, IntegerType)
+    val litMinusTenInt = Literal.create(-10, IntegerType)
+    val litThreeLong = Literal.create(3L, LongType)
+    val litBoolTrue = Literal.create(true, BooleanType)
+    val litThreeByte = Literal.create(5.asInstanceOf[Byte], ByteType)
+    val litThreeShort = Literal.create(3.asInstanceOf[Short], ShortType)
+    val litFourFourFloat = Literal.create(4.4F, FloatType)
+    val litFourFourDouble = Literal.create(4.4, DoubleType)
+    val litDString = Literal.create("d", StringType)
+
+    // basic additions per type
+    checkEvaluation(ArrayInsert(a1, litTwoInt, litThreeInt), Seq(1, 2, 3, 4))

Review Comment:
   What about directly using the `Literal` expressions for `pos` and `value`?
   
   e.g.
   
   ```suggestion
       checkEvaluation(ArrayInsert(a1, Literal(2), Literal.create(3, IntegerType)), Seq(1, 2, 3, 4))
   ```
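   (`Literal(2)` already infers `IntegerType` on its own, so most of the named `lit*Int` vals above could probably be dropped.)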



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala:
##########
@@ -4601,6 +4601,230 @@ case class ArrayExcept(left: Expression, right: Expression) extends ArrayBinaryL
     newLeft: Expression, newRight: Expression): ArrayExcept = copy(left = newLeft, right = newRight)
 }
 
+@ExpressionDescription(
+  usage = "_FUNC_(x, pos, val) - Places val into index pos of array x (array indices start at 1)",
+  examples = """
+    Examples:
+      > SELECT _FUNC_(array(1, 2, 3, 4), 4, 5);
+       [1,2,3,4,5]
+      > SELECT _FUNC_(array(5, 3, 2, 1), -3, 4);
+       [5,4,3,2,1]
+  """,
+  group = "array_funcs",
+  since = "3.4.0")
+case class ArrayInsert(srcArrayExpr: Expression, posExpr: Expression, itemExpr: Expression)
+  extends TernaryExpression with ImplicitCastInputTypes with ComplexTypeMergingExpression
+    with QueryErrorsBase {
+
+  override def inputTypes: Seq[AbstractDataType] = {
+    (srcArrayExpr.dataType, posExpr.dataType, itemExpr.dataType) match {
+      case (ArrayType(e1, hasNull), e2: IntegralType, e3) if (e2 != LongType) =>
+        TypeCoercion.findTightestCommonType(e1, e3) match {
+          case Some(dt) => Seq(ArrayType(dt, hasNull), IntegerType, dt)
+          case _ => Seq.empty
+        }
+      case (e1, e2, e3) => Seq.empty
+    }
+    Seq.empty
+  }
+
+  override def checkInputDataTypes(): TypeCheckResult = {
+    (first.dataType, second.dataType, third.dataType) match {
+      case (_: ArrayType, e2, e3) if e2 != IntegerType =>
+        DataTypeMismatch(
+          errorSubClass = "UNEXPECTED_INPUT_TYPE",
+          messageParameters = Map(
+            "paramIndex" -> "2",
+            "requiredType" -> toSQLType(IntegerType),
+            "inputSql" -> toSQLExpr(second),
+            "inputType" -> toSQLType(second.dataType))
+        )
+      case (ArrayType(e1, _), e2, e3) if e1.sameType(e3) =>
+        TypeCheckResult.TypeCheckSuccess
+      case _ =>
+        DataTypeMismatch(
+          errorSubClass = "ARRAY_FUNCTION_DIFF_TYPES",
+          messageParameters = Map(
+            "functionName" -> toSQLId(prettyName),
+            "dataType" -> toSQLType(ArrayType),
+            "leftType" -> toSQLType(first.dataType),
+            "rightType" -> toSQLType(third.dataType)
+          )
+        )
+    }
+  }
+
+  override def eval(input: InternalRow): Any = {
+    val value1 = first.eval(input)
+    if (value1 != null) {
+      val value2 = second.eval(input)
+      if (value2 != null) {
+        val value3 = third.eval(input)
+        return nullSafeEval(value1, value2, value3)
+      }
+    }
+    null
+  }
+
+  override def nullSafeEval(arr: Any, pos: Any, item: Any): Any = {
+    val baseArr = arr.asInstanceOf[ArrayData]
+    val posInt = pos.asInstanceOf[Int]
+    val arrayElementType = dataType.asInstanceOf[ArrayType].elementType
+
+    val newPosExtendsArrayLeft = posInt < 0 && math.abs(posInt) > baseArr.numElements() - 1

Review Comment:
   I find the implementation a bit hard to follow. What about having 3 explicit code paths for it, roughly as sketched below?
   
   1. `pos` >= `baseArr.length`;
   2. `pos` < `- baseArr.length`;
   3. otherwise.
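   A plain-Scala sketch of what those three branches could look like (assuming 0-based positions with negative positions counted from the end, as in the quoted snippet; the placement of the null padding is my reading of the code, and `insertSketch` is only an illustrative name, not anything in Spark):
   
   ```scala
   def insertSketch(arr: Seq[Any], pos: Int, item: Any): Seq[Any] = {
     val n = arr.length
     if (pos >= n) {
       // 1) pos >= length: pad the gap with nulls so the item lands at index `pos`.
       (arr ++ Seq.fill[Any](pos - n)(null)) :+ item
     } else if (pos < -n) {
       // 2) pos < -length: the item goes to index 0 and nulls pad the gap before
       //    the original elements.
       (item +: Seq.fill[Any](-pos - n)(null)) ++ arr
     } else {
       // 3) otherwise: a plain insert, with negative positions wrapped from the end.
       val idx = if (pos < 0) pos + n else pos
       (arr.take(idx) :+ item) ++ arr.drop(idx)
     }
   }
   
   // e.g. insertSketch(Seq(1, 2, 4), 2, 3) == Seq(1, 2, 3, 4), which lines up with
   // the `ArrayInsert(a1, litTwoInt, litThreeInt)` case in the test suite.
   ```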



##########
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala:
##########
@@ -2250,6 +2250,77 @@ class CollectionExpressionsSuite extends SparkFunSuite 
with ExpressionEvalHelper
       Seq(2d))
   }
 
+  test("Array Insert") {
+    val a1 = Literal.create(Seq(1, 2, 4), ArrayType(IntegerType))
+    val a2 = Literal.create(Seq(1, 2, null, 4, 5, null), ArrayType(IntegerType))
+    val a3 = Literal.create(Seq[Boolean](true, false, true), ArrayType(BooleanType))
+    val a4 = Literal.create(Seq[Byte](1, 2, 3, 2), ArrayType(ByteType))
+    val a7 = Literal.create(Seq[Short](1, 2, 3, 2), ArrayType(ShortType))
+    val a9 = Literal.create(Seq[Float](1.1F, 2.2F, 3.3F, 2.2F), ArrayType(FloatType))
+    val a11 = Literal.create(Seq[Double](1.1, 2.2, 3.3, 2.2), ArrayType(DoubleType))
+    val a13 = Literal.create(Seq(1L, 2L, 4L), ArrayType(LongType))
+    val a15 = Literal.create(Seq("b", "a", "c"), ArrayType(StringType, false))
+    val a16 = Literal.create(Seq("b", null, "a", "g", null), ArrayType(StringType, true))
+    val a18 = Literal.create(null, ArrayType(StringType))
+
+    val litTwoInt = Literal.create(2, IntegerType)
+    val litThreeInt = Literal.create(3, IntegerType)
+    val litOneInt = Literal.create(1, IntegerType)
+    val litFourInt = Literal.create(4, IntegerType)
+    val litNullInt = Literal.create(null, IntegerType)
+    val litZeroInt = Literal.create(0, IntegerType)
+    val litTenInt = Literal.create(10, IntegerType)
+    val litMinusTwoInt = Literal.create(-2, IntegerType)
+    val litMinusTenInt = Literal.create(-10, IntegerType)
+    val litThreeLong = Literal.create(3L, LongType)
+    val litBoolTrue = Literal.create(true, BooleanType)
+    val litThreeByte = Literal.create(5.asInstanceOf[Byte], ByteType)
+    val litThreeShort = Literal.create(3.asInstanceOf[Short], ShortType)
+    val litFourFourFloat = Literal.create(4.4F, FloatType)
+    val litFourFourDouble = Literal.create(4.4, DoubleType)
+    val litDString = Literal.create("d", StringType)
+
+    // basic additions per type
+    checkEvaluation(ArrayInsert(a1, litTwoInt, litThreeInt), Seq(1, 2, 3, 4))
+    checkEvaluation(
+      ArrayInsert(a3, litTwoInt, litBoolTrue),
+      Seq[Boolean](true, false, true, true)
+    )
+    checkEvaluation(ArrayInsert(a4, litTwoInt, litThreeByte), Seq[Byte](1, 2, 5, 3, 2))

Review Comment:
   Why is there a `5`?
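   (presumably because `litThreeByte` actually holds the value `5`; see the naming comment above)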



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala:
##########
@@ -4601,6 +4601,230 @@ case class ArrayExcept(left: Expression, right: Expression) extends ArrayBinaryL
     newLeft: Expression, newRight: Expression): ArrayExcept = copy(left = newLeft, right = newRight)
 }
 
+@ExpressionDescription(
+  usage = "_FUNC_(x, pos, val) - Places val into index pos of array x (array indices start at 1)",
+  examples = """
+    Examples:
+      > SELECT _FUNC_(array(1, 2, 3, 4), 4, 5);
+       [1,2,3,4,5]
+      > SELECT _FUNC_(array(5, 3, 2, 1), -3, 4);
+       [5,4,3,2,1]
+  """,
+  group = "array_funcs",
+  since = "3.4.0")
+case class ArrayInsert(srcArrayExpr: Expression, posExpr: Expression, itemExpr: Expression)
+  extends TernaryExpression with ImplicitCastInputTypes with ComplexTypeMergingExpression
+    with QueryErrorsBase {
+
+  override def inputTypes: Seq[AbstractDataType] = {
+    (srcArrayExpr.dataType, posExpr.dataType, itemExpr.dataType) match {
+      case (ArrayType(e1, hasNull), e2: IntegralType, e3) if (e2 != LongType) =>
+        TypeCoercion.findTightestCommonType(e1, e3) match {
+          case Some(dt) => Seq(ArrayType(dt, hasNull), IntegerType, dt)
+          case _ => Seq.empty
+        }
+      case (e1, e2, e3) => Seq.empty
+    }
+    Seq.empty
+  }
+
+  override def checkInputDataTypes(): TypeCheckResult = {
+    (first.dataType, second.dataType, third.dataType) match {
+      case (_: ArrayType, e2, e3) if e2 != IntegerType =>
+        DataTypeMismatch(
+          errorSubClass = "UNEXPECTED_INPUT_TYPE",
+          messageParameters = Map(
+            "paramIndex" -> "2",
+            "requiredType" -> toSQLType(IntegerType),
+            "inputSql" -> toSQLExpr(second),
+            "inputType" -> toSQLType(second.dataType))
+        )
+      case (ArrayType(e1, _), e2, e3) if e1.sameType(e3) =>
+        TypeCheckResult.TypeCheckSuccess
+      case _ =>
+        DataTypeMismatch(
+          errorSubClass = "ARRAY_FUNCTION_DIFF_TYPES",
+          messageParameters = Map(
+            "functionName" -> toSQLId(prettyName),
+            "dataType" -> toSQLType(ArrayType),
+            "leftType" -> toSQLType(first.dataType),
+            "rightType" -> toSQLType(third.dataType)
+          )
+        )
+    }
+  }
+
+  override def eval(input: InternalRow): Any = {
+    val value1 = first.eval(input)
+    if (value1 != null) {
+      val value2 = second.eval(input)
+      if (value2 != null) {
+        val value3 = third.eval(input)
+        return nullSafeEval(value1, value2, value3)
+      }
+    }
+    null
+  }
+
+  override def nullSafeEval(arr: Any, pos: Any, item: Any): Any = {
+    val baseArr = arr.asInstanceOf[ArrayData]
+    val posInt = pos.asInstanceOf[Int]
+    val arrayElementType = dataType.asInstanceOf[ArrayType].elementType
+
+    val newPosExtendsArrayLeft = posInt < 0 && math.abs(posInt) > baseArr.numElements() - 1

Review Comment:
   ```suggestion
       val newPosExtendsArrayLeft = posInt < 0 && -posInt > baseArr.numElements() - 1
   ```
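   (presumably since `posInt` is already known to be negative on this path, `-posInt` reads more directly than the `math.abs` call)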



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala:
##########
@@ -4601,6 +4601,230 @@ case class ArrayExcept(left: Expression, right: Expression) extends ArrayBinaryL
     newLeft: Expression, newRight: Expression): ArrayExcept = copy(left = newLeft, right = newRight)
 }
 
+@ExpressionDescription(
+  usage = "_FUNC_(x, pos, val) - Places val into index pos of array x (array indices start at 1)",
+  examples = """
+    Examples:
+      > SELECT _FUNC_(array(1, 2, 3, 4), 4, 5);
+       [1,2,3,4,5]
+      > SELECT _FUNC_(array(5, 3, 2, 1), -3, 4);
+       [5,4,3,2,1]
+  """,
+  group = "array_funcs",
+  since = "3.4.0")
+case class ArrayInsert(srcArrayExpr: Expression, posExpr: Expression, itemExpr: Expression)
+  extends TernaryExpression with ImplicitCastInputTypes with ComplexTypeMergingExpression
+    with QueryErrorsBase {
+
+  override def inputTypes: Seq[AbstractDataType] = {
+    (srcArrayExpr.dataType, posExpr.dataType, itemExpr.dataType) match {
+      case (ArrayType(e1, hasNull), e2: IntegralType, e3) if (e2 != LongType) =>
+        TypeCoercion.findTightestCommonType(e1, e3) match {
+          case Some(dt) => Seq(ArrayType(dt, hasNull), IntegerType, dt)
+          case _ => Seq.empty
+        }
+      case (e1, e2, e3) => Seq.empty
+    }
+    Seq.empty
+  }
+
+  override def checkInputDataTypes(): TypeCheckResult = {
+    (first.dataType, second.dataType, third.dataType) match {
+      case (_: ArrayType, e2, e3) if e2 != IntegerType =>
+        DataTypeMismatch(
+          errorSubClass = "UNEXPECTED_INPUT_TYPE",
+          messageParameters = Map(
+            "paramIndex" -> "2",
+            "requiredType" -> toSQLType(IntegerType),
+            "inputSql" -> toSQLExpr(second),
+            "inputType" -> toSQLType(second.dataType))
+        )
+      case (ArrayType(e1, _), e2, e3) if e1.sameType(e3) =>
+        TypeCheckResult.TypeCheckSuccess
+      case _ =>
+        DataTypeMismatch(
+          errorSubClass = "ARRAY_FUNCTION_DIFF_TYPES",
+          messageParameters = Map(
+            "functionName" -> toSQLId(prettyName),
+            "dataType" -> toSQLType(ArrayType),
+            "leftType" -> toSQLType(first.dataType),
+            "rightType" -> toSQLType(third.dataType)
+          )
+        )
+    }
+  }
+
+  override def eval(input: InternalRow): Any = {
+    val value1 = first.eval(input)
+    if (value1 != null) {
+      val value2 = second.eval(input)
+      if (value2 != null) {
+        val value3 = third.eval(input)
+        return nullSafeEval(value1, value2, value3)
+      }
+    }
+    null
+  }
+
+  override def nullSafeEval(arr: Any, pos: Any, item: Any): Any = {
+    val baseArr = arr.asInstanceOf[ArrayData]
+    val posInt = pos.asInstanceOf[Int]
+    val arrayElementType = dataType.asInstanceOf[ArrayType].elementType
+
+    val newPosExtendsArrayLeft = posInt < 0 && math.abs(posInt) > baseArr.numElements() - 1
+
+    val itemInsertionIndex = if (newPosExtendsArrayLeft) {
+      0
+    } else if (posInt < 0) {
+      posInt + baseArr.numElements()
+    } else {
+      posInt
+    }
+
+    val newArrayLength = if (newPosExtendsArrayLeft) {
+      math.abs(posInt) + 1
+    } else {
+      math.max(baseArr.numElements() + 1, itemInsertionIndex + 1)
+    }
+
+    val newArray = new Array[Any](newArrayLength)

Review Comment:
   ```
       if (newArrayLength > ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH) {
          throw QueryExecutionErrors.concatArraysWithElementsExceedLimitError(newArrayLength)
       }
   ```
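   Presumably so that a very large `pos`, which can drive `newArrayLength` past the supported maximum, fails with a proper error instead of attempting a huge allocation.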


