Github user kiszk commented on a diff in the pull request:
https://github.com/apache/spark/pull/21069#discussion_r190485891
--- Diff:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
---
@@ -1882,3 +1882,123 @@ case class ArrayRepeat(left: Expression, right:
Expression)
}
}
+
+/**
+ * Remove all elements that are equal to the given element from the array
+ */
+@ExpressionDescription(
+ usage = "_FUNC_(array, element) - Remove all elements that are equal to
the given element from the array.",
+ examples = """
+ Examples:
+ > SELECT _FUNC_(array(1, 2, 3, null, 3), 3);
+ [1,2,null]
+ """, since = "2.4.0")
+case class ArrayRemove(left: Expression, right: Expression)
+ extends BinaryExpression with ImplicitCastInputTypes {
+
+ override def dataType: DataType = left.dataType
+
+ override def inputTypes: Seq[AbstractDataType] =
+ Seq(ArrayType, left.dataType.asInstanceOf[ArrayType].elementType)
+
+ lazy val elementType: DataType =
left.dataType.asInstanceOf[ArrayType].elementType
+
+ @transient private lazy val ordering: Ordering[Any] =
+ TypeUtils.getInterpretedOrdering(right.dataType)
+
+ override def checkInputDataTypes(): TypeCheckResult = {
+ if (!left.dataType.isInstanceOf[ArrayType]
+ || left.dataType.asInstanceOf[ArrayType].elementType !=
right.dataType) {
+ TypeCheckResult.TypeCheckFailure(
+ "Arguments must be an array followed by a value of same type as
the array members")
+ } else {
+ TypeUtils.checkForOrderingExpr(right.dataType, s"function
$prettyName")
+ }
+ }
+
+ override def nullSafeEval(arr: Any, value: Any): Any = {
+ val newArray = new
Array[Any](arr.asInstanceOf[ArrayData].numElements())
+ var pos = 0
+ arr.asInstanceOf[ArrayData].foreach(right.dataType, (i, v) =>
+ if (v == null) {
+ if (value != null) {
--- End diff --
nit: Do we need this check since we are in `nullSafeEval`?
---
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]