This is an automated email from the ASF dual-hosted git repository.
gengliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 58256a5b401 [SPARK-42070][SQL] Change the default value of argument of Mask function from -1 to NULL
58256a5b401 is described below
commit 58256a5b4011ee4465d13ed0e37e235091522db0
Author: Vinod KC <[email protected]>
AuthorDate: Thu Jan 19 22:40:04 2023 -0800
[SPARK-42070][SQL] Change the default value of argument of Mask function from -1 to NULL
### What changes were proposed in this pull request?
In the 'mask' UDF, using -1 as the ignore value for STRING-typed replacement arguments is not standard; NULL should be used instead.
This PR changes the ignore value of the 'mask' UDF's replacement arguments from -1 to NULL.
### Why are the changes needed?
To standardize the input value and type of the ignore argument.
### Does this PR introduce _any_ user-facing change?
Yes, the ignore value of the replacement parameters changed from -1 to NULL.
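For example, after this change a replacement argument passed as NULL keeps the corresponding characters unchanged (illustrative queries taken from the examples added in this patch):

SELECT mask('AbCD123-@$#', NULL, 'q', 'd', 'o');
-- AqCDdddoooo: upper-case characters are retained because upperChar is NULL

SELECT mask('AbCD123-@$#', NULL, NULL, NULL, NULL);
-- AbCD123-@$#: all replacement arguments are NULL, so the input is returned unchanged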
### How was this patch tested?
Added tests.
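The added tests and the regenerated golden files confirm that NULL replacement arguments no longer raise DATATYPE_MISMATCH.UNEXPECTED_NULL, while replacement arguments longer than one character are still rejected (INPUT_SIZE_NOT_ONE). A representative case from the updated mask-functions golden file:

SELECT mask(c1, 'Q', 'q', NULL, 'o') from values ('AbCD123-@$#') as tab(c1);
-- QqQQ123oooo: digits are retained because digitChar is NULL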
Closes #39577 from vinodkc/br_change_mask_udf.
Authored-by: Vinod KC <[email protected]>
Signed-off-by: Gengliang Wang <[email protected]>
---
.../sql/catalyst/expressions/maskExpressions.scala | 157 +++++----
.../expressions/StringExpressionsSuite.scala | 58 +---
.../sql-functions/sql-expression-schema.md | 2 +-
.../resources/sql-tests/inputs/mask-functions.sql | 52 ++-
.../sql-tests/results/mask-functions.sql.out | 384 +++++----------------
5 files changed, 219 insertions(+), 434 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/maskExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/maskExpressions.scala
index 651ac74852b..e2828f35232 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/maskExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/maskExpressions.scala
@@ -17,27 +17,29 @@
package org.apache.spark.sql.catalyst.expressions
+import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch
import org.apache.spark.sql.catalyst.expressions.codegen._
+import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.errors.QueryErrorsBase
import org.apache.spark.sql.types.{AbstractDataType, DataType, StringType}
import org.apache.spark.unsafe.types.UTF8String
// scalastyle:off line.size.limit
@ExpressionDescription(
- usage = """_FUNC_(input[, upperChar, lowerChar, digitChar, otherChar]) - masks the given string value.
+ usage =
+ """_FUNC_(input[, upperChar, lowerChar, digitChar, otherChar]) - masks the given string value.
The function replaces characters with 'X' or 'x', and numbers with 'n'.
This can be useful for creating copies of tables with sensitive information removed.
- Error behavior: null value as replacement argument will throw AnalysisError.
""",
arguments = """
Arguments:
* input - string value to mask. Supported types: STRING, VARCHAR, CHAR
- * upperChar - character to replace upper-case characters with. Specify -1 to retain original character. Default value: 'X'
- * lowerChar - character to replace lower-case characters with. Specify -1 to retain original character. Default value: 'x'
- * digitChar - character to replace digit characters with. Specify -1 to retain original character. Default value: 'n'
- * otherChar - character to replace all other characters with. Specify -1 to retain original character. Default value: -1
+ * upperChar - character to replace upper-case characters with. Specify NULL to retain original character. Default value: 'X'
+ * lowerChar - character to replace lower-case characters with. Specify NULL to retain original character. Default value: 'x'
+ * digitChar - character to replace digit characters with. Specify NULL to retain original character. Default value: 'n'
+ * otherChar - character to replace all other characters with. Specify NULL to retain original character. Default value: NULL
""",
examples = """
Examples:
@@ -57,17 +59,17 @@ import org.apache.spark.unsafe.types.UTF8String
QqQQddd-@$#
> SELECT _FUNC_('AbCD123-@$#', 'Q', 'q', 'd', 'o');
QqQQdddoooo
- > SELECT _FUNC_('AbCD123-@$#', -1, 'q', 'd', 'o');
+ > SELECT _FUNC_('AbCD123-@$#', NULL, 'q', 'd', 'o');
AqCDdddoooo
- > SELECT _FUNC_('AbCD123-@$#', -1, -1, 'd', 'o');
+ > SELECT _FUNC_('AbCD123-@$#', NULL, NULL, 'd', 'o');
AbCDdddoooo
- > SELECT _FUNC_('AbCD123-@$#', -1, -1, -1, 'o');
+ > SELECT _FUNC_('AbCD123-@$#', NULL, NULL, NULL, 'o');
AbCD123oooo
- > SELECT _FUNC_(NULL, -1, -1, -1, 'o');
+ > SELECT _FUNC_(NULL, NULL, NULL, NULL, 'o');
NULL
> SELECT _FUNC_(NULL);
NULL
- > SELECT _FUNC_('AbCD123-@$#', -1, -1, -1, -1);
+ > SELECT _FUNC_('AbCD123-@$#', NULL, NULL, NULL, NULL);
AbCD123-@$#
""",
since = "3.4.0",
@@ -80,9 +82,8 @@ case class Mask(
digitChar: Expression,
otherChar: Expression)
extends QuinaryExpression
- with ImplicitCastInputTypes
- with QueryErrorsBase
- with NullIntolerant {
+ with ExpectsInputTypes
+ with QueryErrorsBase {
def this(input: Expression) =
this(
@@ -90,7 +91,7 @@ case class Mask(
Literal(Mask.MASKED_UPPERCASE),
Literal(Mask.MASKED_LOWERCASE),
Literal(Mask.MASKED_DIGIT),
- Literal(Mask.MASKED_IGNORE))
+ Literal(Mask.MASKED_IGNORE, StringType))
def this(input: Expression, upperChar: Expression) =
this(
@@ -98,17 +99,22 @@ case class Mask(
upperChar,
Literal(Mask.MASKED_LOWERCASE),
Literal(Mask.MASKED_DIGIT),
- Literal(Mask.MASKED_IGNORE))
+ Literal(Mask.MASKED_IGNORE, StringType))
def this(input: Expression, upperChar: Expression, lowerChar: Expression) =
- this(input, upperChar, lowerChar, Literal(Mask.MASKED_DIGIT), Literal(Mask.MASKED_IGNORE))
+ this(
+ input,
+ upperChar,
+ lowerChar,
+ Literal(Mask.MASKED_DIGIT),
+ Literal(Mask.MASKED_IGNORE, StringType))
def this(
input: Expression,
upperChar: Expression,
lowerChar: Expression,
digitChar: Expression) =
- this(input, upperChar, lowerChar, digitChar, Literal(Mask.MASKED_IGNORE))
+ this(input, upperChar, lowerChar, digitChar, Literal(Mask.MASKED_IGNORE, StringType))
override def checkInputDataTypes(): TypeCheckResult = {
@@ -123,13 +129,7 @@ case class Mask(
"inputExpr" -> toSQLExpr(exp))))
} else {
val replaceChar = exp.eval()
- if (replaceChar == null) {
- Some(
- DataTypeMismatch(
- errorSubClass = "UNEXPECTED_NULL",
- messageParameters = Map("exprName" -> message)))
- } else if (!replaceChar.asInstanceOf[UTF8String].toString.equals(Mask.MASKED_IGNORE) &&
- replaceChar.asInstanceOf[UTF8String].numChars != 1) {
+ if (replaceChar != null && replaceChar.asInstanceOf[UTF8String].numChars != 1) {
Some(
DataTypeMismatch(
errorSubClass = "INPUT_SIZE_NOT_ONE",
@@ -168,23 +168,20 @@ case class Mask(
override def inputTypes: Seq[AbstractDataType] =
Seq(StringType, StringType, StringType, StringType, StringType)
+ override def nullable: Boolean = true
+
/**
- * Called by default [[eval]] implementation. If subclass of QuinaryExpression keep the default
- * nullability, they can override this method to save null-check code. If we need full control
- * of evaluation process, we should override [[eval]].
+ * Default behavior of evaluation according to the default nullability of QuinaryExpression. If
+ * subclass of QuinaryExpression override nullable, probably should also override this.
*/
- override protected def nullSafeEval(
- input: Any,
- upperChar: Any,
- lowerChar: Any,
- digitChar: Any,
- otherChar: Any): Any =
+ override def eval(input: InternalRow): Any = {
Mask.transformInput(
- input.asInstanceOf[UTF8String],
- upperChar.asInstanceOf[UTF8String],
- lowerChar.asInstanceOf[UTF8String],
- digitChar.asInstanceOf[UTF8String],
- otherChar.asInstanceOf[UTF8String])
+ children(0).eval(input),
+ children(1).eval(input),
+ children(2).eval(input),
+ children(3).eval(input),
+ children(4).eval(input))
+ }
/**
* Returns Java source code that can be compiled to evaluate this expression. The default
@@ -207,6 +204,37 @@ case class Mask(
s"transformInput($input, $upperChar, $lowerChar, $digitChar,
$otherChar);"
})
+ /**
+ * Short hand for generating quinary evaluation code. If either of the sub-expressions is null,
+ * the result of this computation is assumed to be null.
+ *
+ * @param f
+ * function that accepts the 5 non-null evaluation result names of children and returns Java
+ * code to compute the output.
+ */
+ override protected def nullSafeCodeGen(
+ ctx: CodegenContext,
+ ev: ExprCode,
+ f: (String, String, String, String, String) => String): ExprCode = {
+ val firstGen = children(0).genCode(ctx)
+ val secondGen = children(1).genCode(ctx)
+ val thirdGen = children(2).genCode(ctx)
+ val fourthGen = children(3).genCode(ctx)
+ val fifthGen = children(4).genCode(ctx)
+ val resultCode =
+ f(firstGen.value, secondGen.value, thirdGen.value, fourthGen.value, fifthGen.value)
+ ev.copy(
+ code = code"""
+ ${firstGen.code}
+ ${secondGen.code}
+ ${thirdGen.code}
+ ${fourthGen.code}
+ ${fifthGen.code}
+ ${CodeGenerator.javaType(dataType)} ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
+ $resultCode""",
+ isNull = FalseLiteral)
+ }
+
/**
* Returns the [[DataType]] of the result of evaluating this expression. It is invalid to query
* the dataType of an unresolved expression (i.e., when `resolved` == false).
@@ -239,42 +267,41 @@ object Mask {
// Default character to replace digits
private val MASKED_DIGIT = 'n'
// This value helps to retain original value in the input by ignoring the replacement rules
- private val MASKED_IGNORE = "-1"
-
- private def createMaskArgument(maskArgument: UTF8String): MaskArgument = {
- val maskArgumentStr = maskArgument.toString
- MaskArgument(maskArgumentStr.toString.charAt(0), MASKED_IGNORE.equals(maskArgumentStr))
- }
+ private val MASKED_IGNORE = null
def transformInput(
- input: UTF8String,
- maskUpper: UTF8String,
- maskLower: UTF8String,
- maskDigit: UTF8String,
- maskOther: UTF8String): UTF8String = {
- val maskUpperArg = createMaskArgument(maskUpper)
- val maskLowerArg = createMaskArgument(maskLower)
- val maskDigitArg = createMaskArgument(maskDigit)
- val markOtherArg = createMaskArgument(maskOther)
+ input: Any,
+ maskUpper: Any,
+ maskLower: Any,
+ maskDigit: Any,
+ maskOther: Any): UTF8String = {
- val transformedString = input.toString.map {
- transformChar(_, maskUpperArg, maskLowerArg, maskDigitArg, markOtherArg).toChar
+ val transformedString = if (input == null) {
+ null
+ } else {
+ input.toString.map {
+ transformChar(_, maskUpper, maskLower, maskDigit, maskOther).toChar
+ }
}
org.apache.spark.unsafe.types.UTF8String.fromString(transformedString)
}
private def transformChar(
- c: Int,
- maskUpper: MaskArgument,
- maskLower: MaskArgument,
- maskDigit: MaskArgument,
- maskOther: MaskArgument): Int = {
+ c: Char,
+ maskUpper: Any,
+ maskLower: Any,
+ maskDigit: Any,
+ maskOther: Any): Int = {
+
+ def maskedChar(c: Char, option: Any): Char = {
+ if (option != MASKED_IGNORE) option.asInstanceOf[UTF8String].toString.charAt(0) else c
+ }
Character.getType(c) match {
- case Character.UPPERCASE_LETTER => if (!maskUpper.ignore) maskUpper.maskChar else c
- case Character.LOWERCASE_LETTER => if (!maskLower.ignore) maskLower.maskChar else c
- case Character.DECIMAL_DIGIT_NUMBER => if (!maskDigit.ignore) maskDigit.maskChar else c
- case _ => if (!maskOther.ignore) maskOther.maskChar else c
+ case Character.UPPERCASE_LETTER => maskedChar(c, maskUpper)
+ case Character.LOWERCASE_LETTER => maskedChar(c, maskLower)
+ case Character.DECIMAL_DIGIT_NUMBER => maskedChar(c, maskDigit)
+ case _ => maskedChar(c, maskOther)
}
}
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
index d46497223a9..144140be830 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
@@ -375,12 +375,9 @@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
}
test("Mask") {
- def getExpectedValue(expected: String): String =
- if ("null".equals(expected)) null else expected
-
+ val NULL_LITERAL = Literal(null, StringType)
val inputString1 = Literal("AbCD123-@$#")
val inputString2 = Literal("abcd-EFGH-8765-4321")
- val inputString3 = Literal.create(null, StringType)
val firstItem = (
inputString1,
Array(
@@ -405,57 +402,36 @@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
"abcd*EFGH*dddd*dddd",
"abcd*EFGH*8765*4321",
"abcd-EFGH-8765-4321"))
- val thirdItem = (
- inputString3,
- Array("null", "null", "null", "null", "null", "null", "null", "null", "null"))
- Seq(firstItem, secondItem, thirdItem).foreach {
+ Seq(firstItem, secondItem).foreach {
case (input: Literal, expectedList: Array[String]) =>
- checkEvaluation(new Mask(input), getExpectedValue(expectedList(0)))
- checkEvaluation(new Mask(input, Literal('Q')), getExpectedValue(expectedList(1)))
- checkEvaluation(
- new Mask(input, Literal('Q'), Literal('q')),
- getExpectedValue(expectedList(2)))
+ checkEvaluation(new Mask(input), expectedList(0))
+ checkEvaluation(new Mask(input, Literal('Q')), expectedList(1))
+ checkEvaluation(new Mask(input, Literal('Q'), Literal('q')), expectedList(2))
checkEvaluation(
new Mask(input, Literal('Q'), Literal('q'), Literal('d')),
- getExpectedValue(expectedList(3)))
+ expectedList(3))
checkEvaluation(
new Mask(input, Literal('Q'), Literal('q'), Literal('d'), Literal('*')),
- getExpectedValue(expectedList(4)))
+ expectedList(4))
checkEvaluation(
- new Mask(input, Literal("-1"), Literal('q'), Literal('d'), Literal('*')),
- getExpectedValue(expectedList(5)))
+ new Mask(input, NULL_LITERAL, Literal('q'), Literal('d'), Literal('*')),
+ expectedList(5))
checkEvaluation(
- new Mask(input, Literal("-1"), Literal("-1"), Literal('d'), Literal('*')),
- getExpectedValue(expectedList(6)))
+ new Mask(input, NULL_LITERAL, NULL_LITERAL, Literal('d'), Literal('*')),
+ expectedList(6))
checkEvaluation(
- new Mask(input, Literal("-1"), Literal("-1"), Literal("-1"), Literal('*')),
- getExpectedValue(expectedList(7)))
+ new Mask(input, NULL_LITERAL, NULL_LITERAL, NULL_LITERAL, Literal('*')),
+ expectedList(7))
checkEvaluation(
- new Mask(input, Literal("-1"), Literal("-1"), Literal("-1"), Literal("-1")),
- getExpectedValue(expectedList(8)))
+ new Mask(input, NULL_LITERAL, NULL_LITERAL, NULL_LITERAL, NULL_LITERAL),
+ expectedList(8))
+ expectedList(8))
assert(
- new Mask(input, Literal("-1"), Literal('q'), Literal('d'), Literal('*'))
+ new Mask(input, NULL_LITERAL, Literal('q'), Literal('d'), Literal('*'))
.checkInputDataTypes()
.isSuccess)
assert(
- new Mask(input, Literal(null), Literal("-1"), Literal("-1"), Literal("-1"))
- .checkInputDataTypes()
- .isFailure)
- assert(
- new Mask(input, Literal("Q"), Literal(null), Literal("-1"), Literal("-1"))
- .checkInputDataTypes()
- .isFailure)
- assert(
- new Mask(input, Literal("Q"), Literal("q"), Literal(null), Literal("-1"))
- .checkInputDataTypes()
- .isFailure)
- assert(
- new Mask(input, Literal("Q"), Literal("q"), Literal("n"), Literal(null))
- .checkInputDataTypes()
- .isFailure)
- assert(
- new Mask(input, Literal(null), Literal(null), Literal(null), Literal(null))
+ new Mask(input, Literal("QQ"), Literal('q'), Literal('d'), Literal('*'))
.checkInputDataTypes()
.isFailure)
}
diff --git a/sql/core/src/test/resources/sql-functions/sql-expression-schema.md b/sql/core/src/test/resources/sql-functions/sql-expression-schema.md
index 273d3f8f249..9b8d50d2ede 100644
--- a/sql/core/src/test/resources/sql-functions/sql-expression-schema.md
+++ b/sql/core/src/test/resources/sql-functions/sql-expression-schema.md
@@ -202,7 +202,7 @@
| org.apache.spark.sql.catalyst.expressions.MapKeys | map_keys | SELECT map_keys(map(1, 'a', 2, 'b')) | struct<map_keys(map(1, a, 2, b)):array<int>> |
| org.apache.spark.sql.catalyst.expressions.MapValues | map_values | SELECT map_values(map(1, 'a', 2, 'b')) | struct<map_values(map(1, a, 2, b)):array<string>> |
| org.apache.spark.sql.catalyst.expressions.MapZipWith | map_zip_with | SELECT map_zip_with(map(1, 'a', 2, 'b'), map(1, 'x', 2, 'y'), (k, v1, v2) -> concat(v1, v2)) | struct<map_zip_with(map(1, a, 2, b), map(1, x, 2, y), lambdafunction(concat(namedlambdavariable(), namedlambdavariable()), namedlambdavariable(), namedlambdavariable(), namedlambdavariable())):map<int,string>> |
-| org.apache.spark.sql.catalyst.expressions.Mask | mask | SELECT mask('abcd-EFGH-8765-4321') | struct<mask(abcd-EFGH-8765-4321, X, x, n, -1):string> |
+| org.apache.spark.sql.catalyst.expressions.Mask | mask | SELECT mask('abcd-EFGH-8765-4321') | struct<mask(abcd-EFGH-8765-4321, X, x, n, NULL):string> |
| org.apache.spark.sql.catalyst.expressions.Md5 | md5 | SELECT md5('Spark') | struct<md5(Spark):string> |
| org.apache.spark.sql.catalyst.expressions.MicrosToTimestamp | timestamp_micros | SELECT timestamp_micros(1230219000123123) | struct<timestamp_micros(1230219000123123):timestamp> |
| org.apache.spark.sql.catalyst.expressions.MillisToTimestamp | timestamp_millis | SELECT timestamp_millis(1230219000123) | struct<timestamp_millis(1230219000123):timestamp> |
diff --git a/sql/core/src/test/resources/sql-tests/inputs/mask-functions.sql b/sql/core/src/test/resources/sql-tests/inputs/mask-functions.sql
index a73495a8cb7..b4dc8f18303 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/mask-functions.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/mask-functions.sql
@@ -5,58 +5,52 @@ SELECT mask('AbCD123-@$#', 'Q', 'q');
SELECT mask('AbCD123-@$#', 'Q', 'q', 'd');
SELECT mask('AbCD123-@$#', 'Q', 'q', 'd', 'o');
SELECT mask('AbCD123-@$#', 'Qa', 'qa', 'da', 'oa');
-SELECT mask('AbCD123-@$#', -1, 'q', 'd', 'o');
-SELECT mask('AbCD123-@$#', -1, -1, 'd', 'o');
-SELECT mask('AbCD123-@$#', -1, -1, -1, 'o');
-SELECT mask('AbCD123-@$#', -1, -1, -1, -1);
+SELECT mask('AbCD123-@$#', NULL, 'q', 'd', 'o');
+SELECT mask('AbCD123-@$#', NULL, NULL, 'd', 'o');
+SELECT mask('AbCD123-@$#', NULL, NULL, NULL, 'o');
+SELECT mask('AbCD123-@$#', NULL, NULL, NULL, NULL);
SELECT mask(NULL);
-SELECT mask(NULL, -1, 'q', 'd', 'o');
-SELECT mask(NULL, -1, -1, 'd', 'o');
-SELECT mask(NULL, -1, -1, -1, 'o');
-SELECT mask(NULL, -1, -1, -1, -1);
-SELECT mask('AbCD123-@$#', NULL, -1, -1, -1);
-SELECT mask('AbCD123-@$#', -1, NULL, -1, -1);
-SELECT mask('AbCD123-@$#', -1, -1, NULL, -1);
-SELECT mask('AbCD123-@$#', -1, -1, -1, NULL);
+SELECT mask(NULL, NULL, 'q', 'd', 'o');
+SELECT mask(NULL, NULL, NULL, 'd', 'o');
+SELECT mask(NULL, NULL, NULL, NULL, 'o');
SELECT mask('AbCD123-@$#', NULL, NULL, NULL, NULL);
SELECT mask(c1) from values ('AbCD123-@$#') as tab(c1);
SELECT mask(c1, 'Q') from values ('AbCD123-@$#') as tab(c1);
SELECT mask(c1, 'Q', 'q')from values ('AbCD123-@$#') as tab(c1);
SELECT mask(c1, 'Q', 'q', 'd') from values ('AbCD123-@$#') as tab(c1);
SELECT mask(c1, 'Q', 'q', 'd', 'o') from values ('AbCD123-@$#') as tab(c1);
-SELECT mask(c1, -1, 'q', 'd', 'o') from values ('AbCD123-@$#') as tab(c1);
-SELECT mask(c1, -1, -1, 'd', 'o') from values ('AbCD123-@$#') as tab(c1);
-SELECT mask(c1, -1, -1, -1, 'o') from values ('AbCD123-@$#') as tab(c1);
-SELECT mask(c1, -1, -1, -1, -1) from values ('AbCD123-@$#') as tab(c1);
+SELECT mask(c1, NULL, 'q', 'd', 'o') from values ('AbCD123-@$#') as tab(c1);
+SELECT mask(c1, NULL, NULL, 'd', 'o') from values ('AbCD123-@$#') as tab(c1);
+SELECT mask(c1, NULL, NULL, NULL, 'o') from values ('AbCD123-@$#') as tab(c1);
+SELECT mask(c1, NULL, NULL, NULL, NULL) from values ('AbCD123-@$#') as tab(c1);
SELECT mask(c1, NULL, 'q', 'd', 'o') from values ('AbCD123-@$#') as tab(c1);
SELECT mask(c1, 'Q', NULL, 'd', 'o') from values ('AbCD123-@$#') as tab(c1);
SELECT mask(c1, 'Q', 'q', NULL, 'o') from values ('AbCD123-@$#') as tab(c1);
SELECT mask(c1, 'Q', 'q', 'd', NULL) from values ('AbCD123-@$#') as tab(c1);
SELECT mask(NULL, 'Q', 'q', 'd', NULL) from values ('AbCD123-@$#') as tab(c1);
-SELECT mask(NULL, NULL, NULL, NULL, NULL) from values ('AbCD123-@$#') as tab(c1);
SELECT mask('abcd-EFGH-8765-4321');
SELECT mask('abcd-EFGH-8765-4321', 'Q');
SELECT mask('abcd-EFGH-8765-4321', 'Q', 'q');
SELECT mask('abcd-EFGH-8765-4321', 'Q', 'q', 'd');
SELECT mask('abcd-EFGH-8765-4321', 'Q', 'q', 'd', '*');
-SELECT mask('abcd-EFGH-8765-4321', -1, 'q', 'd', '*');
-SELECT mask('abcd-EFGH-8765-4321', -1, -1, 'd', '*');
-SELECT mask('abcd-EFGH-8765-4321', -1, -1, -1, '*');
-SELECT mask('abcd-EFGH-8765-4321', -1, -1, -1, -1);
+SELECT mask('abcd-EFGH-8765-4321', NULL, 'q', 'd', '*');
+SELECT mask('abcd-EFGH-8765-4321', NULL, NULL, 'd', '*');
+SELECT mask('abcd-EFGH-8765-4321', NULL, NULL, NULL, '*');
+SELECT mask('abcd-EFGH-8765-4321', NULL, NULL, NULL, NULL);
SELECT mask(NULL);
-SELECT mask(NULL, -1, 'q', 'd', '*');
-SELECT mask(NULL, -1, -1, 'd', '*');
-SELECT mask(NULL, -1, -1, -1, '*');
-SELECT mask(NULL, -1, -1, -1, -1);
+SELECT mask(NULL, NULL, 'q', 'd', '*');
+SELECT mask(NULL, NULL, NULL, 'd', '*');
+SELECT mask(NULL, NULL, NULL, NULL, '*');
+SELECT mask(NULL, NULL, NULL, NULL, NULL);
SELECT mask(c1) from values ('abcd-EFGH-8765-4321') as tab(c1);
SELECT mask(c1, 'Q') from values ('abcd-EFGH-8765-4321') as tab(c1);
SELECT mask(c1, 'Q', 'q')from values ('abcd-EFGH-8765-4321') as tab(c1);
SELECT mask(c1, 'Q', 'q', 'd') from values ('abcd-EFGH-8765-4321') as tab(c1);
SELECT mask(c1, 'Q', 'q', 'd', '*') from values ('abcd-EFGH-8765-4321') as tab(c1);
-SELECT mask(c1, -1, 'q', 'd', '*') from values ('abcd-EFGH-8765-4321') as tab(c1);
-SELECT mask(c1, -1, -1, 'd', '*') from values ('abcd-EFGH-8765-4321') as tab(c1);
-SELECT mask(c1, -1, -1, -1, '*') from values ('abcd-EFGH-8765-4321') as tab(c1);
-SELECT mask(c1, -1, -1, -1, -1) from values ('abcd-EFGH-8765-4321') as tab(c1);
+SELECT mask(c1, NULL, 'q', 'd', '*') from values ('abcd-EFGH-8765-4321') as tab(c1);
+SELECT mask(c1, NULL, NULL, 'd', '*') from values ('abcd-EFGH-8765-4321') as tab(c1);
+SELECT mask(c1, NULL, NULL, NULL, '*') from values ('abcd-EFGH-8765-4321') as tab(c1);
+SELECT mask(c1, NULL, NULL, NULL, NULL) from values ('abcd-EFGH-8765-4321') as tab(c1);
SELECT mask(c1, replaceArg) from values('abcd-EFGH-8765-4321', 'a') as t(c1, replaceArg);
SELECT mask(c1, replaceArg) from values('abcd-EFGH-8765-4321', 'ABC') as t(c1, replaceArg);
SELECT mask(c1, replaceArg) from values('abcd-EFGH-8765-4321', 123) as t(c1, replaceArg);
diff --git a/sql/core/src/test/resources/sql-tests/results/mask-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/mask-functions.sql.out
index 8fe1176e765..d5345de064c 100644
--- a/sql/core/src/test/resources/sql-tests/results/mask-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/mask-functions.sql.out
@@ -2,7 +2,7 @@
-- !query
SELECT mask('AbCD123-@$#')
-- !query schema
-struct<mask(AbCD123-@$#, X, x, n, -1):string>
+struct<mask(AbCD123-@$#, X, x, n, NULL):string>
-- !query output
XxXXnnn-@$#
@@ -10,7 +10,7 @@ XxXXnnn-@$#
-- !query
SELECT mask('AbCD123-@$#', 'Q')
-- !query schema
-struct<mask(AbCD123-@$#, Q, x, n, -1):string>
+struct<mask(AbCD123-@$#, Q, x, n, NULL):string>
-- !query output
QxQQnnn-@$#
@@ -18,7 +18,7 @@ QxQQnnn-@$#
-- !query
SELECT mask('AbCD123-@$#', 'Q', 'q')
-- !query schema
-struct<mask(AbCD123-@$#, Q, q, n, -1):string>
+struct<mask(AbCD123-@$#, Q, q, n, NULL):string>
-- !query output
QqQQnnn-@$#
@@ -26,7 +26,7 @@ QqQQnnn-@$#
-- !query
SELECT mask('AbCD123-@$#', 'Q', 'q', 'd')
-- !query schema
-struct<mask(AbCD123-@$#, Q, q, d, -1):string>
+struct<mask(AbCD123-@$#, Q, q, d, NULL):string>
-- !query output
QqQQddd-@$#
@@ -63,33 +63,33 @@ org.apache.spark.sql.AnalysisException
-- !query
-SELECT mask('AbCD123-@$#', -1, 'q', 'd', 'o')
+SELECT mask('AbCD123-@$#', NULL, 'q', 'd', 'o')
-- !query schema
-struct<mask(AbCD123-@$#, -1, q, d, o):string>
+struct<mask(AbCD123-@$#, NULL, q, d, o):string>
-- !query output
AqCDdddoooo
-- !query
-SELECT mask('AbCD123-@$#', -1, -1, 'd', 'o')
+SELECT mask('AbCD123-@$#', NULL, NULL, 'd', 'o')
-- !query schema
-struct<mask(AbCD123-@$#, -1, -1, d, o):string>
+struct<mask(AbCD123-@$#, NULL, NULL, d, o):string>
-- !query output
AbCDdddoooo
-- !query
-SELECT mask('AbCD123-@$#', -1, -1, -1, 'o')
+SELECT mask('AbCD123-@$#', NULL, NULL, NULL, 'o')
-- !query schema
-struct<mask(AbCD123-@$#, -1, -1, -1, o):string>
+struct<mask(AbCD123-@$#, NULL, NULL, NULL, o):string>
-- !query output
AbCD123oooo
-- !query
-SELECT mask('AbCD123-@$#', -1, -1, -1, -1)
+SELECT mask('AbCD123-@$#', NULL, NULL, NULL, NULL)
-- !query schema
-struct<mask(AbCD123-@$#, -1, -1, -1, -1):string>
+struct<mask(AbCD123-@$#, NULL, NULL, NULL, NULL):string>
-- !query output
AbCD123-@$#
@@ -97,162 +97,47 @@ AbCD123-@$#
-- !query
SELECT mask(NULL)
-- !query schema
-struct<mask(NULL, X, x, n, -1):string>
--- !query output
-NULL
-
-
--- !query
-SELECT mask(NULL, -1, 'q', 'd', 'o')
--- !query schema
-struct<mask(NULL, -1, q, d, o):string>
+struct<mask(NULL, X, x, n, NULL):string>
-- !query output
NULL
-- !query
-SELECT mask(NULL, -1, -1, 'd', 'o')
+SELECT mask(NULL, NULL, 'q', 'd', 'o')
-- !query schema
-struct<mask(NULL, -1, -1, d, o):string>
+struct<mask(NULL, NULL, q, d, o):string>
-- !query output
NULL
-- !query
-SELECT mask(NULL, -1, -1, -1, 'o')
+SELECT mask(NULL, NULL, NULL, 'd', 'o')
-- !query schema
-struct<mask(NULL, -1, -1, -1, o):string>
+struct<mask(NULL, NULL, NULL, d, o):string>
-- !query output
NULL
-- !query
-SELECT mask(NULL, -1, -1, -1, -1)
+SELECT mask(NULL, NULL, NULL, NULL, 'o')
-- !query schema
-struct<mask(NULL, -1, -1, -1, -1):string>
+struct<mask(NULL, NULL, NULL, NULL, o):string>
-- !query output
NULL
--- !query
-SELECT mask('AbCD123-@$#', NULL, -1, -1, -1)
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-{
- "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_NULL",
- "sqlState" : "42K09",
- "messageParameters" : {
- "exprName" : "upperChar",
- "sqlExpr" : "\"mask(AbCD123-@$#, NULL, -1, -1, -1)\""
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 44,
- "fragment" : "mask('AbCD123-@$#', NULL, -1, -1, -1)"
- } ]
-}
-
-
--- !query
-SELECT mask('AbCD123-@$#', -1, NULL, -1, -1)
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-{
- "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_NULL",
- "sqlState" : "42K09",
- "messageParameters" : {
- "exprName" : "lowerChar",
- "sqlExpr" : "\"mask(AbCD123-@$#, -1, NULL, -1, -1)\""
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 44,
- "fragment" : "mask('AbCD123-@$#', -1, NULL, -1, -1)"
- } ]
-}
-
-
--- !query
-SELECT mask('AbCD123-@$#', -1, -1, NULL, -1)
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-{
- "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_NULL",
- "sqlState" : "42K09",
- "messageParameters" : {
- "exprName" : "digitChar",
- "sqlExpr" : "\"mask(AbCD123-@$#, -1, -1, NULL, -1)\""
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 44,
- "fragment" : "mask('AbCD123-@$#', -1, -1, NULL, -1)"
- } ]
-}
-
-
--- !query
-SELECT mask('AbCD123-@$#', -1, -1, -1, NULL)
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-{
- "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_NULL",
- "sqlState" : "42K09",
- "messageParameters" : {
- "exprName" : "otherChar",
- "sqlExpr" : "\"mask(AbCD123-@$#, -1, -1, -1, NULL)\""
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 44,
- "fragment" : "mask('AbCD123-@$#', -1, -1, -1, NULL)"
- } ]
-}
-
-
-- !query
SELECT mask('AbCD123-@$#', NULL, NULL, NULL, NULL)
-- !query schema
-struct<>
+struct<mask(AbCD123-@$#, NULL, NULL, NULL, NULL):string>
-- !query output
-org.apache.spark.sql.AnalysisException
-{
- "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_NULL",
- "sqlState" : "42K09",
- "messageParameters" : {
- "exprName" : "upperChar",
- "sqlExpr" : "\"mask(AbCD123-@$#, NULL, NULL, NULL, NULL)\""
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 50,
- "fragment" : "mask('AbCD123-@$#', NULL, NULL, NULL, NULL)"
- } ]
-}
+AbCD123-@$#
-- !query
SELECT mask(c1) from values ('AbCD123-@$#') as tab(c1)
-- !query schema
-struct<mask(c1, X, x, n, -1):string>
+struct<mask(c1, X, x, n, NULL):string>
-- !query output
XxXXnnn-@$#
@@ -260,7 +145,7 @@ XxXXnnn-@$#
-- !query
SELECT mask(c1, 'Q') from values ('AbCD123-@$#') as tab(c1)
-- !query schema
-struct<mask(c1, Q, x, n, -1):string>
+struct<mask(c1, Q, x, n, NULL):string>
-- !query output
QxQQnnn-@$#
@@ -268,7 +153,7 @@ QxQQnnn-@$#
-- !query
SELECT mask(c1, 'Q', 'q')from values ('AbCD123-@$#') as tab(c1)
-- !query schema
-struct<mask(c1, Q, q, n, -1):string>
+struct<mask(c1, Q, q, n, NULL):string>
-- !query output
QqQQnnn-@$#
@@ -276,7 +161,7 @@ QqQQnnn-@$#
-- !query
SELECT mask(c1, 'Q', 'q', 'd') from values ('AbCD123-@$#') as tab(c1)
-- !query schema
-struct<mask(c1, Q, q, d, -1):string>
+struct<mask(c1, Q, q, d, NULL):string>
-- !query output
QqQQddd-@$#
@@ -290,33 +175,33 @@ QqQQdddoooo
-- !query
-SELECT mask(c1, -1, 'q', 'd', 'o') from values ('AbCD123-@$#') as tab(c1)
+SELECT mask(c1, NULL, 'q', 'd', 'o') from values ('AbCD123-@$#') as tab(c1)
-- !query schema
-struct<mask(c1, -1, q, d, o):string>
+struct<mask(c1, NULL, q, d, o):string>
-- !query output
AqCDdddoooo
-- !query
-SELECT mask(c1, -1, -1, 'd', 'o') from values ('AbCD123-@$#') as tab(c1)
+SELECT mask(c1, NULL, NULL, 'd', 'o') from values ('AbCD123-@$#') as tab(c1)
-- !query schema
-struct<mask(c1, -1, -1, d, o):string>
+struct<mask(c1, NULL, NULL, d, o):string>
-- !query output
AbCDdddoooo
-- !query
-SELECT mask(c1, -1, -1, -1, 'o') from values ('AbCD123-@$#') as tab(c1)
+SELECT mask(c1, NULL, NULL, NULL, 'o') from values ('AbCD123-@$#') as tab(c1)
-- !query schema
-struct<mask(c1, -1, -1, -1, o):string>
+struct<mask(c1, NULL, NULL, NULL, o):string>
-- !query output
AbCD123oooo
-- !query
-SELECT mask(c1, -1, -1, -1, -1) from values ('AbCD123-@$#') as tab(c1)
+SELECT mask(c1, NULL, NULL, NULL, NULL) from values ('AbCD123-@$#') as tab(c1)
-- !query schema
-struct<mask(c1, -1, -1, -1, -1):string>
+struct<mask(c1, NULL, NULL, NULL, NULL):string>
-- !query output
AbCD123-@$#
@@ -324,145 +209,47 @@ AbCD123-@$#
-- !query
SELECT mask(c1, NULL, 'q', 'd', 'o') from values ('AbCD123-@$#') as tab(c1)
-- !query schema
-struct<>
+struct<mask(c1, NULL, q, d, o):string>
-- !query output
-org.apache.spark.sql.AnalysisException
-{
- "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_NULL",
- "sqlState" : "42K09",
- "messageParameters" : {
- "exprName" : "upperChar",
- "sqlExpr" : "\"mask(c1, NULL, q, d, o)\""
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 36,
- "fragment" : "mask(c1, NULL, 'q', 'd', 'o')"
- } ]
-}
+AqCDdddoooo
-- !query
SELECT mask(c1, 'Q', NULL, 'd', 'o') from values ('AbCD123-@$#') as tab(c1)
-- !query schema
-struct<>
+struct<mask(c1, Q, NULL, d, o):string>
-- !query output
-org.apache.spark.sql.AnalysisException
-{
- "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_NULL",
- "sqlState" : "42K09",
- "messageParameters" : {
- "exprName" : "lowerChar",
- "sqlExpr" : "\"mask(c1, Q, NULL, d, o)\""
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 36,
- "fragment" : "mask(c1, 'Q', NULL, 'd', 'o')"
- } ]
-}
+QbQQdddoooo
-- !query
SELECT mask(c1, 'Q', 'q', NULL, 'o') from values ('AbCD123-@$#') as tab(c1)
-- !query schema
-struct<>
+struct<mask(c1, Q, q, NULL, o):string>
-- !query output
-org.apache.spark.sql.AnalysisException
-{
- "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_NULL",
- "sqlState" : "42K09",
- "messageParameters" : {
- "exprName" : "digitChar",
- "sqlExpr" : "\"mask(c1, Q, q, NULL, o)\""
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 36,
- "fragment" : "mask(c1, 'Q', 'q', NULL, 'o')"
- } ]
-}
+QqQQ123oooo
-- !query
SELECT mask(c1, 'Q', 'q', 'd', NULL) from values ('AbCD123-@$#') as tab(c1)
-- !query schema
-struct<>
+struct<mask(c1, Q, q, d, NULL):string>
-- !query output
-org.apache.spark.sql.AnalysisException
-{
- "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_NULL",
- "sqlState" : "42K09",
- "messageParameters" : {
- "exprName" : "otherChar",
- "sqlExpr" : "\"mask(c1, Q, q, d, NULL)\""
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 36,
- "fragment" : "mask(c1, 'Q', 'q', 'd', NULL)"
- } ]
-}
+QqQQddd-@$#
-- !query
SELECT mask(NULL, 'Q', 'q', 'd', NULL) from values ('AbCD123-@$#') as tab(c1)
-- !query schema
-struct<>
+struct<mask(NULL, Q, q, d, NULL):string>
-- !query output
-org.apache.spark.sql.AnalysisException
-{
- "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_NULL",
- "sqlState" : "42K09",
- "messageParameters" : {
- "exprName" : "otherChar",
- "sqlExpr" : "\"mask(NULL, Q, q, d, NULL)\""
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 38,
- "fragment" : "mask(NULL, 'Q', 'q', 'd', NULL)"
- } ]
-}
-
-
--- !query
-SELECT mask(NULL, NULL, NULL, NULL, NULL) from values ('AbCD123-@$#') as tab(c1)
--- !query schema
-struct<>
--- !query output
-org.apache.spark.sql.AnalysisException
-{
- "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_NULL",
- "sqlState" : "42K09",
- "messageParameters" : {
- "exprName" : "upperChar",
- "sqlExpr" : "\"mask(NULL, NULL, NULL, NULL, NULL)\""
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 41,
- "fragment" : "mask(NULL, NULL, NULL, NULL, NULL)"
- } ]
-}
+NULL
-- !query
SELECT mask('abcd-EFGH-8765-4321')
-- !query schema
-struct<mask(abcd-EFGH-8765-4321, X, x, n, -1):string>
+struct<mask(abcd-EFGH-8765-4321, X, x, n, NULL):string>
-- !query output
xxxx-XXXX-nnnn-nnnn
@@ -470,7 +257,7 @@ xxxx-XXXX-nnnn-nnnn
-- !query
SELECT mask('abcd-EFGH-8765-4321', 'Q')
-- !query schema
-struct<mask(abcd-EFGH-8765-4321, Q, x, n, -1):string>
+struct<mask(abcd-EFGH-8765-4321, Q, x, n, NULL):string>
-- !query output
xxxx-QQQQ-nnnn-nnnn
@@ -478,7 +265,7 @@ xxxx-QQQQ-nnnn-nnnn
-- !query
SELECT mask('abcd-EFGH-8765-4321', 'Q', 'q')
-- !query schema
-struct<mask(abcd-EFGH-8765-4321, Q, q, n, -1):string>
+struct<mask(abcd-EFGH-8765-4321, Q, q, n, NULL):string>
-- !query output
qqqq-QQQQ-nnnn-nnnn
@@ -486,7 +273,7 @@ qqqq-QQQQ-nnnn-nnnn
-- !query
SELECT mask('abcd-EFGH-8765-4321', 'Q', 'q', 'd')
-- !query schema
-struct<mask(abcd-EFGH-8765-4321, Q, q, d, -1):string>
+struct<mask(abcd-EFGH-8765-4321, Q, q, d, NULL):string>
-- !query output
qqqq-QQQQ-dddd-dddd
@@ -500,33 +287,33 @@ qqqq*QQQQ*dddd*dddd
-- !query
-SELECT mask('abcd-EFGH-8765-4321', -1, 'q', 'd', '*')
+SELECT mask('abcd-EFGH-8765-4321', NULL, 'q', 'd', '*')
-- !query schema
-struct<mask(abcd-EFGH-8765-4321, -1, q, d, *):string>
+struct<mask(abcd-EFGH-8765-4321, NULL, q, d, *):string>
-- !query output
qqqq*EFGH*dddd*dddd
-- !query
-SELECT mask('abcd-EFGH-8765-4321', -1, -1, 'd', '*')
+SELECT mask('abcd-EFGH-8765-4321', NULL, NULL, 'd', '*')
-- !query schema
-struct<mask(abcd-EFGH-8765-4321, -1, -1, d, *):string>
+struct<mask(abcd-EFGH-8765-4321, NULL, NULL, d, *):string>
-- !query output
abcd*EFGH*dddd*dddd
-- !query
-SELECT mask('abcd-EFGH-8765-4321', -1, -1, -1, '*')
+SELECT mask('abcd-EFGH-8765-4321', NULL, NULL, NULL, '*')
-- !query schema
-struct<mask(abcd-EFGH-8765-4321, -1, -1, -1, *):string>
+struct<mask(abcd-EFGH-8765-4321, NULL, NULL, NULL, *):string>
-- !query output
abcd*EFGH*8765*4321
-- !query
-SELECT mask('abcd-EFGH-8765-4321', -1, -1, -1, -1)
+SELECT mask('abcd-EFGH-8765-4321', NULL, NULL, NULL, NULL)
-- !query schema
-struct<mask(abcd-EFGH-8765-4321, -1, -1, -1, -1):string>
+struct<mask(abcd-EFGH-8765-4321, NULL, NULL, NULL, NULL):string>
-- !query output
abcd-EFGH-8765-4321
@@ -534,39 +321,39 @@ abcd-EFGH-8765-4321
-- !query
SELECT mask(NULL)
-- !query schema
-struct<mask(NULL, X, x, n, -1):string>
+struct<mask(NULL, X, x, n, NULL):string>
-- !query output
NULL
-- !query
-SELECT mask(NULL, -1, 'q', 'd', '*')
+SELECT mask(NULL, NULL, 'q', 'd', '*')
-- !query schema
-struct<mask(NULL, -1, q, d, *):string>
+struct<mask(NULL, NULL, q, d, *):string>
-- !query output
NULL
-- !query
-SELECT mask(NULL, -1, -1, 'd', '*')
+SELECT mask(NULL, NULL, NULL, 'd', '*')
-- !query schema
-struct<mask(NULL, -1, -1, d, *):string>
+struct<mask(NULL, NULL, NULL, d, *):string>
-- !query output
NULL
-- !query
-SELECT mask(NULL, -1, -1, -1, '*')
+SELECT mask(NULL, NULL, NULL, NULL, '*')
-- !query schema
-struct<mask(NULL, -1, -1, -1, *):string>
+struct<mask(NULL, NULL, NULL, NULL, *):string>
-- !query output
NULL
-- !query
-SELECT mask(NULL, -1, -1, -1, -1)
+SELECT mask(NULL, NULL, NULL, NULL, NULL)
-- !query schema
-struct<mask(NULL, -1, -1, -1, -1):string>
+struct<mask(NULL, NULL, NULL, NULL, NULL):string>
-- !query output
NULL
@@ -574,7 +361,7 @@ NULL
-- !query
SELECT mask(c1) from values ('abcd-EFGH-8765-4321') as tab(c1)
-- !query schema
-struct<mask(c1, X, x, n, -1):string>
+struct<mask(c1, X, x, n, NULL):string>
-- !query output
xxxx-XXXX-nnnn-nnnn
@@ -582,7 +369,7 @@ xxxx-XXXX-nnnn-nnnn
-- !query
SELECT mask(c1, 'Q') from values ('abcd-EFGH-8765-4321') as tab(c1)
-- !query schema
-struct<mask(c1, Q, x, n, -1):string>
+struct<mask(c1, Q, x, n, NULL):string>
-- !query output
xxxx-QQQQ-nnnn-nnnn
@@ -590,7 +377,7 @@ xxxx-QQQQ-nnnn-nnnn
-- !query
SELECT mask(c1, 'Q', 'q')from values ('abcd-EFGH-8765-4321') as tab(c1)
-- !query schema
-struct<mask(c1, Q, q, n, -1):string>
+struct<mask(c1, Q, q, n, NULL):string>
-- !query output
qqqq-QQQQ-nnnn-nnnn
@@ -598,7 +385,7 @@ qqqq-QQQQ-nnnn-nnnn
-- !query
SELECT mask(c1, 'Q', 'q', 'd') from values ('abcd-EFGH-8765-4321') as tab(c1)
-- !query schema
-struct<mask(c1, Q, q, d, -1):string>
+struct<mask(c1, Q, q, d, NULL):string>
-- !query output
qqqq-QQQQ-dddd-dddd
@@ -612,33 +399,33 @@ qqqq*QQQQ*dddd*dddd
-- !query
-SELECT mask(c1, -1, 'q', 'd', '*') from values ('abcd-EFGH-8765-4321') as tab(c1)
+SELECT mask(c1, NULL, 'q', 'd', '*') from values ('abcd-EFGH-8765-4321') as tab(c1)
-- !query schema
-struct<mask(c1, -1, q, d, *):string>
+struct<mask(c1, NULL, q, d, *):string>
-- !query output
qqqq*EFGH*dddd*dddd
-- !query
-SELECT mask(c1, -1, -1, 'd', '*') from values ('abcd-EFGH-8765-4321') as tab(c1)
+SELECT mask(c1, NULL, NULL, 'd', '*') from values ('abcd-EFGH-8765-4321') as tab(c1)
-- !query schema
-struct<mask(c1, -1, -1, d, *):string>
+struct<mask(c1, NULL, NULL, d, *):string>
-- !query output
abcd*EFGH*dddd*dddd
-- !query
-SELECT mask(c1, -1, -1, -1, '*') from values ('abcd-EFGH-8765-4321') as tab(c1)
+SELECT mask(c1, NULL, NULL, NULL, '*') from values ('abcd-EFGH-8765-4321') as tab(c1)
-- !query schema
-struct<mask(c1, -1, -1, -1, *):string>
+struct<mask(c1, NULL, NULL, NULL, *):string>
-- !query output
abcd*EFGH*8765*4321
-- !query
-SELECT mask(c1, -1, -1, -1, -1) from values ('abcd-EFGH-8765-4321') as tab(c1)
+SELECT mask(c1, NULL, NULL, NULL, NULL) from values ('abcd-EFGH-8765-4321') as tab(c1)
-- !query schema
-struct<mask(c1, -1, -1, -1, -1):string>
+struct<mask(c1, NULL, NULL, NULL, NULL):string>
-- !query output
abcd-EFGH-8765-4321
@@ -656,7 +443,7 @@ org.apache.spark.sql.AnalysisException
"inputExpr" : "\"replaceArg\"",
"inputName" : "upperChar",
"inputType" : "\"STRING\"",
- "sqlExpr" : "\"mask(c1, replaceArg, x, n, -1)\""
+ "sqlExpr" : "\"mask(c1, replaceArg, x, n, NULL)\""
},
"queryContext" : [ {
"objectType" : "",
@@ -681,7 +468,7 @@ org.apache.spark.sql.AnalysisException
"inputExpr" : "\"replaceArg\"",
"inputName" : "upperChar",
"inputType" : "\"STRING\"",
- "sqlExpr" : "\"mask(c1, replaceArg, x, n, -1)\""
+ "sqlExpr" : "\"mask(c1, replaceArg, x, n, NULL)\""
},
"queryContext" : [ {
"objectType" : "",
@@ -700,13 +487,14 @@ struct<>
-- !query output
org.apache.spark.sql.AnalysisException
{
- "errorClass" : "DATATYPE_MISMATCH.NON_FOLDABLE_INPUT",
+ "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
"sqlState" : "42K09",
"messageParameters" : {
- "inputExpr" : "\"replaceArg\"",
- "inputName" : "upperChar",
- "inputType" : "\"STRING\"",
- "sqlExpr" : "\"mask(c1, replaceArg, x, n, -1)\""
+ "inputSql" : "\"replaceArg\"",
+ "inputType" : "\"INT\"",
+ "paramIndex" : "2",
+ "requiredType" : "\"STRING\"",
+ "sqlExpr" : "\"mask(c1, replaceArg, x, n, NULL)\""
},
"queryContext" : [ {
"objectType" : "",
@@ -729,7 +517,7 @@ org.apache.spark.sql.AnalysisException
"sqlState" : "42K09",
"messageParameters" : {
"exprName" : "digitChar",
- "sqlExpr" : "\"mask(abcd-EFGH-8765-4321, A, w, , -1)\""
+ "sqlExpr" : "\"mask(abcd-EFGH-8765-4321, A, w, , NULL)\""
},
"queryContext" : [ {
"objectType" : "",
@@ -752,7 +540,7 @@ org.apache.spark.sql.AnalysisException
"sqlState" : "42K09",
"messageParameters" : {
"exprName" : "lowerChar",
- "sqlExpr" : "\"mask(abcd-EFGH-8765-4321, A, abc, n, -1)\""
+ "sqlExpr" : "\"mask(abcd-EFGH-8765-4321, A, abc, n, NULL)\""
},
"queryContext" : [ {
"objectType" : "",