This is an automated email from the ASF dual-hosted git repository.
yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 8c4ca7e61228 [SPARK-48693][SQL] Simplify and unify toString of Invoke
and StaticInvoke
8c4ca7e61228 is described below
commit 8c4ca7e6122869702d8b4fe37a499f809b3a51eb
Author: Kent Yao <[email protected]>
AuthorDate: Tue Jun 25 18:36:07 2024 +0800
[SPARK-48693][SQL] Simplify and unify toString of Invoke and StaticInvoke
### What changes were proposed in this pull request?
The `StaticInvoke` class is used extensively by `RuntimeReplaceable`
expressions; due to its ugly string representation, a plan with multiple or
nested `StaticInvoke` calls is hard to read.
This PR overrides `StaticInvoke`'s toString method to improve its
readability.
```diff
Project [left(c7#x, 2) AS left(c7, 2)#x, left(c8#x, 2) AS left(c8, 2)#x,
left(v#x, 3) AS left(v, 3)#x, left(s#x, 2) AS left(s, 2)#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, tru
e, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#
x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x,
7)) AS c7#x, static_invoke(CharVarcharCodegenUtils.readSideP
adding(c8#x, 8)) AS c8#x, v#x, s#x]
```
In contrast, `Invoke`'s toString is overly simple, losing its children's
string representations.
### Why are the changes needed?
improve plan readability and consistency
### Does this PR introduce _any_ user-facing change?
Yes, a plan containing `StaticInvoke` will change its string representation.
### How was this patch tested?
existing modified tests
### Was this patch authored or co-authored using generative AI tooling?
no
Closes #47066 from yaooqinn/SPARK-48693.
Authored-by: Kent Yao <[email protected]>
Signed-off-by: Kent Yao <[email protected]>
---
.../explain-results/function_aes_decrypt.explain | 2 +-
.../function_aes_decrypt_with_mode.explain | 2 +-
.../function_aes_decrypt_with_mode_padding.explain | 2 +-
...ction_aes_decrypt_with_mode_padding_aad.explain | 2 +-
.../explain-results/function_aes_encrypt.explain | 2 +-
.../function_aes_encrypt_with_mode.explain | 2 +-
.../function_aes_encrypt_with_mode_padding.explain | 2 +-
...nction_aes_encrypt_with_mode_padding_iv.explain | 2 +-
...on_aes_encrypt_with_mode_padding_iv_aad.explain | 2 +-
.../function_bitmap_bit_position.explain | 2 +-
.../function_bitmap_bucket_number.explain | 2 +-
.../explain-results/function_bitmap_count.explain | 2 +-
.../explain-results/function_decode.explain | 2 +-
.../explain-results/function_encode.explain | 2 +-
.../function_is_variant_null.explain | 2 +-
.../explain-results/function_lpad_binary.explain | 2 +-
.../explain-results/function_parse_json.explain | 2 +-
.../explain-results/function_rpad_binary.explain | 2 +-
.../function_schema_of_variant.explain | 2 +-
.../function_schema_of_variant_agg.explain | 2 +-
.../function_to_binary_with_format.explain | 2 +-
.../function_try_aes_decrypt.explain | 2 +-
.../function_try_aes_decrypt_with_mode.explain | 2 +-
...ction_try_aes_decrypt_with_mode_padding.explain | 2 +-
...n_try_aes_decrypt_with_mode_padding_aad.explain | 2 +-
.../function_try_parse_json.explain | 2 +-
.../function_try_variant_get.explain | 2 +-
.../explain-results/function_url_decode.explain | 2 +-
.../explain-results/function_url_encode.explain | 2 +-
.../explain-results/function_variant_get.explain | 2 +-
.../sql/catalyst/expressions/objects/objects.scala | 12 +++-
.../sql-tests/analyzer-results/charvarchar.sql.out | 66 +++++++++++-----------
32 files changed, 74 insertions(+), 64 deletions(-)
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_decrypt.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_decrypt.explain
index 55f1c314671a..8321eb8beb92 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_decrypt.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_decrypt.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class
org.apache.spark.sql.catalyst.expressions.ExpressionImplUtils, BinaryType,
aesDecrypt, cast(g#0 as binary), cast(g#0 as binary), GCM, DEFAULT, cast( as
binary), BinaryType, BinaryType, StringTypeAnyCollation,
StringTypeAnyCollation, BinaryType, true, true, true) AS aes_decrypt(g, g, GCM,
DEFAULT, )#0]
+Project [static_invoke(ExpressionImplUtils.aesDecrypt(cast(g#0 as binary),
cast(g#0 as binary), GCM, DEFAULT, cast( as binary))) AS aes_decrypt(g, g, GCM,
DEFAULT, )#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_decrypt_with_mode.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_decrypt_with_mode.explain
index 762a4f47a058..1a721c372c10 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_decrypt_with_mode.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_decrypt_with_mode.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class
org.apache.spark.sql.catalyst.expressions.ExpressionImplUtils, BinaryType,
aesDecrypt, cast(g#0 as binary), cast(g#0 as binary), g#0, DEFAULT, cast( as
binary), BinaryType, BinaryType, StringTypeAnyCollation,
StringTypeAnyCollation, BinaryType, true, true, true) AS aes_decrypt(g, g, g,
DEFAULT, )#0]
+Project [static_invoke(ExpressionImplUtils.aesDecrypt(cast(g#0 as binary),
cast(g#0 as binary), g#0, DEFAULT, cast( as binary))) AS aes_decrypt(g, g, g,
DEFAULT, )#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_decrypt_with_mode_padding.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_decrypt_with_mode_padding.explain
index 7c31c1754c3b..0d87c8b40853 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_decrypt_with_mode_padding.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_decrypt_with_mode_padding.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class
org.apache.spark.sql.catalyst.expressions.ExpressionImplUtils, BinaryType,
aesDecrypt, cast(g#0 as binary), cast(g#0 as binary), g#0, g#0, cast( as
binary), BinaryType, BinaryType, StringTypeAnyCollation,
StringTypeAnyCollation, BinaryType, true, true, true) AS aes_decrypt(g, g, g,
g, )#0]
+Project [static_invoke(ExpressionImplUtils.aesDecrypt(cast(g#0 as binary),
cast(g#0 as binary), g#0, g#0, cast( as binary))) AS aes_decrypt(g, g, g, g,
)#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_decrypt_with_mode_padding_aad.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_decrypt_with_mode_padding_aad.explain
index 48b640efb376..3afae44e97dd 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_decrypt_with_mode_padding_aad.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_decrypt_with_mode_padding_aad.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class
org.apache.spark.sql.catalyst.expressions.ExpressionImplUtils, BinaryType,
aesDecrypt, cast(g#0 as binary), cast(g#0 as binary), g#0, g#0, cast(g#0 as
binary), BinaryType, BinaryType, StringTypeAnyCollation,
StringTypeAnyCollation, BinaryType, true, true, true) AS aes_decrypt(g, g, g,
g, g)#0]
+Project [static_invoke(ExpressionImplUtils.aesDecrypt(cast(g#0 as binary),
cast(g#0 as binary), g#0, g#0, cast(g#0 as binary))) AS aes_decrypt(g, g, g, g,
g)#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_encrypt.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_encrypt.explain
index d88a71848572..9f88193ce3e3 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_encrypt.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_encrypt.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class
org.apache.spark.sql.catalyst.expressions.ExpressionImplUtils, BinaryType,
aesEncrypt, cast(g#0 as binary), cast(g#0 as binary), GCM, DEFAULT, cast( as
binary), cast( as binary), BinaryType, BinaryType, StringTypeAnyCollation,
StringTypeAnyCollation, BinaryType, BinaryType, true, true, true) AS
aes_encrypt(g, g, GCM, DEFAULT, , )#0]
+Project [static_invoke(ExpressionImplUtils.aesEncrypt(cast(g#0 as binary),
cast(g#0 as binary), GCM, DEFAULT, cast( as binary), cast( as binary))) AS
aes_encrypt(g, g, GCM, DEFAULT, , )#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_encrypt_with_mode.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_encrypt_with_mode.explain
index 59fb110a8359..97163bf0f7c3 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_encrypt_with_mode.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_encrypt_with_mode.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class
org.apache.spark.sql.catalyst.expressions.ExpressionImplUtils, BinaryType,
aesEncrypt, cast(g#0 as binary), cast(g#0 as binary), g#0, DEFAULT, cast( as
binary), cast( as binary), BinaryType, BinaryType, StringTypeAnyCollation,
StringTypeAnyCollation, BinaryType, BinaryType, true, true, true) AS
aes_encrypt(g, g, g, DEFAULT, , )#0]
+Project [static_invoke(ExpressionImplUtils.aesEncrypt(cast(g#0 as binary),
cast(g#0 as binary), g#0, DEFAULT, cast( as binary), cast( as binary))) AS
aes_encrypt(g, g, g, DEFAULT, , )#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_encrypt_with_mode_padding.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_encrypt_with_mode_padding.explain
index 80912e43353c..35fdd3df3e6b 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_encrypt_with_mode_padding.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_encrypt_with_mode_padding.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class
org.apache.spark.sql.catalyst.expressions.ExpressionImplUtils, BinaryType,
aesEncrypt, cast(g#0 as binary), cast(g#0 as binary), g#0, g#0, cast( as
binary), cast( as binary), BinaryType, BinaryType, StringTypeAnyCollation,
StringTypeAnyCollation, BinaryType, BinaryType, true, true, true) AS
aes_encrypt(g, g, g, g, , )#0]
+Project [static_invoke(ExpressionImplUtils.aesEncrypt(cast(g#0 as binary),
cast(g#0 as binary), g#0, g#0, cast( as binary), cast( as binary))) AS
aes_encrypt(g, g, g, g, , )#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_encrypt_with_mode_padding_iv.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_encrypt_with_mode_padding_iv.explain
index 6d61e3c7d097..0d566721e51d 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_encrypt_with_mode_padding_iv.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_encrypt_with_mode_padding_iv.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class
org.apache.spark.sql.catalyst.expressions.ExpressionImplUtils, BinaryType,
aesEncrypt, cast(g#0 as binary), cast(g#0 as binary), g#0, g#0, 0x434445, cast(
as binary), BinaryType, BinaryType, StringTypeAnyCollation,
StringTypeAnyCollation, BinaryType, BinaryType, true, true, true) AS
aes_encrypt(g, g, g, g, X'434445', )#0]
+Project [static_invoke(ExpressionImplUtils.aesEncrypt(cast(g#0 as binary),
cast(g#0 as binary), g#0, g#0, 0x434445, cast( as binary))) AS aes_encrypt(g,
g, g, g, X'434445', )#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_encrypt_with_mode_padding_iv_aad.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_encrypt_with_mode_padding_iv_aad.explain
index 9d0bdb901d7e..755332cca5ed 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_encrypt_with_mode_padding_iv_aad.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_aes_encrypt_with_mode_padding_iv_aad.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class
org.apache.spark.sql.catalyst.expressions.ExpressionImplUtils, BinaryType,
aesEncrypt, cast(g#0 as binary), cast(g#0 as binary), g#0, g#0, 0x434445,
cast(g#0 as binary), BinaryType, BinaryType, StringTypeAnyCollation,
StringTypeAnyCollation, BinaryType, BinaryType, true, true, true) AS
aes_encrypt(g, g, g, g, X'434445', g)#0]
+Project [static_invoke(ExpressionImplUtils.aesEncrypt(cast(g#0 as binary),
cast(g#0 as binary), g#0, g#0, 0x434445, cast(g#0 as binary))) AS
aes_encrypt(g, g, g, g, X'434445', g)#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_bitmap_bit_position.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_bitmap_bit_position.explain
index 61a15dd4c945..76b460ad4d04 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_bitmap_bit_position.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_bitmap_bit_position.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class
org.apache.spark.sql.catalyst.expressions.BitmapExpressionUtils, LongType,
bitmapBitPosition, id#0L, LongType, true, false, true) AS
bitmap_bit_position(id)#0L]
+Project [static_invoke(BitmapExpressionUtils.bitmapBitPosition(id#0L)) AS
bitmap_bit_position(id)#0L]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_bitmap_bucket_number.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_bitmap_bucket_number.explain
index 61a15dd4c945..76b460ad4d04 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_bitmap_bucket_number.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_bitmap_bucket_number.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class
org.apache.spark.sql.catalyst.expressions.BitmapExpressionUtils, LongType,
bitmapBitPosition, id#0L, LongType, true, false, true) AS
bitmap_bit_position(id)#0L]
+Project [static_invoke(BitmapExpressionUtils.bitmapBitPosition(id#0L)) AS
bitmap_bit_position(id)#0L]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_bitmap_count.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_bitmap_count.explain
index da43425c3ec0..c2783bff65ee 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_bitmap_count.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_bitmap_count.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class
org.apache.spark.sql.catalyst.expressions.BitmapExpressionUtils, LongType,
bitmapCount, bytes#0, BinaryType, true, false, true) AS bitmap_count(bytes)#0L]
+Project [static_invoke(BitmapExpressionUtils.bitmapCount(bytes#0)) AS
bitmap_count(bytes)#0L]
+- LocalRelation <empty>, [id#0L, bytes#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_decode.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_decode.explain
index e1a445120c13..c7f2e4cf9c76 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_decode.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_decode.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class
org.apache.spark.sql.catalyst.expressions.StringDecode, StringType, decode,
cast(g#0 as binary), UTF-8, false, false, BinaryType, StringTypeAnyCollation,
BooleanType, BooleanType, true, true, true) AS decode(g, UTF-8)#0]
+Project [static_invoke(StringDecode.decode(cast(g#0 as binary), UTF-8, false,
false)) AS decode(g, UTF-8)#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_encode.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_encode.explain
index 7ce8776d754d..3f36f5e4451b 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_encode.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_encode.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class org.apache.spark.sql.catalyst.expressions.Encode,
BinaryType, encode, g#0, UTF-8, false, false, StringTypeAnyCollation,
StringTypeAnyCollation, BooleanType, BooleanType, true, true, true) AS
encode(g, UTF-8)#0]
+Project [static_invoke(Encode.encode(g#0, UTF-8, false, false)) AS encode(g,
UTF-8)#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_is_variant_null.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_is_variant_null.explain
index 3c0b4fd87d9d..e750021ce22b 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_is_variant_null.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_is_variant_null.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class
org.apache.spark.sql.catalyst.expressions.variant.VariantExpressionEvalUtils$,
BooleanType, isVariantNull, staticinvoke(class
org.apache.spark.sql.catalyst.expressions.variant.VariantExpressionEvalUtils$,
VariantType, parseJson, g#0, true, StringTypeAnyCollation, BooleanType, true,
false, true), VariantType, false, false, true) AS
is_variant_null(parse_json(g))#0]
+Project
[static_invoke(VariantExpressionEvalUtils.isVariantNull(static_invoke(VariantExpressionEvalUtils.parseJson(g#0,
true)))) AS is_variant_null(parse_json(g))#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_lpad_binary.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_lpad_binary.explain
index 4efc5a3709b6..50b50a19a49c 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_lpad_binary.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_lpad_binary.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class org.apache.spark.unsafe.types.ByteArray,
BinaryType, lpad, bytes#0, 5, 0x0C0A0F0E, BinaryType, IntegerType, BinaryType,
true, false, true) AS lpad(bytes, 5, X'0C0A0F0E')#0]
+Project [static_invoke(ByteArray.lpad(bytes#0, 5, 0x0C0A0F0E)) AS lpad(bytes,
5, X'0C0A0F0E')#0]
+- LocalRelation <empty>, [id#0L, bytes#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_parse_json.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_parse_json.explain
index 9ba74d04b02a..cbcf803b3901 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_parse_json.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_parse_json.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class
org.apache.spark.sql.catalyst.expressions.variant.VariantExpressionEvalUtils$,
VariantType, parseJson, g#0, true, StringTypeAnyCollation, BooleanType, true,
false, true) AS parse_json(g)#0]
+Project [static_invoke(VariantExpressionEvalUtils.parseJson(g#0, true)) AS
parse_json(g)#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_rpad_binary.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_rpad_binary.explain
index 10d77eef1cb6..5726552fe429 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_rpad_binary.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_rpad_binary.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class org.apache.spark.unsafe.types.ByteArray,
BinaryType, rpad, bytes#0, 5, 0x0B0A0B0E, BinaryType, IntegerType, BinaryType,
true, false, true) AS rpad(bytes, 5, X'0B0A0B0E')#0]
+Project [static_invoke(ByteArray.rpad(bytes#0, 5, 0x0B0A0B0E)) AS rpad(bytes,
5, X'0B0A0B0E')#0]
+- LocalRelation <empty>, [id#0L, bytes#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_schema_of_variant.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_schema_of_variant.explain
index d61db9f5394c..04b33fdd7067 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_schema_of_variant.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_schema_of_variant.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class
org.apache.spark.sql.catalyst.expressions.variant.SchemaOfVariant$, StringType,
schemaOfVariant, staticinvoke(class
org.apache.spark.sql.catalyst.expressions.variant.VariantExpressionEvalUtils$,
VariantType, parseJson, g#0, true, StringTypeAnyCollation, BooleanType, true,
false, true), VariantType, true, false, true) AS
schema_of_variant(parse_json(g))#0]
+Project
[static_invoke(SchemaOfVariant.schemaOfVariant(static_invoke(VariantExpressionEvalUtils.parseJson(g#0,
true)))) AS schema_of_variant(parse_json(g))#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_schema_of_variant_agg.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_schema_of_variant_agg.explain
index 36f8920ce10c..18e8801bb298 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_schema_of_variant_agg.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_schema_of_variant_agg.explain
@@ -1,2 +1,2 @@
-Aggregate [schema_of_variant_agg(staticinvoke(class
org.apache.spark.sql.catalyst.expressions.variant.VariantExpressionEvalUtils$,
VariantType, parseJson, g#0, true, StringTypeAnyCollation, BooleanType, true,
false, true), 0, 0) AS schema_of_variant_agg(parse_json(g))#0]
+Aggregate
[schema_of_variant_agg(static_invoke(VariantExpressionEvalUtils.parseJson(g#0,
true)), 0, 0) AS schema_of_variant_agg(parse_json(g))#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_to_binary_with_format.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_to_binary_with_format.explain
index d999697a4c9e..3017720acbaf 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_to_binary_with_format.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_to_binary_with_format.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class org.apache.spark.sql.catalyst.expressions.Encode,
BinaryType, encode, g#0, UTF-8, false, false, StringTypeAnyCollation,
StringTypeAnyCollation, BooleanType, BooleanType, true, true, true) AS
to_binary(g, utf-8)#0]
+Project [static_invoke(Encode.encode(g#0, UTF-8, false, false)) AS
to_binary(g, utf-8)#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_aes_decrypt.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_aes_decrypt.explain
index 56d4c6eb0e0a..8ab4b477bb55 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_aes_decrypt.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_aes_decrypt.explain
@@ -1,2 +1,2 @@
-Project [tryeval(staticinvoke(class
org.apache.spark.sql.catalyst.expressions.ExpressionImplUtils, BinaryType,
aesDecrypt, cast(g#0 as binary), cast(g#0 as binary), GCM, DEFAULT, cast( as
binary), BinaryType, BinaryType, StringTypeAnyCollation,
StringTypeAnyCollation, BinaryType, true, true, true)) AS try_aes_decrypt(g, g,
GCM, DEFAULT, )#0]
+Project [tryeval(static_invoke(ExpressionImplUtils.aesDecrypt(cast(g#0 as
binary), cast(g#0 as binary), GCM, DEFAULT, cast( as binary)))) AS
try_aes_decrypt(g, g, GCM, DEFAULT, )#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_aes_decrypt_with_mode.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_aes_decrypt_with_mode.explain
index 6b46dbd067ad..e45fef8af254 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_aes_decrypt_with_mode.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_aes_decrypt_with_mode.explain
@@ -1,2 +1,2 @@
-Project [tryeval(staticinvoke(class
org.apache.spark.sql.catalyst.expressions.ExpressionImplUtils, BinaryType,
aesDecrypt, cast(g#0 as binary), cast(g#0 as binary), g#0, DEFAULT, cast( as
binary), BinaryType, BinaryType, StringTypeAnyCollation,
StringTypeAnyCollation, BinaryType, true, true, true)) AS try_aes_decrypt(g, g,
g, DEFAULT, )#0]
+Project [tryeval(static_invoke(ExpressionImplUtils.aesDecrypt(cast(g#0 as
binary), cast(g#0 as binary), g#0, DEFAULT, cast( as binary)))) AS
try_aes_decrypt(g, g, g, DEFAULT, )#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_aes_decrypt_with_mode_padding.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_aes_decrypt_with_mode_padding.explain
index 9436cc826022..cdee84b92bc2 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_aes_decrypt_with_mode_padding.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_aes_decrypt_with_mode_padding.explain
@@ -1,2 +1,2 @@
-Project [tryeval(staticinvoke(class
org.apache.spark.sql.catalyst.expressions.ExpressionImplUtils, BinaryType,
aesDecrypt, cast(g#0 as binary), cast(g#0 as binary), g#0, g#0, cast( as
binary), BinaryType, BinaryType, StringTypeAnyCollation,
StringTypeAnyCollation, BinaryType, true, true, true)) AS try_aes_decrypt(g, g,
g, g, )#0]
+Project [tryeval(static_invoke(ExpressionImplUtils.aesDecrypt(cast(g#0 as
binary), cast(g#0 as binary), g#0, g#0, cast( as binary)))) AS
try_aes_decrypt(g, g, g, g, )#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_aes_decrypt_with_mode_padding_aad.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_aes_decrypt_with_mode_padding_aad.explain
index c8182e3b05dd..b5eb4258b525 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_aes_decrypt_with_mode_padding_aad.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_aes_decrypt_with_mode_padding_aad.explain
@@ -1,2 +1,2 @@
-Project [tryeval(staticinvoke(class
org.apache.spark.sql.catalyst.expressions.ExpressionImplUtils, BinaryType,
aesDecrypt, cast(g#0 as binary), cast(g#0 as binary), g#0, g#0, cast(g#0 as
binary), BinaryType, BinaryType, StringTypeAnyCollation,
StringTypeAnyCollation, BinaryType, true, true, true)) AS try_aes_decrypt(g, g,
g, g, g)#0]
+Project [tryeval(static_invoke(ExpressionImplUtils.aesDecrypt(cast(g#0 as
binary), cast(g#0 as binary), g#0, g#0, cast(g#0 as binary)))) AS
try_aes_decrypt(g, g, g, g, g)#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_parse_json.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_parse_json.explain
index fda72dae1a74..826ec4fc81d8 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_parse_json.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_parse_json.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class
org.apache.spark.sql.catalyst.expressions.variant.VariantExpressionEvalUtils$,
VariantType, parseJson, g#0, false, StringTypeAnyCollation, BooleanType, true,
true, true) AS try_parse_json(g)#0]
+Project [static_invoke(VariantExpressionEvalUtils.parseJson(g#0, false)) AS
try_parse_json(g)#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_variant_get.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_variant_get.explain
index 143bd113fd87..933fbff8e1f3 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_variant_get.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_try_variant_get.explain
@@ -1,2 +1,2 @@
-Project [try_variant_get(staticinvoke(class
org.apache.spark.sql.catalyst.expressions.variant.VariantExpressionEvalUtils$,
VariantType, parseJson, g#0, true, StringTypeAnyCollation, BooleanType, true,
false, true), $, IntegerType, false, Some(America/Los_Angeles)) AS
try_variant_get(parse_json(g), $)#0]
+Project
[try_variant_get(static_invoke(VariantExpressionEvalUtils.parseJson(g#0,
true)), $, IntegerType, false, Some(America/Los_Angeles)) AS
try_variant_get(parse_json(g), $)#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_url_decode.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_url_decode.explain
index ee4936fec537..6111cc1374fb 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_url_decode.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_url_decode.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class
org.apache.spark.sql.catalyst.expressions.UrlCodec$, StringType, decode, g#0,
UTF-8, StringTypeAnyCollation, StringTypeAnyCollation, true, true, true) AS
url_decode(g)#0]
+Project [static_invoke(UrlCodec.decode(g#0, UTF-8)) AS url_decode(g)#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_url_encode.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_url_encode.explain
index 45c55f4f8737..871842d41ba4 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_url_encode.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_url_encode.explain
@@ -1,2 +1,2 @@
-Project [staticinvoke(class
org.apache.spark.sql.catalyst.expressions.UrlCodec$, StringType, encode, g#0,
UTF-8, StringTypeAnyCollation, StringTypeAnyCollation, true, true, true) AS
url_encode(g)#0]
+Project [static_invoke(UrlCodec.encode(g#0, UTF-8)) AS url_encode(g)#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_variant_get.explain
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_variant_get.explain
index f3af6fa9cf20..2e0baf058f72 100644
---
a/connector/connect/common/src/test/resources/query-tests/explain-results/function_variant_get.explain
+++
b/connector/connect/common/src/test/resources/query-tests/explain-results/function_variant_get.explain
@@ -1,2 +1,2 @@
-Project [variant_get(staticinvoke(class
org.apache.spark.sql.catalyst.expressions.variant.VariantExpressionEvalUtils$,
VariantType, parseJson, g#0, true, StringTypeAnyCollation, BooleanType, true,
false, true), $, IntegerType, true, Some(America/Los_Angeles)) AS
variant_get(parse_json(g), $)#0]
+Project [variant_get(static_invoke(VariantExpressionEvalUtils.parseJson(g#0,
true)), $, IntegerType, true, Some(America/Los_Angeles)) AS
variant_get(parse_json(g), $)#0]
+- LocalRelation <empty>, [id#0L, a#0, b#0, d#0, e#0, f#0, g#0]
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
index 32d8eebd01ce..09d024feccfa 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
@@ -360,6 +360,15 @@ case class StaticInvoke(
super.stringArgs.toSeq.dropRight(1).iterator
}
}
+
+ override def toString: String =
+ s"static_invoke(${
+ if (objectName.startsWith("org.apache.spark.")) {
+ cls.getSimpleName
+ } else {
+ objectName
+ }
+ }.$functionName(${arguments.mkString(", ")}))"
}
/**
@@ -509,7 +518,8 @@ case class Invoke(
ev.copy(code = code)
}
- override def toString: String = s"$targetObject.$functionName"
+ override def toString: String =
+ s"invoke($targetObject.$functionName(${arguments.mkString(", ")}))"
override protected def withNewChildrenInternal(newChildren:
IndexedSeq[Expression]): Invoke =
copy(targetObject = newChildren.head, arguments = newChildren.tail)
diff --git
a/sql/core/src/test/resources/sql-tests/analyzer-results/charvarchar.sql.out
b/sql/core/src/test/resources/sql-tests/analyzer-results/charvarchar.sql.out
index 6ba952226270..5c1417f7c0aa 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/charvarchar.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/charvarchar.sql.out
@@ -174,7 +174,7 @@ alter view char_view as select * from char_tbl2
AlterViewAsCommand `spark_catalog`.`default`.`char_view`, select * from
char_tbl2, true
+- Project [c#x, v#x]
+- SubqueryAlias spark_catalog.default.char_tbl2
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c#x, 5, true, false, true) AS c#x, v#x]
+ +- Project
[static_invoke(CharVarcharCodegenUtils.readSidePadding(c#x, 5)) AS c#x, v#x]
+- Relation spark_catalog.default.char_tbl2[c#x,v#x] parquet
@@ -364,7 +364,7 @@ CreateDataSourceTableCommand
`spark_catalog`.`default`.`char_tbl4`, false
insert into char_tbl4 select c, c, v, c from str_view
-- !query analysis
InsertIntoHadoopFsRelationCommand file:[not included in
comparison]/{warehouse_dir}/char_tbl4, false, Parquet, [path=file:[not included
in comparison]/{warehouse_dir}/char_tbl4], Append,
`spark_catalog`.`default`.`char_tbl4`,
org.apache.spark.sql.execution.datasources.InMemoryFileIndex(file:[not included
in comparison]/{warehouse_dir}/char_tbl4), [c7, c8, v, s]
-+- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
charTypeWriteSideCheck, cast(c#x as string), 7, true, false, true) AS c7#x,
staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils,
StringType, charTypeWriteSideCheck, cast(c#x as string), 8, true, false, true)
AS c8#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
varcharTypeWriteSideCheck, cast(v#x as string), 6, true [...]
++- Project
[static_invoke(CharVarcharCodegenUtils.charTypeWriteSideCheck(cast(c#x as
string), 7)) AS c7#x,
static_invoke(CharVarcharCodegenUtils.charTypeWriteSideCheck(cast(c#x as
string), 8)) AS c8#x,
static_invoke(CharVarcharCodegenUtils.varcharTypeWriteSideCheck(cast(v#x as
string), 6)) AS v#x, cast(c#x as string) AS s#x]
+- Project [c#x, c#x, v#x, c#x]
+- SubqueryAlias str_view
+- View (`str_view`, [c#x, v#x])
@@ -379,7 +379,7 @@ select c7, c8, v, s from char_tbl4
-- !query analysis
Project [c7#x, c8#x, v#x, s#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x, 7))
AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -389,7 +389,7 @@ select c7, c8, v, s from char_tbl4 where c7 = c8
Project [c7#x, c8#x, v#x, s#x]
+- Filter (rpad(c7#x, 8, ) = c8#x)
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x,
7)) AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -399,7 +399,7 @@ select c7, c8, v, s from char_tbl4 where c7 = v
Project [c7#x, c8#x, v#x, s#x]
+- Filter (c7#x = v#x)
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x,
7)) AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -409,7 +409,7 @@ select c7, c8, v, s from char_tbl4 where c7 = s
Project [c7#x, c8#x, v#x, s#x]
+- Filter (c7#x = s#x)
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x,
7)) AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -419,7 +419,7 @@ select c7, c8, v, s from char_tbl4 where c7 = 'NetEase
'
Project [c7#x, c8#x, v#x, s#x]
+- Filter (rpad(c7#x, 22, ) = NetEase )
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x,
7)) AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -429,7 +429,7 @@ select c7, c8, v, s from char_tbl4 where v = 'Spark '
Project [c7#x, c8#x, v#x, s#x]
+- Filter (v#x = Spark )
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x,
7)) AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -439,7 +439,7 @@ select c7, c8, v, s from char_tbl4 order by c7
Sort [c7#x ASC NULLS FIRST], true
+- Project [c7#x, c8#x, v#x, s#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x,
7)) AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -449,7 +449,7 @@ select c7, c8, v, s from char_tbl4 order by v
Sort [v#x ASC NULLS FIRST], true
+- Project [c7#x, c8#x, v#x, s#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x,
7)) AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -458,7 +458,7 @@ select ascii(c7), ascii(c8), ascii(v), ascii(s) from
char_tbl4
-- !query analysis
Project [ascii(c7#x) AS ascii(c7)#x, ascii(c8#x) AS ascii(c8)#x, ascii(v#x) AS
ascii(v)#x, ascii(s#x) AS ascii(s)#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x, 7))
AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -467,7 +467,7 @@ select base64(c7), base64(c8), base64(v), ascii(s) from
char_tbl4
-- !query analysis
Project [base64(cast(c7#x as binary)) AS base64(c7)#x, base64(cast(c8#x as
binary)) AS base64(c8)#x, base64(cast(v#x as binary)) AS base64(v)#x,
ascii(s#x) AS ascii(s)#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x, 7))
AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -476,7 +476,7 @@ select bit_length(c7), bit_length(c8), bit_length(v),
bit_length(s) from char_tb
-- !query analysis
Project [bit_length(c7#x) AS bit_length(c7)#x, bit_length(c8#x) AS
bit_length(c8)#x, bit_length(v#x) AS bit_length(v)#x, bit_length(s#x) AS
bit_length(s)#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x, 7))
AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -485,7 +485,7 @@ select char_length(c7), char_length(c8), char_length(v),
char_length(s) from cha
-- !query analysis
Project [char_length(c7#x) AS char_length(c7)#x, char_length(c8#x) AS
char_length(c8)#x, char_length(v#x) AS char_length(v)#x, char_length(s#x) AS
char_length(s)#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x, 7))
AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -494,7 +494,7 @@ select octet_length(c7), octet_length(c8), octet_length(v),
octet_length(s) from
-- !query analysis
Project [octet_length(c7#x) AS octet_length(c7)#x, octet_length(c8#x) AS
octet_length(c8)#x, octet_length(v#x) AS octet_length(v)#x, octet_length(s#x)
AS octet_length(s)#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x, 7))
AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -503,7 +503,7 @@ select concat_ws('|', c7, c8), concat_ws('|', c7, v),
concat_ws('|', c7, s), con
-- !query analysis
Project [concat_ws(|, c7#x, c8#x) AS concat_ws(|, c7, c8)#x, concat_ws(|,
c7#x, v#x) AS concat_ws(|, c7, v)#x, concat_ws(|, c7#x, s#x) AS concat_ws(|,
c7, s)#x, concat_ws(|, v#x, s#x) AS concat_ws(|, v, s)#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x, 7))
AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -512,7 +512,7 @@ select concat(c7, c8), concat(c7, v), concat(c7, s),
concat(v, s) from char_tbl4
-- !query analysis
Project [concat(c7#x, c8#x) AS concat(c7, c8)#x, concat(c7#x, v#x) AS
concat(c7, v)#x, concat(c7#x, s#x) AS concat(c7, s)#x, concat(v#x, s#x) AS
concat(v, s)#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x, 7))
AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -521,7 +521,7 @@ select like(c7, 'Ne _'), like(c8, 'Ne _') from
char_tbl4
-- !query analysis
Project [c7#x LIKE Ne _ AS c7 LIKE Ne _#x, c8#x LIKE Ne _ AS c8
LIKE Ne _#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x, 7))
AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -530,7 +530,7 @@ select like(v, 'Spark_') from char_tbl4
-- !query analysis
Project [v#x LIKE Spark_ AS v LIKE Spark_#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x, 7))
AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -540,7 +540,7 @@ select c7 = c8, upper(c7) = upper(c8), lower(c7) =
lower(c8) from char_tbl4 wher
Project [(rpad(c7#x, 8, ) = c8#x) AS (c7 = c8)#x, (upper(c7#x) = upper(c8#x))
AS (upper(c7) = upper(c8))#x, (lower(c7#x) = lower(c8#x)) AS (lower(c7) =
lower(c8))#x]
+- Filter (s#x = NetEase)
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x,
7)) AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -550,7 +550,7 @@ select c7 = s, upper(c7) = upper(s), lower(c7) = lower(s)
from char_tbl4 where s
Project [(c7#x = s#x) AS (c7 = s)#x, (upper(c7#x) = upper(s#x)) AS (upper(c7)
= upper(s))#x, (lower(c7#x) = lower(s#x)) AS (lower(c7) = lower(s))#x]
+- Filter (s#x = NetEase)
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x,
7)) AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -560,7 +560,7 @@ select c7 = 'NetEase', upper(c7) = upper('NetEase'),
lower(c7) = lower('NetEase'
Project [(c7#x = NetEase) AS (c7 = NetEase)#x, (upper(c7#x) = upper(NetEase))
AS (upper(c7) = upper(NetEase))#x, (lower(c7#x) = lower(NetEase)) AS (lower(c7)
= lower(NetEase))#x]
+- Filter (s#x = NetEase)
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x,
7)) AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -569,7 +569,7 @@ select printf('Hey, %s%s%s%s', c7, c8, v, s) from char_tbl4
-- !query analysis
Project [printf(Hey, %s%s%s%s, c7#x, c8#x, v#x, s#x) AS printf(Hey, %s%s%s%s,
c7, c8, v, s)#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x, 7))
AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -578,7 +578,7 @@ select repeat(c7, 2), repeat(c8, 2), repeat(v, 2),
repeat(s, 2) from char_tbl4
-- !query analysis
Project [repeat(c7#x, 2) AS repeat(c7, 2)#x, repeat(c8#x, 2) AS repeat(c8,
2)#x, repeat(v#x, 2) AS repeat(v, 2)#x, repeat(s#x, 2) AS repeat(s, 2)#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x, 7))
AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -587,7 +587,7 @@ select replace(c7, 'Net', 'Apache'), replace(c8, 'Net',
'Apache'), replace(v, 'S
-- !query analysis
Project [replace(c7#x, Net, Apache) AS replace(c7, Net, Apache)#x,
replace(c8#x, Net, Apache) AS replace(c8, Net, Apache)#x, replace(v#x, Spark,
Kyuubi) AS replace(v, Spark, Kyuubi)#x, replace(s#x, Net, Apache) AS replace(s,
Net, Apache)#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x, 7))
AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -596,7 +596,7 @@ select rpad(c7, 10), rpad(c8, 5), rpad(v, 5), rpad(s, 5)
from char_tbl4
-- !query analysis
Project [rpad(c7#x, 10, ) AS rpad(c7, 10, )#x, rpad(c8#x, 5, ) AS rpad(c8,
5, )#x, rpad(v#x, 5, ) AS rpad(v, 5, )#x, rpad(s#x, 5, ) AS rpad(s, 5, )#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x, 7))
AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -605,7 +605,7 @@ select rtrim(c7), rtrim(c8), rtrim(v), rtrim(s) from
char_tbl4
-- !query analysis
Project [rtrim(c7#x, None) AS rtrim(c7)#x, rtrim(c8#x, None) AS rtrim(c8)#x,
rtrim(v#x, None) AS rtrim(v)#x, rtrim(s#x, None) AS rtrim(s)#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x, 7))
AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -614,7 +614,7 @@ select split(c7, 'e'), split(c8, 'e'), split(v, 'a'),
split(s, 'e') from char_tb
-- !query analysis
Project [split(c7#x, e, -1) AS split(c7, e, -1)#x, split(c8#x, e, -1) AS
split(c8, e, -1)#x, split(v#x, a, -1) AS split(v, a, -1)#x, split(s#x, e, -1)
AS split(s, e, -1)#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x, 7))
AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -623,7 +623,7 @@ select substring(c7, 2), substring(c8, 2), substring(v, 3),
substring(s, 2) from
-- !query analysis
Project [substring(c7#x, 2, 2147483647) AS substring(c7, 2, 2147483647)#x,
substring(c8#x, 2, 2147483647) AS substring(c8, 2, 2147483647)#x,
substring(v#x, 3, 2147483647) AS substring(v, 3, 2147483647)#x, substring(s#x,
2, 2147483647) AS substring(s, 2, 2147483647)#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x, 7))
AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -632,7 +632,7 @@ select left(c7, 2), left(c8, 2), left(v, 3), left(s, 2)
from char_tbl4
-- !query analysis
Project [left(c7#x, 2) AS left(c7, 2)#x, left(c8#x, 2) AS left(c8, 2)#x,
left(v#x, 3) AS left(v, 3)#x, left(s#x, 2) AS left(s, 2)#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x, 7))
AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -641,7 +641,7 @@ select right(c7, 2), right(c8, 2), right(v, 3), right(s, 2)
from char_tbl4
-- !query analysis
Project [right(c7#x, 2) AS right(c7, 2)#x, right(c8#x, 2) AS right(c8, 2)#x,
right(v#x, 3) AS right(v, 3)#x, right(s#x, 2) AS right(s, 2)#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x, 7))
AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
@@ -652,7 +652,7 @@ GlobalLimit 1
+- LocalLimit 1
+- Project [typeof(c7#x) AS typeof(c7)#x, typeof(c8#x) AS typeof(c8)#x,
typeof(v#x) AS typeof(v)#x, typeof(s#x) AS typeof(s)#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project
[static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x, 7)) AS c7#x,
static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS c8#x, v#x,
s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x]
parquet
@@ -661,7 +661,7 @@ select cast(c7 as char(1)), cast(c8 as char(10)), cast(v as
char(1)), cast(v as
-- !query analysis
Project [cast(c7#x as string) AS c7#x, cast(c8#x as string) AS c8#x, cast(v#x
as string) AS v#x, cast(v#x as string) AS v#x, cast(s#x as string) AS s#x]
+- SubqueryAlias spark_catalog.default.char_tbl4
- +- Project [staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c7#x, 7, true, false, true) AS c7#x, staticinvoke(class
org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType,
readSidePadding, c8#x, 8, true, false, true) AS c8#x, v#x, s#x]
+ +- Project [static_invoke(CharVarcharCodegenUtils.readSidePadding(c7#x, 7))
AS c7#x, static_invoke(CharVarcharCodegenUtils.readSidePadding(c8#x, 8)) AS
c8#x, v#x, s#x]
+- Relation spark_catalog.default.char_tbl4[c7#x,c8#x,v#x,s#x] parquet
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]