uros-db commented on code in PR #47828:
URL: https://github.com/apache/spark/pull/47828#discussion_r1724788794
##########
sql/core/src/test/resources/sql-tests/inputs/collations.sql:
##########
@@ -90,3 +90,457 @@ select 'a' collate en_ci_ai = 'Å';
select 'Kypper' collate sv < 'Köpfe';
select 'Kypper' collate de > 'Köpfe';
select 'I' collate tr_ci = 'ı';
+
+-- ConcatWs
+SELECT concat_ws(collate(' ', 'UTF8_BINARY'), collate('Spark', 'UTF8_BINARY'),
collate('SQL', 'UTF8_BINARY'));
+SELECT concat_ws(collate(' ', 'UTF8_BINARY'), 'Spark', 'SQL');
+SELECT concat_ws(' ', collate('Spark', 'UTF8_BINARY'), collate('SQL',
'UTF8_BINARY'));
+SELECT concat_ws(collate(' ', 'UTF8_LCASE'), collate('Spark', 'UTF8_LCASE'),
collate('SQL', 'UTF8_LCASE'));
+SELECT concat_ws(collate(' ', 'UTF8_LCASE'), 'Spark', 'SQL');
+SELECT concat_ws(' ', collate('Spark', 'UTF8_LCASE'), collate('SQL',
'UTF8_LCASE'));
+SELECT concat_ws(collate(' ', 'UNICODE'), collate('Spark', 'UNICODE'),
collate('SQL', 'UNICODE'));
+SELECT concat_ws(collate(' ', 'UNICODE'), 'Spark', 'SQL');
+SELECT concat_ws(' ', collate('Spark', 'UNICODE'), collate('SQL', 'UNICODE'));
+SELECT concat_ws(collate(' ', 'UNICODE_CI'), collate('Spark', 'UNICODE_CI'),
collate('SQL', 'UNICODE_CI'));
+SELECT concat_ws(collate(' ', 'UNICODE_CI'), 'Spark', 'SQL');
+SELECT concat_ws(' ', collate('Spark', 'UNICODE_CI'), collate('SQL',
'UNICODE_CI'));
+SELECT concat_ws(' ', collate('Spark', 'UTF8_LCASE'), collate('SQL',
'UNICODE'));
Review Comment:
These are all testing the `concat_ws` function on literals; however, we
want to test it on **table columns** (see `t1` at the beginning of the file).
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]