Sandeep Singh created SPARK-41906:
-------------------------------------
Summary: Handle Function `rand()`
Key: SPARK-41906
URL: https://issues.apache.org/jira/browse/SPARK-41906
Project: Spark
Issue Type: Sub-task
Components: Connect
Affects Versions: 3.4.0
Reporter: Sandeep Singh
{code:python}
# Excerpt from python/pyspark/sql/tests/test_functions.py::test_slice;
# it runs inside the shared test fixture, so self.spark is a SparkSession.
from pyspark.sql import Row
from pyspark.sql.functions import lit, size, slice

df = self.spark.createDataFrame(
    [
        (
            [1, 2, 3],
            2,
            2,
        ),
        (
            [4, 5],
            2,
            2,
        ),
    ],
    ["x", "index", "len"],
)
expected = [Row(sliced=[2, 3]), Row(sliced=[5])]
self.assertTrue(
    all(
        [
            df.select(slice(df.x, 2, 2).alias("sliced")).collect() == expected,
            df.select(slice(df.x, lit(2), lit(2)).alias("sliced")).collect() == expected,
            df.select(slice("x", "index", "len").alias("sliced")).collect() == expected,
        ]
    )
)
self.assertEqual(
    df.select(slice(df.x, size(df.x) - 1, lit(1)).alias("sliced")).collect(),
    [Row(sliced=[2]), Row(sliced=[4])],
)
self.assertEqual(
    df.select(slice(df.x, lit(1), size(df.x) - 1).alias("sliced")).collect(),
    [Row(sliced=[1, 2]), Row(sliced=[4])],
){code}
Running this snippet against Spark Connect fails on the call that passes column names as strings:
{code:java}
Traceback (most recent call last):
  File "/Users/s.singh/personal/spark-oss/python/pyspark/sql/tests/test_functions.py", line 596, in test_slice
    df.select(slice("x", "index", "len").alias("sliced")).collect() == expected,
  File "/Users/s.singh/personal/spark-oss/python/pyspark/sql/utils.py", line 332, in wrapped
    return getattr(functions, f.__name__)(*args, **kwargs)
  File "/Users/s.singh/personal/spark-oss/python/pyspark/sql/connect/functions.py", line 1525, in slice
    raise TypeError(f"start should be a Column or int, but got {type(start).__name__}")
TypeError: start should be a Column or int, but got str{code}
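The test expects string column names to be accepted for {{start}} and {{length}}, as the non-Connect API does, so the Connect implementation needs to normalize str arguments to columns before building the expression. Below is a minimal, hypothetical sketch of that normalization as a standalone helper; {{slice_compat}} is not part of PySpark, it simply wraps {{pyspark.sql.functions.slice}} after converting str arguments with {{col}}:
{code:python}
# Hypothetical sketch only (not the actual pyspark.sql.connect.functions code):
# normalize str arguments to Columns so all call styles in the test above work.
from typing import Union

from pyspark.sql import Column, SparkSession
from pyspark.sql.functions import col
from pyspark.sql.functions import slice as sql_slice


def slice_compat(
    x: Union[Column, str],
    start: Union[Column, str, int],
    length: Union[Column, str, int],
) -> Column:
    """Accept column names (str), Columns, or int literals for start/length."""
    start = col(start) if isinstance(start, str) else start
    length = col(length) if isinstance(length, str) else length
    return sql_slice(x, start, length)


if __name__ == "__main__":
    spark = SparkSession.builder.master("local[1]").getOrCreate()
    df = spark.createDataFrame(
        [([1, 2, 3], 2, 2), ([4, 5], 2, 2)], ["x", "index", "len"]
    )
    # Expected: [Row(sliced=[2, 3]), Row(sliced=[5])]
    print(df.select(slice_compat("x", "index", "len").alias("sliced")).collect())
    spark.stop()
{code}
With this normalization, the int-literal, {{lit()}}-column, and string-name call styles from the test all resolve to the same slice expression.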