This is an automated email from the ASF dual-hosted git repository.
ruifengz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new ea8b1392d84 [SPARK-46284][PYTHON][CONNECT] Add `session_user` function to Python
ea8b1392d84 is described below
commit ea8b1392d84757cdab03e40c2c3efea1d8ef3c82
Author: Ruifeng Zheng <[email protected]>
AuthorDate: Wed Dec 6 18:30:44 2023 +0800
[SPARK-46284][PYTHON][CONNECT] Add `session_user` function to Python
### What changes were proposed in this pull request?
The `session_user` function was added to the Scala API in
https://github.com/apache/spark/pull/42549; this PR adds it to Python.
### Why are the changes needed?
For parity with the Scala API.
### Does this PR introduce _any_ user-facing change?
Yes, a new `session_user` function is exposed in the Python API:
```
>>> import pyspark.sql.functions as sf
>>> spark.range(1).select(sf.session_user()).show() # doctest: +SKIP
+--------------+
|current_user()|
+--------------+
| ruifeng.zheng|
+--------------+
```
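The displayed column name is `current_user()` because `session_user` maps to the same underlying expression as `current_user` in the Scala change (https://github.com/apache/spark/pull/42549). A minimal sketch of the related calls, assuming an active `spark` session (output depends on the connected user):
```
>>> import pyspark.sql.functions as sf
>>> # current_user(), user() and session_user() resolve to the same value;
>>> # the displayed column name reflects the shared underlying expression.
>>> spark.range(1).select(sf.current_user(), sf.user(), sf.session_user()).show()  # doctest: +SKIP
```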
### How was this patch tested?
CI
### Was this patch authored or co-authored using generative AI tooling?
no
Closes #44205 from zhengruifeng/connect_session_user.
Authored-by: Ruifeng Zheng <[email protected]>
Signed-off-by: Ruifeng Zheng <[email protected]>
---
.../docs/source/reference/pyspark.sql/functions.rst | 1 +
python/pyspark/sql/connect/functions/builtin.py | 7 +++++++
python/pyspark/sql/functions/builtin.py | 19 +++++++++++++++++++
python/pyspark/sql/tests/test_functions.py | 1 -
4 files changed, 27 insertions(+), 1 deletion(-)
diff --git a/python/docs/source/reference/pyspark.sql/functions.rst b/python/docs/source/reference/pyspark.sql/functions.rst
index 3b6a55a2a6f..d1dba5f2bed 100644
--- a/python/docs/source/reference/pyspark.sql/functions.rst
+++ b/python/docs/source/reference/pyspark.sql/functions.rst
@@ -585,6 +585,7 @@ Misc Functions
monotonically_increasing_id
raise_error
reflect
+ session_user
spark_partition_id
try_aes_decrypt
try_reflect
diff --git a/python/pyspark/sql/connect/functions/builtin.py b/python/pyspark/sql/connect/functions/builtin.py
index 882fbbccf63..48a7a223e6e 100644
--- a/python/pyspark/sql/connect/functions/builtin.py
+++ b/python/pyspark/sql/connect/functions/builtin.py
@@ -3613,6 +3613,13 @@ def user() -> Column:
user.__doc__ = pysparkfuncs.user.__doc__
+def session_user() -> Column:
+ return _invoke_function("session_user")
+
+
+session_user.__doc__ = pysparkfuncs.session_user.__doc__
+
+
def assert_true(col: "ColumnOrName", errMsg: Optional[Union[Column, str]] = None) -> Column:
if errMsg is None:
return _invoke_function_over_columns("assert_true", col)
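For context, the hunk above follows the Connect module's usual pattern: a no-argument wrapper delegates to `_invoke_function` and copies its docstring from the classic `pyspark.sql.functions` counterpart so both APIs are documented identically. A self-contained, illustrative analogue (names below are stand-ins, not the real internals):
```
def _invoke_function(name, *args):
    # Stand-in for the real helper, which builds an unresolved function
    # expression that the Connect server resolves at analysis time.
    return f"unresolved_function({name!r}, args={args})"


def classic_session_user():
    """Returns the user name of the current execution context."""


def session_user():
    return _invoke_function("session_user")


# Reuse the classic docstring so help() output matches on both code paths.
session_user.__doc__ = classic_session_user.__doc__

print(session_user())        # unresolved_function('session_user', args=())
print(session_user.__doc__)  # docstring copied from the classic function
```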
diff --git a/python/pyspark/sql/functions/builtin.py b/python/pyspark/sql/functions/builtin.py
index ac237f10c2e..87ae84c4e2d 100644
--- a/python/pyspark/sql/functions/builtin.py
+++ b/python/pyspark/sql/functions/builtin.py
@@ -8914,6 +8914,25 @@ def user() -> Column:
return _invoke_function("user")
+@_try_remote_functions
+def session_user() -> Column:
+ """Returns the user name of current execution context.
+
+ .. versionadded:: 4.0.0
+
+ Examples
+ --------
+ >>> import pyspark.sql.functions as sf
+ >>> spark.range(1).select(sf.session_user()).show() # doctest: +SKIP
+ +--------------+
+ |current_user()|
+ +--------------+
+ | ruifeng.zheng|
+ +--------------+
+ """
+ return _invoke_function("session_user")
+
+
@_try_remote_functions
def crc32(col: "ColumnOrName") -> Column:
"""
diff --git a/python/pyspark/sql/tests/test_functions.py b/python/pyspark/sql/tests/test_functions.py
index 7d8acbb2b18..2bdcfa6085f 100644
--- a/python/pyspark/sql/tests/test_functions.py
+++ b/python/pyspark/sql/tests/test_functions.py
@@ -66,7 +66,6 @@ class FunctionsTestsMixin:
"random", # namespace conflict with python built-in module
"uuid", # namespace conflict with python built-in module
"chr", # namespace conflict with python built-in function
- "session_user", # Scala only for now, needs implementation
"partitioning$", # partitioning expressions for DSv2
]
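For reference, an illustrative sketch of the parity check this exclusion list feeds (toy data, not the real test): SQL functions that are intentionally absent from the Python API are skipped, and everything else must exist on both sides, so removing `session_user` from the list means the new Python binding is now enforced.
```
# Toy stand-ins for the names the real parity test collects from the JVM
# function registry and from pyspark.sql.functions.
sql_function_names = {"session_user", "crc32", "random", "uuid", "chr"}
python_function_names = {"session_user", "crc32"}
excluded = {"random", "uuid", "chr"}  # e.g. namespace conflicts with Python built-ins

missing = sql_function_names - python_function_names - excluded
assert not missing, f"functions missing from the Python API: {missing}"
```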
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]