This is an automated email from the ASF dual-hosted git repository.
feiwang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new 2813d9dad [KYUUBI #5887][FOLLOWUP] Catch all exception when dump the
result to json and fix typo
2813d9dad is described below
commit 2813d9dad9291d30bb3b34f75ab913bdd0222423
Author: Fei Wang <[email protected]>
AuthorDate: Sat Feb 3 18:07:25 2024 -0800
[KYUUBI #5887][FOLLOWUP] Catch all exception when dump the result to json
and fix typo
# :mag: Description
## Issue References 🔗
Fix a typo in execute_python.py and catch all exceptions when dumping the
object to JSON.
This pull request fixes #
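For illustration only (not part of the patch): a minimal sketch of the broadened error handling, assuming a simplified reply payload. The `to_json_reply` helper and its exact shape are hypothetical; the actual logic lives in `execute_python.py` (see the diff below).
```python
import json
import sys

def to_json_reply(result):
    # Hypothetical helper, illustration only: serialize `result`, and on any
    # failure return an error reply instead of letting the exception escape.
    try:
        return json.dumps({"msg_type": "inspect_reply",
                           "content": {"status": "ok", "data": result}})
    except TypeError:
        # json.dumps raises TypeError for unsupported types (e.g. a DataFrame).
        return json.dumps({"msg_type": "inspect_reply",
                           "content": {"status": "error",
                                       "ename": "ValueError",
                                       "evalue": "cannot json-ify %s" % result,
                                       "traceback": []}})
    except Exception:
        # Catch-all added by this follow-up: report any other failure as well.
        exc_type, exc_value, _ = sys.exc_info()
        return json.dumps({"msg_type": "inspect_reply",
                           "content": {"status": "error",
                                       "ename": str(exc_type.__name__),
                                       "evalue": "cannot json-ify %s: %s"
                                       % (result, exc_value),
                                       "traceback": []}})
```
The key point is the trailing `except Exception` branch, which turns any serialization failure into an error reply instead of letting it propagate.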
## Describe Your Solution 🔧
Please include a summary of the change and which issue is fixed. Please
also include relevant motivation and context. List any dependencies that are
required for this change.
## Types of changes :bookmark:
- [ ] Bugfix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing
functionality to change)
## Test Plan 🧪
#### Behavior Without This Pull Request :coffin:
#### Behavior With This Pull Request :tada:
#### Related Unit Tests
---
# Checklist 📝
- [ ] This patch was not authored or co-authored using [Generative
Tooling](https://www.apache.org/legal/generative-tooling.html)
**Be nice. Be informative.**
Closes #6039 from turboFei/python_exit.
Closes #5887
b675299e7 [Fei Wang] catch all exception
Authored-by: Fei Wang <[email protected]>
Signed-off-by: Fei Wang <[email protected]>
---
.../src/main/resources/python/execute_python.py | 16 +++++++++++++++-
.../apache/kyuubi/engine/spark/PySparkTests.scala | 20 ++++++++++++++++++++
2 files changed, 35 insertions(+), 1 deletion(-)
diff --git a/externals/kyuubi-spark-sql-engine/src/main/resources/python/execute_python.py b/externals/kyuubi-spark-sql-engine/src/main/resources/python/execute_python.py
index 6729092f7..f33c10c40 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/resources/python/execute_python.py
+++ b/externals/kyuubi-spark-sql-engine/src/main/resources/python/execute_python.py
@@ -497,7 +497,21 @@ def main():
"content": {
"status": "error",
"ename": "ValueError",
- "evalue": "cannot json-ify %s" % response,
+ "evalue": "cannot json-ify %s" % result,
+ "traceback": [],
+ },
+ }
+ )
+ except Exception:
+ exc_type, exc_value, tb = sys.exc_info()
+ result = json.dumps(
+ {
+ "msg_type": "inspect_reply",
+ "content": {
+ "status": "error",
+ "ename": str(exc_type.__name__),
+ "evalue": "cannot json-ify %s: %s"
+ % (result, str(exc_value)),
"traceback": [],
},
}
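For context (illustration only, not part of the patch): in plain Python, `json.dumps` rejects arbitrary objects with a `TypeError` whose message has the same form as the one asserted in the new test below, while tuples, and therefore collected `Row`s (a tuple subclass), serialize as JSON arrays. The `DataFrame` class here is a hypothetical stand-in, not `pyspark.sql.DataFrame`.
```python
import json

class DataFrame:  # hypothetical stand-in, illustration only
    pass

try:
    json.dumps(DataFrame())
except TypeError as e:
    print(e)  # Object of type DataFrame is not JSON serializable

print(json.dumps([("Alice", 1)]))  # [["Alice", 1]]
```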
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/PySparkTests.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/PySparkTests.scala
index c723dcf4a..435872876 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/PySparkTests.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/PySparkTests.scala
@@ -187,6 +187,26 @@ class PySparkTests extends WithKyuubiServer with HiveJDBCTestHelper {
}
}
+ test("catch all exception when dump the result to json") {
+ checkPythonRuntimeAndVersion()
+ withSessionConf()(Map(KyuubiConf.ENGINE_SPARK_PYTHON_MAGIC_ENABLED.key -> "true"))() {
+ withMultipleConnectionJdbcStatement()({ stmt =>
+ val statement = stmt.asInstanceOf[KyuubiStatement]
+ statement.executePython("l = [('Alice', 1)]")
+ statement.executePython("df = spark.createDataFrame(l)")
+ val errorMsg = intercept[KyuubiSQLException] {
+ statement.executePython("%json df")
+ }.getMessage
+ assert(errorMsg.contains("Object of type DataFrame is not JSON serializable"))
+
+ statement.executePython("df = spark.createDataFrame(l).collect()")
+ val result = statement.executePython("%json df")
+ assert(result.next())
+ assert(result.getString("output") == "{\"application/json\":[[\"Alice\",1]]}")
+ })
+ }
+ }
+
private def runPySparkTest(
pyCode: String,
output: String): Unit = {