This is an automated email from the ASF dual-hosted git repository.
chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new a8f134e3c [KYUUBI #5030] Support get query id in Spark engine
a8f134e3c is described below
commit a8f134e3c6fd9580a2d111952cfe7f80e6ea373d
Author: yikaifei <[email protected]>
AuthorDate: Thu Jul 13 12:08:31 2023 +0800
[KYUUBI #5030] Support get query id in Spark engine
### _Why are the changes needed?_
This PR aims to support `getQueryId` in the Spark engine. It gets the `spark.sql.execution.id` by adding a listener.
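For reference, a minimal client-side sketch of how the new capability might be exercised over JDBC with the Kyuubi Hive driver; it mirrors the test added in this commit, and the connection URL (`jdbc:hive2://localhost:10009/default`) is an illustrative assumption, not something mandated by this change:
```scala
import java.sql.DriverManager

import org.apache.kyuubi.jdbc.hive.KyuubiStatement

object GetQueryIdExample {
  def main(args: Array[String]): Unit = {
    // Illustrative endpoint; point this at an actual Kyuubi server.
    val conn = DriverManager.getConnection("jdbc:hive2://localhost:10009/default")
    try {
      val stmt = conn.createStatement()
      stmt.executeQuery("SELECT 1")
      // With the Kyuubi Hive JDBC driver, the statement can be cast to
      // KyuubiStatement to read the query id of the last execution.
      val queryId = stmt.asInstanceOf[KyuubiStatement].getQueryId
      println(s"query id: $queryId")
    } finally {
      conn.close()
    }
  }
}
```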
### _How was this patch tested?_
- [x] Add some test cases that check the changes thoroughly, including negative and positive cases if possible
- [ ] Add screenshots for manual tests if appropriate
- [ ] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request
Closes #5037 from Yikf/spark-queryid.
Closes #5030
9f2b5a3cb [yikaifei] Support get query id in Spark engine
Authored-by: yikaifei <[email protected]>
Signed-off-by: Cheng Pan <[email protected]>
---
.../kyuubi/engine/spark/operation/SparkSQLOperationManager.scala | 2 +-
.../kyuubi/engine/spark/operation/SparkOperationSuite.scala | 9 +++++++++
2 files changed, 10 insertions(+), 1 deletion(-)
diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/operation/SparkSQLOperationManager.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/operation/SparkSQLOperationManager.scala
index cd9302cbf..ab0828746 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/operation/SparkSQLOperationManager.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/operation/SparkSQLOperationManager.scala
@@ -231,6 +231,6 @@ class SparkSQLOperationManager private (name: String) extends OperationManager(n
}
override def getQueryId(operation: Operation): String = {
- throw KyuubiSQLException.featureNotSupported()
+ operation.getHandle.identifier.toString
}
}
diff --git a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
index f5d265422..adab0231d 100644
--- a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
+++ b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
@@ -35,6 +35,7 @@ import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.engine.spark.WithSparkSQLEngine
import org.apache.kyuubi.engine.spark.schema.SchemaHelper.TIMESTAMP_NTZ
import org.apache.kyuubi.engine.spark.util.SparkCatalogUtils
+import org.apache.kyuubi.jdbc.hive.KyuubiStatement
import org.apache.kyuubi.operation.{HiveMetadataTests, SparkQueryTests}
import org.apache.kyuubi.operation.meta.ResultSetSchemaConstant._
import org.apache.kyuubi.util.KyuubiHadoopUtils
@@ -728,6 +729,14 @@ class SparkOperationSuite extends WithSparkSQLEngine with HiveMetadataTests with
}
}
+ test("KYUUBI #5030: Support get query id in Spark engine") {
+ withJdbcStatement() { stmt =>
+ stmt.executeQuery("SELECT 1")
+ val queryId = stmt.asInstanceOf[KyuubiStatement].getQueryId
+ assert(queryId != null && queryId.nonEmpty)
+ }
+ }
+
private def whenMetaStoreURIsSetTo(uris: String)(func: String => Unit): Unit = {
val conf = spark.sparkContext.hadoopConfiguration
val origin = conf.get("hive.metastore.uris", "")