This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 7cbfc2cad078 [SPARK-50422][SQL] Make `Parameterized SQL queries` of
`SparkSession.sql` API GA
7cbfc2cad078 is described below
commit 7cbfc2cad078f40c290eff6ffd90e4902397314a
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Tue Nov 26 10:56:38 2024 +0100
[SPARK-50422][SQL] Make `Parameterized SQL queries` of `SparkSession.sql`
API GA
### What changes were proposed in this pull request?
This PR aims to make `Parameterized SQL queries` of `SparkSession.sql` API
GA in Apache Spark 4.0.0.
### Why are the changes needed?
Apache Spark has supported `Parameterized SQL queries` because they
are very convenient for users.
- https://github.com/apache/spark/pull/38864 (Since Spark 3.4.0)
- https://github.com/apache/spark/pull/41568 (Since Spark 3.5.0)
It's time to make it GA by removing `Experimental` tags since this feature
has been serving well for a long time.
### Does this PR introduce _any_ user-facing change?
No, there is no behavior change.
### How was this patch tested?
Pass the CIs.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #48965 from dongjoon-hyun/SPARK-50422.
Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
.../client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala | 3 ---
sql/api/src/main/scala/org/apache/spark/sql/api/SparkSession.scala | 3 ---
sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala | 3 ---
3 files changed, 9 deletions(-)
diff --git
a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala
b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala
index 231c604b98bb..b74d0c2ff224 100644
---
a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++
b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -210,7 +210,6 @@ class SparkSession private[sql] (
throw ConnectClientUnsupportedErrors.executeCommand()
/** @inheritdoc */
- @Experimental
def sql(sqlText: String, args: Array[_]): DataFrame = {
val sqlCommand = proto.SqlCommand
.newBuilder()
@@ -221,13 +220,11 @@ class SparkSession private[sql] (
}
/** @inheritdoc */
- @Experimental
def sql(sqlText: String, args: Map[String, Any]): DataFrame = {
sql(sqlText, args.asJava)
}
/** @inheritdoc */
- @Experimental
override def sql(sqlText: String, args: java.util.Map[String, Any]):
DataFrame = {
val sqlCommand = proto.SqlCommand
.newBuilder()
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/api/SparkSession.scala
b/sql/api/src/main/scala/org/apache/spark/sql/api/SparkSession.scala
index 64b0a87c573d..35f74497b96f 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/api/SparkSession.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/api/SparkSession.scala
@@ -470,7 +470,6 @@ abstract class SparkSession extends Serializable with
Closeable {
* is.
* @since 3.5.0
*/
- @Experimental
def sql(sqlText: String, args: Array[_]): Dataset[Row]
/**
@@ -488,7 +487,6 @@ abstract class SparkSession extends Serializable with
Closeable {
* `array()`, `struct()`, in that case it is taken as is.
* @since 3.4.0
*/
- @Experimental
def sql(sqlText: String, args: Map[String, Any]): Dataset[Row]
/**
@@ -506,7 +504,6 @@ abstract class SparkSession extends Serializable with
Closeable {
* `array()`, `struct()`, in that case it is taken as is.
* @since 3.4.0
*/
- @Experimental
def sql(sqlText: String, args: util.Map[String, Any]): Dataset[Row] = {
sql(sqlText, args.asScala.toMap)
}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index a7f85db12b21..8cf30fb39f31 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -460,7 +460,6 @@ class SparkSession private(
}
/** @inheritdoc */
- @Experimental
def sql(sqlText: String, args: Array[_]): DataFrame = {
sql(sqlText, args, new QueryPlanningTracker)
}
@@ -498,13 +497,11 @@ class SparkSession private(
}
/** @inheritdoc */
- @Experimental
def sql(sqlText: String, args: Map[String, Any]): DataFrame = {
sql(sqlText, args, new QueryPlanningTracker)
}
/** @inheritdoc */
- @Experimental
override def sql(sqlText: String, args: java.util.Map[String, Any]):
DataFrame = {
sql(sqlText, args.asScala.toMap)
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]