This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch branch-4.1
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-4.1 by this push:
new eefe0ca49474 [SPARK-54540][CONNECT] A couple of minor fixes for the Connect
JDBC driver
eefe0ca49474 is described below
commit eefe0ca49474544f47abb08153f86a331b0f6810
Author: Cheng Pan <[email protected]>
AuthorDate: Thu Nov 27 16:49:02 2025 -0800
[SPARK-54540][CONNECT] A couple of minor fixes for the Connect JDBC driver
### What changes were proposed in this pull request?
A couple of minor fixes for the Connect JDBC driver:
- fix typos
- use import instead of the inline full package class reference
- add a missing `checkOpen` call
- remove duplicated scaladocs generated by AI.
### Why are the changes needed?
Minor fixes.
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Existing tests.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #53248 from pan3793/SPARK-54540.
Authored-by: Cheng Pan <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
(cherry picked from commit 68653991a4ddc499e8d7fd265ecebee2aa4c77b4)
Signed-off-by: Dongjoon Hyun <[email protected]>
---
.../client/jdbc/SparkConnectConnection.scala | 4 +-
.../client/jdbc/SparkConnectResultSet.scala | 60 ++++++++++++----------
.../connect/client/jdbc/util/JdbcErrorUtils.scala | 4 +-
3 files changed, 36 insertions(+), 32 deletions(-)
diff --git
a/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectConnection.scala
b/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectConnection.scala
index 95ec956771db..21b9471bb606 100644
---
a/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectConnection.scala
+++
b/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectConnection.scala
@@ -185,7 +185,7 @@ class SparkConnectConnection(val url: String, val info:
Properties) extends Conn
if (level != Connection.TRANSACTION_NONE) {
throw new SQLFeatureNotSupportedException(
"Requested transaction isolation level " +
- s"${stringfiyTransactionIsolationLevel(level)} is not supported")
+ s"${stringifyTransactionIsolationLevel(level)} is not supported")
}
}
@@ -207,7 +207,7 @@ class SparkConnectConnection(val url: String, val info:
Properties) extends Conn
override def setHoldability(holdability: Int): Unit = {
if (holdability != ResultSet.HOLD_CURSORS_OVER_COMMIT) {
throw new SQLFeatureNotSupportedException(
- s"Holdability ${stringfiyHoldability(holdability)} is not supported")
+ s"Holdability ${stringifyHoldability(holdability)} is not supported")
}
}
diff --git
a/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectResultSet.scala
b/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectResultSet.scala
index 8b2d4578d0aa..0070cbd93c3e 100644
---
a/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectResultSet.scala
+++
b/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectResultSet.scala
@@ -20,7 +20,8 @@ package org.apache.spark.sql.connect.client.jdbc
import java.io.{InputStream, Reader}
import java.net.URL
import java.sql.{Array => JdbcArray, _}
-import java.time.LocalTime
+import java.time.{LocalDateTime, LocalTime}
+import java.time.temporal.ChronoUnit
import java.util
import java.util.Calendar
@@ -44,9 +45,15 @@ class SparkConnectResultSet(
private var cursor: Int = 0
private var _wasNull: Boolean = false
- override def wasNull: Boolean = _wasNull
+
+ override def wasNull: Boolean = {
+ checkOpen()
+ _wasNull
+ }
override def next(): Boolean = {
+ checkOpen()
+
val hasNext = iterator.hasNext
if (hasNext) {
currentRow = iterator.next()
@@ -60,7 +67,7 @@ class SparkConnectResultSet(
hasNext
}
- @volatile protected var closed: Boolean = false
+ @volatile private var closed: Boolean = false
override def isClosed: Boolean = closed
@@ -97,6 +104,8 @@ class SparkConnectResultSet(
}
override def findColumn(columnLabel: String): Int = {
+ checkOpen()
+
sparkResult.schema.getFieldIndex(columnLabel) match {
case Some(i) => i + 1
case None =>
@@ -156,9 +165,8 @@ class SparkConnectResultSet(
// Note: java.sql.Time can only store up to millisecond precision (3
digits).
// For TIME types with higher precision (TIME(4-9)),
microseconds/nanoseconds are truncated.
// If user needs full precision,
- // should use: getObject(columnIndex, classOf[java.time.LocalTime])
- val millisSinceMidnight =
- java.time.temporal.ChronoUnit.MILLIS.between(LocalTime.MIDNIGHT,
localTime)
+ // should use: getObject(columnIndex, classOf[LocalTime])
+ val millisSinceMidnight = ChronoUnit.MILLIS.between(LocalTime.MIDNIGHT,
localTime)
new Time(millisSinceMidnight)
}
}
@@ -172,9 +180,9 @@ class SparkConnectResultSet(
sparkResult.schema.fields(idx).dataType match {
case TimestampNTZType =>
// TIMESTAMP_NTZ is represented as LocalDateTime
- Timestamp.valueOf(value.asInstanceOf[java.time.LocalDateTime])
+ Timestamp.valueOf(value.asInstanceOf[LocalDateTime])
case TimestampType =>
- // TIMESTAMP is represented as java.sql.Timestamp
+ // TIMESTAMP is represented as Timestamp
value.asInstanceOf[Timestamp]
case other =>
throw new SQLException(
@@ -533,52 +541,46 @@ class SparkConnectResultSet(
throw new SQLFeatureNotSupportedException
/**
- * Gets the value of the designated column in the current row as a
java.sql.Date object.
- * The Calendar parameter is ignored for Date type since it is not
timezone-aware.
+ * @inheritdoc
*
- * @param columnIndex the first column is 1, the second is 2, ...
- * @param cal the Calendar to use in constructing the date (ignored for Date
type)
- * @return the column value; if the value is SQL NULL, the value returned is
null
+ * Note: The Calendar parameter is ignored. Spark Connect handles timezone
conversions
+ * server-side to avoid client/server timezone inconsistencies.
*/
override def getDate(columnIndex: Int, cal: Calendar): Date = {
getDate(columnIndex)
}
/**
- * Gets the value of the designated column in the current row as a
java.sql.Date object.
- * The Calendar parameter is ignored for Date type since it is not
timezone-aware.
+ * @inheritdoc
*
- * @param columnLabel the label for the column specified with the SQL AS
clause
- * @param cal the Calendar to use in constructing the date (ignored for Date
type)
- * @return the column value; if the value is SQL NULL, the value returned is
null
+ * Note: The Calendar parameter is ignored. Spark Connect handles timezone
conversions
+ * server-side to avoid client/server timezone inconsistencies.
*/
override def getDate(columnLabel: String, cal: Calendar): Date =
getDate(findColumn(columnLabel))
/**
- * Gets the value of the designated column in the current row as a
java.sql.Time object.
- * The Calendar parameter is ignored for Time type since it is not
timezone-aware.
+ * @inheritdoc
*
- * @param columnIndex the first column is 1, the second is 2, ...
- * @param cal the Calendar to use in constructing the time (ignored for Time
type)
- * @return the column value; if the value is SQL NULL, the value returned is
null
+ * Note: The Calendar parameter is ignored. Spark Connect handles timezone
conversions
+ * server-side to avoid client/server timezone inconsistencies.
*/
override def getTime(columnIndex: Int, cal: Calendar): Time = {
getTime(columnIndex)
}
/**
- * Gets the value of the designated column in the current row as a
java.sql.Time object.
- * The Calendar parameter is ignored for Time type since it is not
timezone-aware.
+ * @inheritdoc
*
- * @param columnLabel the label for the column specified with the SQL AS
clause
- * @param cal the Calendar to use in constructing the time (ignored for Time
type)
- * @return the column value; if the value is SQL NULL, the value returned is
null
+ * Note: The Calendar parameter is ignored. Spark Connect handles timezone
conversions
+ * server-side to avoid client/server timezone inconsistencies.
*/
override def getTime(columnLabel: String, cal: Calendar): Time =
getTime(findColumn(columnLabel))
/**
+ * @inheritdoc
+ *
* Note: The Calendar parameter is ignored. Spark Connect handles timezone
conversions
* server-side to avoid client/server timezone inconsistencies.
*/
@@ -587,6 +589,8 @@ class SparkConnectResultSet(
}
/**
+ * @inheritdoc
+ *
* Note: The Calendar parameter is ignored. Spark Connect handles timezone
conversions
* server-side to avoid client/server timezone inconsistencies.
*/
diff --git
a/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcErrorUtils.scala
b/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcErrorUtils.scala
index 3d9f72d87d15..6480c5d768f3 100644
---
a/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcErrorUtils.scala
+++
b/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcErrorUtils.scala
@@ -21,7 +21,7 @@ import java.sql.{Array => _, _}
private[jdbc] object JdbcErrorUtils {
- def stringfiyTransactionIsolationLevel(level: Int): String = level match {
+ def stringifyTransactionIsolationLevel(level: Int): String = level match {
case Connection.TRANSACTION_NONE => "NONE"
case Connection.TRANSACTION_READ_UNCOMMITTED => "READ_UNCOMMITTED"
case Connection.TRANSACTION_READ_COMMITTED => "READ_COMMITTED"
@@ -31,7 +31,7 @@ private[jdbc] object JdbcErrorUtils {
throw new IllegalArgumentException(s"Invalid transaction isolation
level: $level")
}
- def stringfiyHoldability(holdability: Int): String = holdability match {
+ def stringifyHoldability(holdability: Int): String = holdability match {
case ResultSet.HOLD_CURSORS_OVER_COMMIT => "HOLD_CURSORS_OVER_COMMIT"
case ResultSet.CLOSE_CURSORS_AT_COMMIT => "CLOSE_CURSORS_AT_COMMIT"
case _ =>
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]