pan3793 commented on code in PR #52988:
URL: https://github.com/apache/spark/pull/52988#discussion_r2512696477
##########
sql/connect/client/jdbc/src/test/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectJdbcDataTypeSuite.scala:
##########
@@ -248,4 +248,59 @@ class SparkConnectJdbcDataTypeSuite extends ConnectFunSuite with RemoteSparkSess
}
}
}
+
+ test("getter functions column index out of bound") {
+ Seq(
+ ("'foo'", (rs: ResultSet) => rs.getString(999)),
+ ("true", (rs: ResultSet) => rs.getBoolean(999)),
+ ("cast(1 as byte)", (rs: ResultSet) => rs.getByte(999)),
+ ("cast(1 as short)", (rs: ResultSet) => rs.getShort(999)),
+ ("cast(1 as int)", (rs: ResultSet) => rs.getInt(999)),
+ ("cast(1 as bigint)", (rs: ResultSet) => rs.getLong(999)),
+ ("cast(1 as float)", (rs: ResultSet) => rs.getFloat(999)),
+ ("cast(1 as double)", (rs: ResultSet) => rs.getDouble(999)),
+ ("cast(1 as DECIMAL(10,5))", (rs: ResultSet) => rs.getBigDecimal(999))
+ ).foreach {
+ case (query, getter) =>
+ withExecuteQuery(s"SELECT $query") { rs =>
+ assert(rs.next())
+        withClue("SQLException is not thrown when the result set index goes out of bound") {
+ intercept[SQLException] {
+ getter(rs)
+ }
+ }
+ }
+ }
+ }
+
+ test("getter functions called after statement closed") {
+ Seq(
+ ("'foo'", (rs: ResultSet) => rs.getString(1), "foo"),
+ ("true", (rs: ResultSet) => rs.getBoolean(1), true),
+ ("cast(1 as byte)", (rs: ResultSet) => rs.getByte(1), 1.toByte),
Review Comment:
Can we have the SQL keywords (except for function names) in UPPER_CASE? e.g. `cast(1 AS BYTE)`
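If applied across the suite, the test data might look like the sketch below; this is just the suggested casing convention applied to the existing entries, not code from the PR:
```scala
// Sketch: SQL keywords upper-cased, function name `cast` kept lower-case.
Seq(
  ("'foo'", (rs: ResultSet) => rs.getString(999)),
  ("true", (rs: ResultSet) => rs.getBoolean(999)),
  ("cast(1 AS BYTE)", (rs: ResultSet) => rs.getByte(999)),
  ("cast(1 AS SHORT)", (rs: ResultSet) => rs.getShort(999)),
  ("cast(1 AS INT)", (rs: ResultSet) => rs.getInt(999)),
  ("cast(1 AS BIGINT)", (rs: ResultSet) => rs.getLong(999)),
  ("cast(1 AS FLOAT)", (rs: ResultSet) => rs.getFloat(999)),
  ("cast(1 AS DOUBLE)", (rs: ResultSet) => rs.getDouble(999)),
  ("cast(1 AS DECIMAL(10,5))", (rs: ResultSet) => rs.getBigDecimal(999))
)
```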
##########
sql/connect/client/jdbc/src/test/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectJdbcDataTypeSuite.scala:
##########
@@ -248,4 +248,59 @@ class SparkConnectJdbcDataTypeSuite extends ConnectFunSuite with RemoteSparkSess
}
}
}
+
+ test("getter functions column index out of bound") {
+ Seq(
+ ("'foo'", (rs: ResultSet) => rs.getString(999)),
+ ("true", (rs: ResultSet) => rs.getBoolean(999)),
+ ("cast(1 as byte)", (rs: ResultSet) => rs.getByte(999)),
+ ("cast(1 as short)", (rs: ResultSet) => rs.getShort(999)),
+ ("cast(1 as int)", (rs: ResultSet) => rs.getInt(999)),
+ ("cast(1 as bigint)", (rs: ResultSet) => rs.getLong(999)),
+ ("cast(1 as float)", (rs: ResultSet) => rs.getFloat(999)),
+ ("cast(1 as double)", (rs: ResultSet) => rs.getDouble(999)),
+ ("cast(1 as DECIMAL(10,5))", (rs: ResultSet) => rs.getBigDecimal(999))
+ ).foreach {
+ case (query, getter) =>
+ withExecuteQuery(s"SELECT $query") { rs =>
+ assert(rs.next())
+        withClue("SQLException is not thrown when the result set index goes out of bound") {
+ intercept[SQLException] {
+ getter(rs)
+ }
+ }
+ }
+ }
+ }
+
+ test("getter functions called after statement closed") {
+ Seq(
+ ("'foo'", (rs: ResultSet) => rs.getString(1), "foo"),
+ ("true", (rs: ResultSet) => rs.getBoolean(1), true),
+ ("cast(1 as byte)", (rs: ResultSet) => rs.getByte(1), 1.toByte),
+ ("cast(1 as short)", (rs: ResultSet) => rs.getShort(1), 1.toShort),
+ ("cast(1 as int)", (rs: ResultSet) => rs.getInt(1), 1.toInt),
+ ("cast(1 as bigint)", (rs: ResultSet) => rs.getLong(1), 1.toLong),
+ ("cast(1 as float)", (rs: ResultSet) => rs.getFloat(1), 1.toFloat),
+ ("cast(1 as double)", (rs: ResultSet) => rs.getDouble(1), 1.toDouble),
+ ("cast(1 as DECIMAL(10,5))", (rs: ResultSet) => rs.getBigDecimal(1),
+ new java.math.BigDecimal("1.00000"))
+ ).foreach {
+ case (query, getter, value) =>
Review Comment:
```suggestion
case (query, getter, expectedValue) =>
```
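For context, a minimal sketch of the renamed binding in use, assuming the test body follows the same `withExecuteQuery` pattern as the out-of-bounds test above; the assertion line is illustrative, not the PR's exact code:
```scala
case (query, getter, expectedValue) =>
  withExecuteQuery(s"SELECT $query") { rs =>
    assert(rs.next())
    // illustrative check; the actual test body may differ
    assert(getter(rs) === expectedValue)
  }
```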
##########
sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectResultSet.scala:
##########
@@ -76,6 +76,22 @@ class SparkConnectResultSet(
}
}
+ private[jdbc] def getField[T](columnIndex: Int)(get: Int => T): Option[T] = {
Review Comment:
this would be a hot path for large query result set retrieval, so let's pass the default value as a parameter instead of returning `Option`, which forces the caller to unwrap it again.
nit: "field" usually refers to an object attribute; I feel `getColumnValue` or `getVal` is better.
```suggestion
  private[jdbc] def getVal[T](columnIndex: Int, defaultVal: T)(getter: Int => T): T = {
```
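A minimal sketch of how the full helper might look under this suggestion. `checkOpen()`, `currentRow`, and `_wasNull` are assumptions about the surrounding class, and the 1-based-to-0-based index translation is illustrative; none of this is the PR's actual implementation:
```scala
// Sketch only: checkOpen, currentRow, and _wasNull are hypothetical members
// of SparkConnectResultSet, named here for illustration.
private[jdbc] def getVal[T](columnIndex: Int, defaultVal: T)(getter: Int => T): T = {
  checkOpen() // assumed to throw SQLException when the result set is closed
  if (columnIndex < 1 || columnIndex > currentRow.length) {
    throw new SQLException(s"The column index is out of range: $columnIndex")
  }
  if (currentRow.isNullAt(columnIndex - 1)) { // JDBC column indices are 1-based
    _wasNull = true
    defaultVal // returned directly; no Option allocation on the per-column hot path
  } else {
    _wasNull = false
    getter(columnIndex - 1)
  }
}
```
Compared with the `Option[T]` version, this saves a `Some` allocation on every column read while keeping `wasNull()` semantics intact, which is the hot-path concern raised above.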
##########
sql/connect/client/jdbc/src/test/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectJdbcDataTypeSuite.scala:
##########
@@ -248,4 +248,59 @@ class SparkConnectJdbcDataTypeSuite extends ConnectFunSuite with RemoteSparkSess
}
}
}
+
+ test("getter functions column index out of bound") {
+ Seq(
+ ("'foo'", (rs: ResultSet) => rs.getString(999)),
+ ("true", (rs: ResultSet) => rs.getBoolean(999)),
+ ("cast(1 as byte)", (rs: ResultSet) => rs.getByte(999)),
+ ("cast(1 as short)", (rs: ResultSet) => rs.getShort(999)),
+ ("cast(1 as int)", (rs: ResultSet) => rs.getInt(999)),
+ ("cast(1 as bigint)", (rs: ResultSet) => rs.getLong(999)),
+ ("cast(1 as float)", (rs: ResultSet) => rs.getFloat(999)),
+ ("cast(1 as double)", (rs: ResultSet) => rs.getDouble(999)),
+ ("cast(1 as DECIMAL(10,5))", (rs: ResultSet) => rs.getBigDecimal(999))
+ ).foreach {
+ case (query, getter) =>
+ withExecuteQuery(s"SELECT $query") { rs =>
+ assert(rs.next())
+        withClue("SQLException is not thrown when the result set index goes out of bound") {
+ intercept[SQLException] {
+ getter(rs)
+ }
Review Comment:
please check the message of the thrown `SQLException` as well.
nit: I feel the logic here is clear enough; `withClue` may not be required.
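A minimal sketch of the requested assertion with `withClue` dropped; the checked fragment is an assumption and should be aligned with the driver's actual error message:
```scala
val e = intercept[SQLException] {
  getter(rs)
}
// hypothetical fragment; match it to the real message text
assert(e.getMessage.contains("999"))
```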