This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 5132ab1f7b33 [SPARK-49877][SQL] Change classifyException function signature: add isRuntime argument
5132ab1f7b33 is described below
commit 5132ab1f7b336d0fa845e6fdfe6e2dabcc2dedc0
Author: ivanjevtic-db <[email protected]>
AuthorDate: Mon Oct 7 14:55:39 2024 +0200
[SPARK-49877][SQL] Change classifyException function signature: add isRuntime argument
### What changes were proposed in this pull request?
This PR updates the ```classifyException``` function so that it can return
either ```AnalysisException``` or ```SparkRuntimeException```. This is
achieved by adding a new parameter, ```isRuntime```, and widening the return
type to ```Throwable with SparkThrowable```, which both exception types
satisfy.
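For illustration, a minimal sketch of the new shape (it mirrors the
```NoLegacyJDBCError``` change in the diff below; the wrapping object is
only scaffolding for the example):
```scala
import org.apache.spark.{SparkRuntimeException, SparkThrowable}
import org.apache.spark.sql.AnalysisException

object ClassifyExceptionSketch {
  // The widened return type admits both exception families.
  def classifyException(
      e: Throwable,
      errorClass: String,
      messageParameters: Map[String, String],
      description: String,
      isRuntime: Boolean): Throwable with SparkThrowable = {
    if (isRuntime) {
      // Execution-time failures surface as SparkRuntimeException.
      new SparkRuntimeException(
        errorClass = errorClass,
        messageParameters = messageParameters,
        cause = e)
    } else {
      // Analysis-time failures keep the previous AnalysisException behavior.
      new AnalysisException(
        errorClass = errorClass,
        messageParameters = messageParameters,
        cause = Some(e))
    }
  }
}
```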
### Why are the changes needed?
The changes are needed to allow the ```classifyException``` function to be
used in the execution part of the code.
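As a hedged usage sketch, a caller on the execution path could pass
```isRuntime = true``` so failures surface as runtime exceptions (the helper
name, error class, and message parameters below are placeholders, not part
of this patch):
```scala
import org.apache.spark.sql.jdbc.JdbcDialect

object RuntimeClassificationSketch {
  // Hypothetical helper: runs an execution-time JDBC action and classifies
  // any failure through the dialect with isRuntime = true, yielding a
  // SparkRuntimeException rather than an AnalysisException.
  def withRuntimeClassification[T](dialect: JdbcDialect, description: String)(f: => T): T = {
    try {
      f
    } catch {
      case e: Throwable =>
        throw dialect.classifyException(
          e,
          errorClass = "FAILED_JDBC.UNCLASSIFIED", // placeholder error class
          messageParameters = Map("description" -> description), // placeholder
          description = description,
          isRuntime = true)
    }
  }
}
```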
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Not needed.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #48351 from ivanjevtic-db/Change-classify-exception-function-signature.
Lead-authored-by: ivanjevtic-db <[email protected]>
Co-authored-by: Ivan Jevtic <[email protected]>
Co-authored-by: milastdbx <[email protected]>
Co-authored-by: Maxim Gekk <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
.../sql/execution/datasources/jdbc/JdbcUtils.scala |  5 +--
.../execution/datasources/v2/jdbc/JDBCTable.scala  |  6 ++--
.../datasources/v2/jdbc/JDBCTableCatalog.scala     | 36 ++++++++++++++--------
.../org/apache/spark/sql/jdbc/DB2Dialect.scala     | 12 ++++----
.../org/apache/spark/sql/jdbc/H2Dialect.scala      |  8 ++---
.../org/apache/spark/sql/jdbc/JdbcDialects.scala   | 26 +++++++++++-----
.../apache/spark/sql/jdbc/MsSqlServerDialect.scala | 10 +++---
.../org/apache/spark/sql/jdbc/MySQLDialect.scala   | 11 ++++---
.../org/apache/spark/sql/jdbc/OracleDialect.scala  | 11 ++++---
.../apache/spark/sql/jdbc/PostgresDialect.scala    | 12 +++++---
10 files changed, 84 insertions(+), 53 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
index f7d2d61eab65..7946068b9452 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
@@ -1262,13 +1262,14 @@ object JdbcUtils extends Logging with SQLConfHelper {
errorClass: String,
messageParameters: Map[String, String],
dialect: JdbcDialect,
- description: String)(f: => T): T = {
+ description: String,
+ isRuntime: Boolean)(f: => T): T = {
try {
f
} catch {
case e: SparkThrowable with Throwable => throw e
case e: Throwable =>
- throw dialect.classifyException(e, errorClass, messageParameters, description)
+ throw dialect.classifyException(e, errorClass, messageParameters, description, isRuntime)
}
}
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTable.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTable.scala
index 6828bb0f0c4d..20283cc12459 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTable.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTable.scala
@@ -70,7 +70,8 @@ case class JDBCTable(ident: Identifier, schema: StructType, jdbcOptions: JDBCOpt
"indexName" -> toSQLId(indexName),
"tableName" -> toSQLId(name)),
dialect = JdbcDialects.get(jdbcOptions.url),
- description = s"Failed to create index $indexName in ${name()}") {
+ description = s"Failed to create index $indexName in ${name()}",
+ isRuntime = false) {
JdbcUtils.createIndex(
conn, indexName, ident, columns, columnsProperties, properties, jdbcOptions)
}
@@ -92,7 +93,8 @@ case class JDBCTable(ident: Identifier, schema: StructType, jdbcOptions: JDBCOpt
"indexName" -> toSQLId(indexName),
"tableName" -> toSQLId(name)),
dialect = JdbcDialects.get(jdbcOptions.url),
- description = s"Failed to drop index $indexName in ${name()}") {
+ description = s"Failed to drop index $indexName in ${name()}",
+ isRuntime = false) {
JdbcUtils.dropIndex(conn, indexName, ident, jdbcOptions)
}
}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala
index 3871bdf50177..99e9abe96518 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala
@@ -73,7 +73,8 @@ class JDBCTableCatalog extends TableCatalog
"url" -> options.getRedactUrl(),
"namespace" -> toSQLId(namespace.toSeq)),
dialect,
- description = s"Failed get tables from: ${namespace.mkString(".")}") {
+ description = s"Failed get tables from: ${namespace.mkString(".")}",
+ isRuntime = false) {
conn.getMetaData.getTables(null, schemaPattern, "%", Array("TABLE"))
}
new Iterator[Identifier] {
@@ -93,7 +94,8 @@ class JDBCTableCatalog extends TableCatalog
"url" -> options.getRedactUrl(),
"tableName" -> toSQLId(ident)),
dialect,
- description = s"Failed table existence check: $ident") {
+ description = s"Failed table existence check: $ident",
+ isRuntime = false) {
JdbcUtils.withConnection(options)(JdbcUtils.tableExists(_, writeOptions))
}
}
@@ -120,7 +122,8 @@ class JDBCTableCatalog extends TableCatalog
"oldName" -> toSQLId(oldIdent),
"newName" -> toSQLId(newIdent)),
dialect,
- description = s"Failed table renaming from $oldIdent to $newIdent") {
+ description = s"Failed table renaming from $oldIdent to $newIdent",
+ isRuntime = false) {
JdbcUtils.renameTable(conn, oldIdent, newIdent, options)
}
}
@@ -136,7 +139,8 @@ class JDBCTableCatalog extends TableCatalog
"url" -> options.getRedactUrl(),
"tableName" -> toSQLId(ident)),
dialect,
- description = s"Failed to load table: $ident"
+ description = s"Failed to load table: $ident",
+ isRuntime = false
) {
val schema = JDBCRDD.resolveTable(optionsWithTableName)
JDBCTable(ident, schema, optionsWithTableName)
@@ -192,7 +196,8 @@ class JDBCTableCatalog extends TableCatalog
"url" -> options.getRedactUrl(),
"tableName" -> toSQLId(ident)),
dialect,
- description = s"Failed table creation: $ident") {
+ description = s"Failed table creation: $ident",
+ isRuntime = false) {
JdbcUtils.createTable(conn, getTableName(ident), schema, caseSensitive, writeOptions)
}
}
@@ -209,7 +214,8 @@ class JDBCTableCatalog extends TableCatalog
"url" -> options.getRedactUrl(),
"tableName" -> toSQLId(ident)),
dialect,
- description = s"Failed table altering: $ident") {
+ description = s"Failed table altering: $ident",
+ isRuntime = false) {
JdbcUtils.alterTable(conn, getTableName(ident), changes, options)
}
loadTable(ident)
@@ -225,7 +231,8 @@ class JDBCTableCatalog extends TableCatalog
"url" -> options.getRedactUrl(),
"namespace" -> toSQLId(namespace.toSeq)),
dialect,
- description = s"Failed namespace exists: ${namespace.mkString}") {
+ description = s"Failed namespace exists: ${namespace.mkString}",
+ isRuntime = false) {
JdbcUtils.schemaExists(conn, options, db)
}
}
@@ -238,7 +245,8 @@ class JDBCTableCatalog extends TableCatalog
errorClass = "FAILED_JDBC.LIST_NAMESPACES",
messageParameters = Map("url" -> options.getRedactUrl()),
dialect,
- description = s"Failed list namespaces") {
+ description = s"Failed list namespaces",
+ isRuntime = false) {
JdbcUtils.listSchemas(conn, options)
}
}
@@ -292,7 +300,8 @@ class JDBCTableCatalog extends TableCatalog
"url" -> options.getRedactUrl(),
"namespace" -> toSQLId(db)),
dialect,
- description = s"Failed create name space: $db") {
+ description = s"Failed create name space: $db",
+ isRuntime = false) {
JdbcUtils.createSchema(conn, options, db, comment)
}
}
@@ -317,7 +326,8 @@ class JDBCTableCatalog extends TableCatalog
"url" -> options.getRedactUrl(),
"namespace" -> toSQLId(db)),
dialect,
- description = s"Failed create comment on name space: $db") {
+ description = s"Failed create comment on name space: $db",
+ isRuntime = false) {
JdbcUtils.alterSchemaComment(conn, options, db, set.value)
}
}
@@ -334,7 +344,8 @@ class JDBCTableCatalog extends TableCatalog
"url" -> options.getRedactUrl(),
"namespace" -> toSQLId(db)),
dialect,
- description = s"Failed remove comment on name space: $db") {
+ description = s"Failed remove comment on name space: $db",
+ isRuntime = false) {
JdbcUtils.removeSchemaComment(conn, options, db)
}
}
@@ -362,7 +373,8 @@ class JDBCTableCatalog extends TableCatalog
"url" -> options.getRedactUrl(),
"namespace" -> toSQLId(db)),
dialect,
- description = s"Failed drop name space: $db") {
+ description = s"Failed drop name space: $db",
+ isRuntime = false) {
JdbcUtils.dropSchema(conn, options, db, cascade)
true
}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/DB2Dialect.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/DB2Dialect.scala
index f7cf70ac957b..2f54f1f62fde 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/DB2Dialect.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/DB2Dialect.scala
@@ -22,8 +22,7 @@ import java.util.Locale
import scala.util.control.NonFatal
-import org.apache.spark.SparkUnsupportedOperationException
-import org.apache.spark.sql.AnalysisException
+import org.apache.spark.{SparkThrowable, SparkUnsupportedOperationException}
import org.apache.spark.sql.catalyst.SQLConfHelper
import org.apache.spark.sql.catalyst.analysis.NonEmptyNamespaceException
import org.apache.spark.sql.connector.catalog.Identifier
@@ -153,12 +152,12 @@ private case class DB2Dialect() extends JdbcDialect with SQLConfHelper with NoLe
override def removeSchemaCommentQuery(schema: String): String = {
s"COMMENT ON SCHEMA ${quoteIdentifier(schema)} IS ''"
}
-
override def classifyException(
e: Throwable,
errorClass: String,
messageParameters: Map[String, String],
- description: String): AnalysisException = {
+ description: String,
+ isRuntime: Boolean): Throwable with SparkThrowable = {
e match {
case sqlException: SQLException =>
sqlException.getSQLState match {
@@ -171,9 +170,10 @@ private case class DB2Dialect() extends JdbcDialect with SQLConfHelper with NoLe
case "42710" if errorClass == "FAILED_JDBC.RENAME_TABLE" =>
val newTable = messageParameters("newName")
throw QueryCompilationErrors.tableAlreadyExistsError(newTable)
- case _ => super.classifyException(e, errorClass, messageParameters, description)
+ case _ =>
+ super.classifyException(e, errorClass, messageParameters, description, isRuntime)
}
- case _ => super.classifyException(e, errorClass, messageParameters, description)
+ case _ => super.classifyException(e, errorClass, messageParameters, description, isRuntime)
}
}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/H2Dialect.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/H2Dialect.scala
index 3ece44ece9e6..798ecb5b36ff 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/H2Dialect.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/H2Dialect.scala
@@ -27,8 +27,7 @@ import scala.util.control.NonFatal
import org.apache.commons.lang3.StringUtils
-import org.apache.spark.SparkUnsupportedOperationException
-import org.apache.spark.sql.AnalysisException
+import org.apache.spark.{SparkThrowable, SparkUnsupportedOperationException}
import org.apache.spark.sql.catalyst.analysis.{IndexAlreadyExistsException,
NoSuchIndexException, NoSuchNamespaceException, NoSuchTableException,
TableAlreadyExistsException}
import org.apache.spark.sql.connector.catalog.Identifier
import org.apache.spark.sql.connector.catalog.functions.UnboundFunction
@@ -200,7 +199,8 @@ private[sql] case class H2Dialect() extends JdbcDialect with NoLegacyJDBCError {
e: Throwable,
errorClass: String,
messageParameters: Map[String, String],
- description: String): AnalysisException = {
+ description: String,
+ isRuntime: Boolean): Throwable with SparkThrowable = {
e match {
case exception: SQLException =>
// Error codes are from https://www.h2database.com/javadoc/org/h2/api/ErrorCode.html
@@ -244,7 +244,7 @@ private[sql] case class H2Dialect() extends JdbcDialect with NoLegacyJDBCError {
}
case _ => // do nothing
}
- super.classifyException(e, errorClass, messageParameters, description)
+ super.classifyException(e, errorClass, messageParameters, description, isRuntime)
}
override def compileExpression(expr: Expression): Option[String] = {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
index 290665020f88..3bf1390cb664 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
@@ -28,7 +28,7 @@ import scala.util.control.NonFatal
import org.apache.commons.lang3.StringUtils
-import org.apache.spark.SparkUnsupportedOperationException
+import org.apache.spark.{SparkRuntimeException, SparkThrowable, SparkUnsupportedOperationException}
import org.apache.spark.annotation.{DeveloperApi, Since}
import org.apache.spark.internal.Logging
import org.apache.spark.sql.AnalysisException
@@ -741,13 +741,15 @@ abstract class JdbcDialect extends Serializable with Logging {
* @param errorClass The error class assigned in the case of an unclassified `e`
* @param messageParameters The message parameters of `errorClass`
* @param description The error description
- * @return `AnalysisException` or its sub-class.
+ * @param isRuntime Whether the exception is a runtime exception or not.
+ * @return `SparkThrowable + Throwable` or its sub-class.
*/
def classifyException(
e: Throwable,
errorClass: String,
messageParameters: Map[String, String],
- description: String): AnalysisException = {
+ description: String,
+ isRuntime: Boolean): Throwable with SparkThrowable = {
classifyException(description, e)
}
@@ -850,11 +852,19 @@ trait NoLegacyJDBCError extends JdbcDialect {
e: Throwable,
errorClass: String,
messageParameters: Map[String, String],
- description: String): AnalysisException = {
- new AnalysisException(
- errorClass = errorClass,
- messageParameters = messageParameters,
- cause = Some(e))
+ description: String,
+ isRuntime: Boolean): Throwable with SparkThrowable = {
+ if (isRuntime) {
+ new SparkRuntimeException(
+ errorClass = errorClass,
+ messageParameters = messageParameters,
+ cause = e)
+ } else {
+ new AnalysisException(
+ errorClass = errorClass,
+ messageParameters = messageParameters,
+ cause = Some(e))
+ }
}
}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MsSqlServerDialect.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MsSqlServerDialect.scala
index 369f710edccf..7d476d43e5c7 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MsSqlServerDialect.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MsSqlServerDialect.scala
@@ -22,7 +22,7 @@ import java.util.Locale
import scala.util.control.NonFatal
-import org.apache.spark.sql.AnalysisException
+import org.apache.spark.SparkThrowable
import org.apache.spark.sql.catalyst.analysis.NonEmptyNamespaceException
import org.apache.spark.sql.connector.catalog.Identifier
import org.apache.spark.sql.connector.expressions.{Expression, NullOrdering, SortDirection}
@@ -207,7 +207,8 @@ private case class MsSqlServerDialect() extends JdbcDialect with NoLegacyJDBCErr
e: Throwable,
errorClass: String,
messageParameters: Map[String, String],
- description: String): AnalysisException = {
+ description: String,
+ isRuntime: Boolean): Throwable with SparkThrowable = {
e match {
case sqlException: SQLException =>
sqlException.getErrorCode match {
@@ -219,9 +220,10 @@ private case class MsSqlServerDialect() extends JdbcDialect with NoLegacyJDBCErr
case 15335 if errorClass == "FAILED_JDBC.RENAME_TABLE" =>
val newTable = messageParameters("newName")
throw QueryCompilationErrors.tableAlreadyExistsError(newTable)
- case _ => super.classifyException(e, errorClass, messageParameters, description)
+ case _ =>
+ super.classifyException(e, errorClass, messageParameters, description, isRuntime)
}
- case _ => super.classifyException(e, errorClass, messageParameters, description)
+ case _ => super.classifyException(e, errorClass, messageParameters, description, isRuntime)
}
}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MySQLDialect.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MySQLDialect.scala
index 785bf5b13aa7..dd0118d87599 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MySQLDialect.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MySQLDialect.scala
@@ -24,8 +24,7 @@ import java.util.Locale
import scala.collection.mutable.ArrayBuilder
import scala.util.control.NonFatal
-import org.apache.spark.SparkUnsupportedOperationException
-import org.apache.spark.sql.AnalysisException
+import org.apache.spark.{SparkThrowable, SparkUnsupportedOperationException}
import org.apache.spark.sql.catalyst.SQLConfHelper
import org.apache.spark.sql.catalyst.analysis.{IndexAlreadyExistsException, NoSuchIndexException}
import org.apache.spark.sql.connector.catalog.Identifier
@@ -353,7 +352,8 @@ private case class MySQLDialect() extends JdbcDialect with SQLConfHelper with No
e: Throwable,
errorClass: String,
messageParameters: Map[String, String],
- description: String): AnalysisException = {
+ description: String,
+ isRuntime: Boolean): Throwable with SparkThrowable = {
e match {
case sqlException: SQLException =>
sqlException.getErrorCode match {
@@ -369,10 +369,11 @@ private case class MySQLDialect() extends JdbcDialect with SQLConfHelper with No
val indexName = messageParameters("indexName")
val tableName = messageParameters("tableName")
throw new NoSuchIndexException(indexName, tableName, cause = Some(e))
- case _ => super.classifyException(e, errorClass, messageParameters, description)
+ case _ =>
+ super.classifyException(e, errorClass, messageParameters, description, isRuntime)
}
case unsupported: UnsupportedOperationException => throw unsupported
- case _ => super.classifyException(e, errorClass, messageParameters, description)
+ case _ => super.classifyException(e, errorClass, messageParameters, description, isRuntime)
}
}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/OracleDialect.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/OracleDialect.scala
index 6175b5f65993..a73a34c64635 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/OracleDialect.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/OracleDialect.scala
@@ -22,8 +22,7 @@ import java.util.Locale
import scala.util.control.NonFatal
-import org.apache.spark.SparkUnsupportedOperationException
-import org.apache.spark.sql.AnalysisException
+import org.apache.spark.{SparkThrowable, SparkUnsupportedOperationException}
import org.apache.spark.sql.catalyst.SQLConfHelper
import org.apache.spark.sql.connector.expressions.Expression
import org.apache.spark.sql.errors.QueryCompilationErrors
@@ -236,16 +235,18 @@ private case class OracleDialect() extends JdbcDialect with SQLConfHelper with N
e: Throwable,
errorClass: String,
messageParameters: Map[String, String],
- description: String): AnalysisException = {
+ description: String,
+ isRuntime: Boolean): Throwable with SparkThrowable = {
e match {
case sqlException: SQLException =>
sqlException.getErrorCode match {
case 955 if errorClass == "FAILED_JDBC.RENAME_TABLE" =>
val newTable = messageParameters("newName")
throw QueryCompilationErrors.tableAlreadyExistsError(newTable)
- case _ => super.classifyException(e, errorClass, messageParameters, description)
+ case _ =>
+ super.classifyException(e, errorClass, messageParameters, description, isRuntime)
}
- case _ => super.classifyException(e, errorClass, messageParameters, description)
+ case _ => super.classifyException(e, errorClass, messageParameters, description, isRuntime)
}
}
}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala
index 03fefd82802e..60258ecbb0d6 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala
@@ -24,9 +24,9 @@ import java.util.Locale
import scala.util.Using
+import org.apache.spark.SparkThrowable
import org.apache.spark.internal.LogKeys.COLUMN_NAME
import org.apache.spark.internal.MDC
-import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.SQLConfHelper
import org.apache.spark.sql.catalyst.analysis.{IndexAlreadyExistsException,
NonEmptyNamespaceException, NoSuchIndexException}
import org.apache.spark.sql.connector.catalog.Identifier
@@ -260,7 +260,8 @@ private case class PostgresDialect()
e: Throwable,
errorClass: String,
messageParameters: Map[String, String],
- description: String): AnalysisException = {
+ description: String,
+ isRuntime: Boolean): Throwable with SparkThrowable = {
e match {
case sqlException: SQLException =>
sqlException.getSQLState match {
@@ -279,7 +280,7 @@ private case class PostgresDialect()
if (tblRegexp.nonEmpty) {
throw QueryCompilationErrors.tableAlreadyExistsError(tblRegexp.get.group(1))
} else {
- super.classifyException(e, errorClass, messageParameters, description)
+ super.classifyException(e, errorClass, messageParameters, description, isRuntime)
}
}
case "42704" if errorClass == "FAILED_JDBC.DROP_INDEX" =>
@@ -291,10 +292,11 @@ private case class PostgresDialect()
namespace = messageParameters.get("namespace").toArray,
details = sqlException.getMessage,
cause = Some(e))
- case _ => super.classifyException(e, errorClass, messageParameters, description)
+ case _ =>
+ super.classifyException(e, errorClass, messageParameters, description, isRuntime)
}
case unsupported: UnsupportedOperationException => throw unsupported
- case _ => super.classifyException(e, errorClass, messageParameters, description)
+ case _ => super.classifyException(e, errorClass, messageParameters, description, isRuntime)
}
}