This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch branch-4.0
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-4.0 by this push:
new 7ee37d132cf4 [SPARK-49942][SQL] Rename `errorClass` to `condition` in
`classifyException()`
7ee37d132cf4 is described below
commit 7ee37d132cf40b767b75dd45ba716c467c36cc41
Author: Max Gekk <[email protected]>
AuthorDate: Wed Feb 5 19:34:41 2025 +0100
[SPARK-49942][SQL] Rename `errorClass` to `condition` in
`classifyException()`
### What changes were proposed in this pull request?
In the PR, I propose to rename the `errorClass` parameter to `condition` in
`JdbcDialect.classifyException` and in `JdbcUtils.classifyException`.
### Why are the changes needed?
To follow the new naming convention introduced by
https://github.com/apache/spark/pull/44902.
### Does this PR introduce _any_ user-facing change?
No. The `classifyException` method has not been released yet.
### How was this patch tested?
By running the existing GitHub actions.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #48433 from MaxGekk/rename-errorClass-classifyException.
Authored-by: Max Gekk <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
(cherry picked from commit f784b3be75423118117dcccd1e216aaaf3390310)
Signed-off-by: Max Gekk <[email protected]>
---
.../sql/execution/datasources/jdbc/JdbcUtils.scala | 4 ++--
.../execution/datasources/v2/jdbc/JDBCTable.scala | 4 ++--
.../datasources/v2/jdbc/JDBCTableCatalog.scala | 24 +++++++++++-----------
.../org/apache/spark/sql/jdbc/DB2Dialect.scala | 8 ++++----
.../org/apache/spark/sql/jdbc/H2Dialect.scala | 8 ++++----
.../org/apache/spark/sql/jdbc/JdbcDialects.scala | 12 +++++------
.../apache/spark/sql/jdbc/MsSqlServerDialect.scala | 8 ++++----
.../org/apache/spark/sql/jdbc/MySQLDialect.scala | 12 +++++------
.../org/apache/spark/sql/jdbc/OracleDialect.scala | 8 ++++----
.../apache/spark/sql/jdbc/PostgresDialect.scala | 14 ++++++-------
10 files changed, 51 insertions(+), 51 deletions(-)
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
index 6a183635c865..651c29d09766 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
@@ -1272,7 +1272,7 @@ object JdbcUtils extends Logging with SQLConfHelper {
}
def classifyException[T](
- errorClass: String,
+ condition: String,
messageParameters: Map[String, String],
dialect: JdbcDialect,
description: String,
@@ -1282,7 +1282,7 @@ object JdbcUtils extends Logging with SQLConfHelper {
} catch {
case e: SparkThrowable with Throwable => throw e
case e: Throwable =>
- throw dialect.classifyException(e, errorClass, messageParameters,
description, isRuntime)
+ throw dialect.classifyException(e, condition, messageParameters,
description, isRuntime)
}
}
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTable.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTable.scala
index 20283cc12459..cf9aa4f8682b 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTable.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTable.scala
@@ -64,7 +64,7 @@ case class JDBCTable(ident: Identifier, schema: StructType,
jdbcOptions: JDBCOpt
properties: util.Map[String, String]): Unit = {
JdbcUtils.withConnection(jdbcOptions) { conn =>
JdbcUtils.classifyException(
- errorClass = "FAILED_JDBC.CREATE_INDEX",
+ condition = "FAILED_JDBC.CREATE_INDEX",
messageParameters = Map(
"url" -> jdbcOptions.getRedactUrl(),
"indexName" -> toSQLId(indexName),
@@ -87,7 +87,7 @@ case class JDBCTable(ident: Identifier, schema: StructType,
jdbcOptions: JDBCOpt
override def dropIndex(indexName: String): Unit = {
JdbcUtils.withConnection(jdbcOptions) { conn =>
JdbcUtils.classifyException(
- errorClass = "FAILED_JDBC.DROP_INDEX",
+ condition = "FAILED_JDBC.DROP_INDEX",
messageParameters = Map(
"url" -> jdbcOptions.getRedactUrl(),
"indexName" -> toSQLId(indexName),
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala
index 99e9abe96518..715112e35296 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala
@@ -68,7 +68,7 @@ class JDBCTableCatalog extends TableCatalog
JdbcUtils.withConnection(options) { conn =>
val schemaPattern = if (namespace.length == 1) namespace.head else null
val rs = JdbcUtils.classifyException(
- errorClass = "FAILED_JDBC.GET_TABLES",
+ condition = "FAILED_JDBC.GET_TABLES",
messageParameters = Map(
"url" -> options.getRedactUrl(),
"namespace" -> toSQLId(namespace.toSeq)),
@@ -89,7 +89,7 @@ class JDBCTableCatalog extends TableCatalog
val writeOptions = new JdbcOptionsInWrite(
options.parameters + (JDBCOptions.JDBC_TABLE_NAME ->
getTableName(ident)))
JdbcUtils.classifyException(
- errorClass = "FAILED_JDBC.TABLE_EXISTS",
+ condition = "FAILED_JDBC.TABLE_EXISTS",
messageParameters = Map(
"url" -> options.getRedactUrl(),
"tableName" -> toSQLId(ident)),
@@ -116,7 +116,7 @@ class JDBCTableCatalog extends TableCatalog
checkNamespace(oldIdent.namespace())
JdbcUtils.withConnection(options) { conn =>
JdbcUtils.classifyException(
- errorClass = "FAILED_JDBC.RENAME_TABLE",
+ condition = "FAILED_JDBC.RENAME_TABLE",
messageParameters = Map(
"url" -> options.getRedactUrl(),
"oldName" -> toSQLId(oldIdent),
@@ -134,7 +134,7 @@ class JDBCTableCatalog extends TableCatalog
val optionsWithTableName = new JDBCOptions(
options.parameters + (JDBCOptions.JDBC_TABLE_NAME ->
getTableName(ident)))
JdbcUtils.classifyException(
- errorClass = "FAILED_JDBC.LOAD_TABLE",
+ condition = "FAILED_JDBC.LOAD_TABLE",
messageParameters = Map(
"url" -> options.getRedactUrl(),
"tableName" -> toSQLId(ident)),
@@ -191,7 +191,7 @@ class JDBCTableCatalog extends TableCatalog
val schema = CatalogV2Util.v2ColumnsToStructType(columns)
JdbcUtils.withConnection(options) { conn =>
JdbcUtils.classifyException(
- errorClass = "FAILED_JDBC.CREATE_TABLE",
+ condition = "FAILED_JDBC.CREATE_TABLE",
messageParameters = Map(
"url" -> options.getRedactUrl(),
"tableName" -> toSQLId(ident)),
@@ -209,7 +209,7 @@ class JDBCTableCatalog extends TableCatalog
checkNamespace(ident.namespace())
JdbcUtils.withConnection(options) { conn =>
JdbcUtils.classifyException(
- errorClass = "FAILED_JDBC.ALTER_TABLE",
+ condition = "FAILED_JDBC.ALTER_TABLE",
messageParameters = Map(
"url" -> options.getRedactUrl(),
"tableName" -> toSQLId(ident)),
@@ -226,7 +226,7 @@ class JDBCTableCatalog extends TableCatalog
case Array(db) =>
JdbcUtils.withConnection(options) { conn =>
JdbcUtils.classifyException(
- errorClass = "FAILED_JDBC.NAMESPACE_EXISTS",
+ condition = "FAILED_JDBC.NAMESPACE_EXISTS",
messageParameters = Map(
"url" -> options.getRedactUrl(),
"namespace" -> toSQLId(namespace.toSeq)),
@@ -242,7 +242,7 @@ class JDBCTableCatalog extends TableCatalog
override def listNamespaces(): Array[Array[String]] = {
JdbcUtils.withConnection(options) { conn =>
JdbcUtils.classifyException(
- errorClass = "FAILED_JDBC.LIST_NAMESPACES",
+ condition = "FAILED_JDBC.LIST_NAMESPACES",
messageParameters = Map("url" -> options.getRedactUrl()),
dialect,
description = s"Failed list namespaces",
@@ -295,7 +295,7 @@ class JDBCTableCatalog extends TableCatalog
}
JdbcUtils.withConnection(options) { conn =>
JdbcUtils.classifyException(
- errorClass = "FAILED_JDBC.CREATE_NAMESPACE",
+ condition = "FAILED_JDBC.CREATE_NAMESPACE",
messageParameters = Map(
"url" -> options.getRedactUrl(),
"namespace" -> toSQLId(db)),
@@ -321,7 +321,7 @@ class JDBCTableCatalog extends TableCatalog
if (set.property() == SupportsNamespaces.PROP_COMMENT) {
JdbcUtils.withConnection(options) { conn =>
JdbcUtils.classifyException(
- errorClass = "FAILED_JDBC.CREATE_NAMESPACE_COMMENT",
+ condition = "FAILED_JDBC.CREATE_NAMESPACE_COMMENT",
messageParameters = Map(
"url" -> options.getRedactUrl(),
"namespace" -> toSQLId(db)),
@@ -339,7 +339,7 @@ class JDBCTableCatalog extends TableCatalog
if (unset.property() == SupportsNamespaces.PROP_COMMENT) {
JdbcUtils.withConnection(options) { conn =>
JdbcUtils.classifyException(
- errorClass = "FAILED_JDBC.REMOVE_NAMESPACE_COMMENT",
+ condition = "FAILED_JDBC.REMOVE_NAMESPACE_COMMENT",
messageParameters = Map(
"url" -> options.getRedactUrl(),
"namespace" -> toSQLId(db)),
@@ -368,7 +368,7 @@ class JDBCTableCatalog extends TableCatalog
case Array(db) if namespaceExists(namespace) =>
JdbcUtils.withConnection(options) { conn =>
JdbcUtils.classifyException(
- errorClass = "FAILED_JDBC.DROP_NAMESPACE",
+ condition = "FAILED_JDBC.DROP_NAMESPACE",
messageParameters = Map(
"url" -> options.getRedactUrl(),
"namespace" -> toSQLId(db)),
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/DB2Dialect.scala
b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/DB2Dialect.scala
index e818cc915951..2a62fa8504a0 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/DB2Dialect.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/DB2Dialect.scala
@@ -160,7 +160,7 @@ private case class DB2Dialect() extends JdbcDialect with
SQLConfHelper with NoLe
}
override def classifyException(
e: Throwable,
- errorClass: String,
+ condition: String,
messageParameters: Map[String, String],
description: String,
isRuntime: Boolean): Throwable with SparkThrowable = {
@@ -173,13 +173,13 @@ private case class DB2Dialect() extends JdbcDialect with
SQLConfHelper with NoLe
namespace = messageParameters.get("namespace").toArray,
details = sqlException.getMessage,
cause = Some(e))
- case "42710" if errorClass == "FAILED_JDBC.RENAME_TABLE" =>
+ case "42710" if condition == "FAILED_JDBC.RENAME_TABLE" =>
val newTable = messageParameters("newName")
throw QueryCompilationErrors.tableAlreadyExistsError(newTable)
case _ =>
- super.classifyException(e, errorClass, messageParameters,
description, isRuntime)
+ super.classifyException(e, condition, messageParameters,
description, isRuntime)
}
- case _ => super.classifyException(e, errorClass, messageParameters,
description, isRuntime)
+ case _ => super.classifyException(e, condition, messageParameters,
description, isRuntime)
}
}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/H2Dialect.scala
b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/H2Dialect.scala
index 798ecb5b36ff..0d9dc88451cc 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/H2Dialect.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/H2Dialect.scala
@@ -197,7 +197,7 @@ private[sql] case class H2Dialect() extends JdbcDialect
with NoLegacyJDBCError {
override def classifyException(
e: Throwable,
- errorClass: String,
+ condition: String,
messageParameters: Map[String, String],
description: String,
isRuntime: Boolean): Throwable with SparkThrowable = {
@@ -230,13 +230,13 @@ private[sql] case class H2Dialect() extends JdbcDialect
with NoLegacyJDBCError {
throw new NoSuchNamespaceException(errorClass = "SCHEMA_NOT_FOUND",
messageParameters = Map("schemaName" -> quotedName))
// INDEX_ALREADY_EXISTS_1
- case 42111 if errorClass == "FAILED_JDBC.CREATE_INDEX" =>
+ case 42111 if condition == "FAILED_JDBC.CREATE_INDEX" =>
val indexName = messageParameters("indexName")
val tableName = messageParameters("tableName")
throw new IndexAlreadyExistsException(
indexName = indexName, tableName = tableName, cause = Some(e))
// INDEX_NOT_FOUND_1
- case 42112 if errorClass == "FAILED_JDBC.DROP_INDEX" =>
+ case 42112 if condition == "FAILED_JDBC.DROP_INDEX" =>
val indexName = messageParameters("indexName")
val tableName = messageParameters("tableName")
throw new NoSuchIndexException(indexName, tableName, cause =
Some(e))
@@ -244,7 +244,7 @@ private[sql] case class H2Dialect() extends JdbcDialect
with NoLegacyJDBCError {
}
case _ => // do nothing
}
- super.classifyException(e, errorClass, messageParameters, description,
isRuntime)
+ super.classifyException(e, condition, messageParameters, description,
isRuntime)
}
override def compileExpression(expr: Expression): Option[String] = {
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
index 03541bd892a8..85dd5860e5a5 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
@@ -752,7 +752,7 @@ abstract class JdbcDialect extends Serializable with
Logging {
/**
* Gets a dialect exception, classifies it and wraps it by
`AnalysisException`.
* @param e The dialect specific exception.
- * @param errorClass The error class assigned in the case of an unclassified
`e`
+ * @param condition The error condition assigned in the case of an
unclassified `e`
* @param messageParameters The message parameters of `errorClass`
* @param description The error description
* @param isRuntime Whether the exception is a runtime exception or not.
@@ -760,7 +760,7 @@ abstract class JdbcDialect extends Serializable with
Logging {
*/
def classifyException(
e: Throwable,
- errorClass: String,
+ condition: String,
messageParameters: Map[String, String],
description: String,
isRuntime: Boolean): Throwable with SparkThrowable = {
@@ -773,7 +773,7 @@ abstract class JdbcDialect extends Serializable with
Logging {
* @param e The dialect specific exception.
* @return `AnalysisException` or its sub-class.
*/
- @deprecated("Please override the classifyException method with an error
class", "4.0.0")
+ @deprecated("Please override the classifyException method with an error
condition", "4.0.0")
def classifyException(message: String, e: Throwable): AnalysisException = {
new AnalysisException(
errorClass = "FAILED_JDBC.UNCLASSIFIED",
@@ -866,18 +866,18 @@ trait NoLegacyJDBCError extends JdbcDialect {
override def classifyException(
e: Throwable,
- errorClass: String,
+ condition: String,
messageParameters: Map[String, String],
description: String,
isRuntime: Boolean): Throwable with SparkThrowable = {
if (isRuntime) {
new SparkRuntimeException(
- errorClass = errorClass,
+ errorClass = condition,
messageParameters = messageParameters,
cause = e)
} else {
new AnalysisException(
- errorClass = errorClass,
+ errorClass = condition,
messageParameters = messageParameters,
cause = Some(e))
}
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MsSqlServerDialect.scala
b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MsSqlServerDialect.scala
index 33fb93b168f9..932d08a0b07c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MsSqlServerDialect.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MsSqlServerDialect.scala
@@ -220,7 +220,7 @@ private case class MsSqlServerDialect() extends JdbcDialect
with NoLegacyJDBCErr
override def classifyException(
e: Throwable,
- errorClass: String,
+ condition: String,
messageParameters: Map[String, String],
description: String,
isRuntime: Boolean): Throwable with SparkThrowable = {
@@ -232,13 +232,13 @@ private case class MsSqlServerDialect() extends
JdbcDialect with NoLegacyJDBCErr
namespace = messageParameters.get("namespace").toArray,
details = sqlException.getMessage,
cause = Some(e))
- case 15335 if errorClass == "FAILED_JDBC.RENAME_TABLE" =>
+ case 15335 if condition == "FAILED_JDBC.RENAME_TABLE" =>
val newTable = messageParameters("newName")
throw QueryCompilationErrors.tableAlreadyExistsError(newTable)
case _ =>
- super.classifyException(e, errorClass, messageParameters,
description, isRuntime)
+ super.classifyException(e, condition, messageParameters,
description, isRuntime)
}
- case _ => super.classifyException(e, errorClass, messageParameters,
description, isRuntime)
+ case _ => super.classifyException(e, condition, messageParameters,
description, isRuntime)
}
}
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MySQLDialect.scala
b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MySQLDialect.scala
index f93ba1f67d93..77f0d19fc930 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MySQLDialect.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MySQLDialect.scala
@@ -350,7 +350,7 @@ private case class MySQLDialect() extends JdbcDialect with
SQLConfHelper with No
override def classifyException(
e: Throwable,
- errorClass: String,
+ condition: String,
messageParameters: Map[String, String],
description: String,
isRuntime: Boolean): Throwable with SparkThrowable = {
@@ -358,22 +358,22 @@ private case class MySQLDialect() extends JdbcDialect
with SQLConfHelper with No
case sqlException: SQLException =>
sqlException.getErrorCode match {
// ER_DUP_KEYNAME
- case 1050 if errorClass == "FAILED_JDBC.RENAME_TABLE" =>
+ case 1050 if condition == "FAILED_JDBC.RENAME_TABLE" =>
val newTable = messageParameters("newName")
throw QueryCompilationErrors.tableAlreadyExistsError(newTable)
- case 1061 if errorClass == "FAILED_JDBC.CREATE_INDEX" =>
+ case 1061 if condition == "FAILED_JDBC.CREATE_INDEX" =>
val indexName = messageParameters("indexName")
val tableName = messageParameters("tableName")
throw new IndexAlreadyExistsException(indexName, tableName, cause
= Some(e))
- case 1091 if errorClass == "FAILED_JDBC.DROP_INDEX" =>
+ case 1091 if condition == "FAILED_JDBC.DROP_INDEX" =>
val indexName = messageParameters("indexName")
val tableName = messageParameters("tableName")
throw new NoSuchIndexException(indexName, tableName, cause =
Some(e))
case _ =>
- super.classifyException(e, errorClass, messageParameters,
description, isRuntime)
+ super.classifyException(e, condition, messageParameters,
description, isRuntime)
}
case unsupported: UnsupportedOperationException => throw unsupported
- case _ => super.classifyException(e, errorClass, messageParameters,
description, isRuntime)
+ case _ => super.classifyException(e, condition, messageParameters,
description, isRuntime)
}
}
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/OracleDialect.scala
b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/OracleDialect.scala
index a1540c4489b4..2f1bec1b2242 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/OracleDialect.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/OracleDialect.scala
@@ -259,20 +259,20 @@ private case class OracleDialect() extends JdbcDialect
with SQLConfHelper with N
override def classifyException(
e: Throwable,
- errorClass: String,
+ condition: String,
messageParameters: Map[String, String],
description: String,
isRuntime: Boolean): Throwable with SparkThrowable = {
e match {
case sqlException: SQLException =>
sqlException.getErrorCode match {
- case 955 if errorClass == "FAILED_JDBC.RENAME_TABLE" =>
+ case 955 if condition == "FAILED_JDBC.RENAME_TABLE" =>
val newTable = messageParameters("newName")
throw QueryCompilationErrors.tableAlreadyExistsError(newTable)
case _ =>
- super.classifyException(e, errorClass, messageParameters,
description, isRuntime)
+ super.classifyException(e, condition, messageParameters,
description, isRuntime)
}
- case _ => super.classifyException(e, errorClass, messageParameters,
description, isRuntime)
+ case _ => super.classifyException(e, condition, messageParameters,
description, isRuntime)
}
}
}
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala
b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala
index 0d2e0164079b..d7fb12fcba83 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala
@@ -259,7 +259,7 @@ private case class PostgresDialect()
override def classifyException(
e: Throwable,
- errorClass: String,
+ condition: String,
messageParameters: Map[String, String],
description: String,
isRuntime: Boolean): Throwable with SparkThrowable = {
@@ -268,12 +268,12 @@ private case class PostgresDialect()
sqlException.getSQLState match {
// https://www.postgresql.org/docs/14/errcodes-appendix.html
case "42P07" =>
- if (errorClass == "FAILED_JDBC.CREATE_INDEX") {
+ if (condition == "FAILED_JDBC.CREATE_INDEX") {
throw new IndexAlreadyExistsException(
indexName = messageParameters("indexName"),
tableName = messageParameters("tableName"),
cause = Some(e))
- } else if (errorClass == "FAILED_JDBC.RENAME_TABLE") {
+ } else if (condition == "FAILED_JDBC.RENAME_TABLE") {
val newTable = messageParameters("newName")
throw QueryCompilationErrors.tableAlreadyExistsError(newTable)
} else {
@@ -281,10 +281,10 @@ private case class PostgresDialect()
if (tblRegexp.nonEmpty) {
throw
QueryCompilationErrors.tableAlreadyExistsError(tblRegexp.get.group(1))
} else {
- super.classifyException(e, errorClass, messageParameters,
description, isRuntime)
+ super.classifyException(e, condition, messageParameters,
description, isRuntime)
}
}
- case "42704" if errorClass == "FAILED_JDBC.DROP_INDEX" =>
+ case "42704" if condition == "FAILED_JDBC.DROP_INDEX" =>
val indexName = messageParameters("indexName")
val tableName = messageParameters("tableName")
throw new NoSuchIndexException(indexName, tableName, cause =
Some(e))
@@ -294,10 +294,10 @@ private case class PostgresDialect()
details = sqlException.getMessage,
cause = Some(e))
case _ =>
- super.classifyException(e, errorClass, messageParameters,
description, isRuntime)
+ super.classifyException(e, condition, messageParameters,
description, isRuntime)
}
case unsupported: UnsupportedOperationException => throw unsupported
- case _ => super.classifyException(e, errorClass, messageParameters,
description, isRuntime)
+ case _ => super.classifyException(e, condition, messageParameters,
description, isRuntime)
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]