karenfeng commented on a change in pull request #34168:
URL: https://github.com/apache/spark/pull/34168#discussion_r723749452
##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -1297,74 +1297,87 @@ object QueryExecutionErrors {
}
def serDeInterfaceNotFoundError(e: NoClassDefFoundError): Throwable = {
- new ClassNotFoundException("The SerDe interface removed since Hive
2.3(HIVE-15167)." +
- " Please migrate your custom SerDes to Hive 2.3. See HIVE-15167 for more
details.", e)
+ new SparkClassNotFoundException(
+ errorClass = "SERDE_INTERFACE_NOT_FOUND",
+ messageParameters = Array.empty, e)
}
def convertHiveTableToCatalogTableError(
e: SparkException, dbName: String, tableName: String): Throwable = {
- new SparkException(s"${e.getMessage}, db: $dbName, table: $tableName", e)
+ new SparkException(
+ errorClass = "CANNOT_CONVERT_HIVE_TABLE_TO_CATALOG_TABLE",
+ messageParameters = Array(e.getMessage, dbName, tableName), e)
}
def cannotRecognizeHiveTypeError(
e: ParseException, fieldType: String, fieldName: String): Throwable = {
new SparkException(
- s"Cannot recognize hive type string: $fieldType, column: $fieldName", e)
+ errorClass = "CANNOT_RECOGNIZE_HIVE_TYPE",
+ messageParameters = Array(fieldType, fieldName), e)
}
def getTablesByTypeUnsupportedByHiveVersionError(): Throwable = {
- new UnsupportedOperationException("Hive 2.2 and lower versions don't
support " +
- "getTablesByType. Please use Hive 2.3 or higher version.")
+ new SparkUnsupportedOperationException(
+ errorClass = "GET_TABLES_BY_TYPE_UNSUPPORTED_BY_HIVE_VERSION",
+ messageParameters = Array.empty
+ )
}
def dropTableWithPurgeUnsupportedError(): Throwable = {
- new UnsupportedOperationException("DROP TABLE ... PURGE")
+ new SparkUnsupportedOperationException(
+ errorClass = "DROP_TABLE_WITH_PURGE_UNSUPPORTED",
+ messageParameters = Array.empty
+ )
}
def alterTableWithDropPartitionAndPurgeUnsupportedError(): Throwable = {
- new UnsupportedOperationException("ALTER TABLE ... DROP PARTITION ...
PURGE")
+ new SparkUnsupportedOperationException(
+ errorClass = "ALTER_TABLE_WITH_DROP_PARTITION_AND_PURGE_UNSUPPORTED",
+ messageParameters = Array.empty
+ )
}
def invalidPartitionFilterError(): Throwable = {
- new UnsupportedOperationException(
- """Partition filter cannot have both `"` and `'` characters""")
+ new SparkUnsupportedOperationException(
+ errorClass = "INVALID_PARTITION_FILTER",
+ messageParameters = Array.empty
+ )
}
def getPartitionMetadataByFilterError(e: InvocationTargetException): Throwable = {
- new RuntimeException(
- s"""
- |Caught Hive MetaException attempting to get partition metadata by filter
- |from Hive. You can set the Spark configuration setting
- |${SQLConf.HIVE_METASTORE_PARTITION_PRUNING_FALLBACK_ON_EXCEPTION} to true to work around
- |this problem, however this will result in degraded performance. Please
- |report a bug: https://issues.apache.org/jira/browse/SPARK
- """.stripMargin.replaceAll("\n", " "), e)
+ new SparkRuntimeException(
+ errorClass = "CANNOT_GET_PARTITION_METADATA_BY_FILTER",
+ messageParameters = Array(
+ SQLConf.HIVE_METASTORE_PARTITION_PRUNING_FALLBACK_ON_EXCEPTION.toString),
+ e)
}
def unsupportedHiveMetastoreVersionError(version: String, key: String): Throwable = {
- new UnsupportedOperationException(s"Unsupported Hive Metastore version
($version). " +
- s"Please set $key with a valid version.")
+ new SparkUnsupportedOperationException(
+ errorClass = "UNSUPPORTED_HIVE_METASTORE_VERSION",
+ messageParameters = Array(version, key))
}
def loadHiveClientCausesNoClassDefFoundError(
cnf: NoClassDefFoundError,
execJars: Seq[URL],
key: String,
e: InvocationTargetException): Throwable = {
- new ClassNotFoundException(
- s"""
- |$cnf when creating Hive client using classpath: ${execJars.mkString(", ")}\n
- |Please make sure that jars for your version of hive and hadoop are included in the
- |paths passed to $key.
- """.stripMargin.replaceAll("\n", " "), e)
+ new SparkClassNotFoundException(
+ errorClass = "LOAD_HIVE_CLIENT_CAUSES_NO_CLASS_DEFINITION_FOUND",
+ messageParameters = Array(cnf.toString, execJars.mkString(", "), key), e)
}
def cannotFetchTablesOfDatabaseError(dbName: String, e: Exception): Throwable = {
- new SparkException(s"Unable to fetch tables of db $dbName", e)
+ new SparkException(
+ errorClass = "CANNOT_FETCH DATABASE_TABLES",
Review comment:
Missing underscore here: `CANNOT_FETCH DATABASE_TABLES` should be
`CANNOT_FETCH_DATABASE_TABLES`.
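For reference, the fixed call might look like this (a sketch only; the
`messageParameters` line is an assumption, since the diff is truncated
at this point, but the old message was `s"Unable to fetch tables of db
$dbName"`):

```scala
def cannotFetchTablesOfDatabaseError(dbName: String, e: Exception): Throwable = {
  new SparkException(
    errorClass = "CANNOT_FETCH_DATABASE_TABLES",  // underscore restored
    messageParameters = Array(dbName), e)  // assumed: dbName fills the template
}
```

The matching entry in `error-classes.json` would need the same
underscored name.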
##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -1297,74 +1297,87 @@ object QueryExecutionErrors {
}
def serDeInterfaceNotFoundError(e: NoClassDefFoundError): Throwable = {
- new ClassNotFoundException("The SerDe interface removed since Hive
2.3(HIVE-15167)." +
- " Please migrate your custom SerDes to Hive 2.3. See HIVE-15167 for more
details.", e)
+ new SparkClassNotFoundException(
+ errorClass = "SERDE_INTERFACE_NOT_FOUND",
+ messageParameters = Array.empty, e)
}
def convertHiveTableToCatalogTableError(
e: SparkException, dbName: String, tableName: String): Throwable = {
- new SparkException(s"${e.getMessage}, db: $dbName, table: $tableName", e)
+ new SparkException(
+ errorClass = "CANNOT_CONVERT_HIVE_TABLE_TO_CATALOG_TABLE",
+ messageParameters = Array(e.getMessage, dbName, tableName), e)
}
def cannotRecognizeHiveTypeError(
e: ParseException, fieldType: String, fieldName: String): Throwable = {
new SparkException(
- s"Cannot recognize hive type string: $fieldType, column: $fieldName", e)
+ errorClass = "CANNOT_RECOGNIZE_HIVE_TYPE",
+ messageParameters = Array(fieldType, fieldName), e)
}
def getTablesByTypeUnsupportedByHiveVersionError(): Throwable = {
- new UnsupportedOperationException("Hive 2.2 and lower versions don't
support " +
- "getTablesByType. Please use Hive 2.3 or higher version.")
+ new SparkUnsupportedOperationException(
+ errorClass = "GET_TABLES_BY_TYPE_UNSUPPORTED_BY_HIVE_VERSION",
+ messageParameters = Array.empty
+ )
}
def dropTableWithPurgeUnsupportedError(): Throwable = {
- new UnsupportedOperationException("DROP TABLE ... PURGE")
+ new SparkUnsupportedOperationException(
+ errorClass = "DROP_TABLE_WITH_PURGE_UNSUPPORTED",
+ messageParameters = Array.empty
+ )
}
def alterTableWithDropPartitionAndPurgeUnsupportedError(): Throwable = {
- new UnsupportedOperationException("ALTER TABLE ... DROP PARTITION ...
PURGE")
+ new SparkUnsupportedOperationException(
+ errorClass = "ALTER_TABLE_WITH_DROP_PARTITION_AND_PURGE_UNSUPPORTED",
+ messageParameters = Array.empty
+ )
}
def invalidPartitionFilterError(): Throwable = {
- new UnsupportedOperationException(
- """Partition filter cannot have both `"` and `'` characters""")
+ new SparkUnsupportedOperationException(
+ errorClass = "INVALID_PARTITION_FILTER",
+ messageParameters = Array.empty
+ )
}
def getPartitionMetadataByFilterError(e: InvocationTargetException): Throwable = {
- new RuntimeException(
- s"""
- |Caught Hive MetaException attempting to get partition metadata by filter
- |from Hive. You can set the Spark configuration setting
- |${SQLConf.HIVE_METASTORE_PARTITION_PRUNING_FALLBACK_ON_EXCEPTION} to true to work around
- |this problem, however this will result in degraded performance. Please
- |report a bug: https://issues.apache.org/jira/browse/SPARK
- """.stripMargin.replaceAll("\n", " "), e)
+ new SparkRuntimeException(
+ errorClass = "CANNOT_GET_PARTITION_METADATA_BY_FILTER",
Review comment:
We just introduced `INTERNAL_ERROR`; can you use that instead? This will
be useful for minimizing clutter.
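For example, something like the sketch below (assumptions: `INTERNAL_ERROR`
takes the full message text as its single parameter; the exact shape depends
on how that class is declared in `error-classes.json`):

```scala
def getPartitionMetadataByFilterError(e: InvocationTargetException): Throwable = {
  new SparkRuntimeException(
    errorClass = "INTERNAL_ERROR",
    messageParameters = Array(
      "Caught Hive MetaException attempting to get partition metadata by filter " +
      s"from Hive. Set ${SQLConf.HIVE_METASTORE_PARTITION_PRUNING_FALLBACK_ON_EXCEPTION} " +
      "to true to work around this problem, at the cost of degraded performance."),
    e)
}
```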
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]