allisonwang-db commented on a change in pull request #32478:
URL: https://github.com/apache/spark/pull/32478#discussion_r632029332
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
##########
@@ -1391,4 +1393,100 @@ private[spark] object QueryCompilationErrors {
def functionUnsupportedInV2CatalogError(): Throwable = {
new AnalysisException("function is only supported in v1 catalog")
}
+
+ def lookupFunctionInNonFunctionCatalogError(
+ ident: Identifier, catalog: CatalogPlugin): Throwable = {
+ new AnalysisException(s"Trying to lookup function '$ident' in " +
+ s"catalog '${catalog.name()}', but it is not a FunctionCatalog.")
+ }
+
+ def functionCannotProcessInputError(
+ unbound: UnboundFunction,
+ arguments: Seq[Expression],
+ unsupported: UnsupportedOperationException): Throwable = {
+ new AnalysisException(s"Function '${unbound.name}' cannot process " +
+ s"input: (${arguments.map(_.dataType.simpleString).mkString(", ")}): " +
+ unsupported.getMessage, cause = Some(unsupported))
+ }
+
+ def relationAliasNameIsAmbiguousInNestedCTEError(name: String): Throwable = {
Review comment:
`ambiguousNameInNestedCTEError`
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -874,4 +878,205 @@ object QueryExecutionErrors {
def cannotCastUTF8StringToDataTypeError(s: UTF8String, to: DataType):
Throwable = {
new DateTimeException(s"Cannot cast $s to $to.")
}
+
+ def parseJsonArraysAsStructsError(): Throwable = {
Review comment:
`cannotParseJsonArraysAsStructsError`
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -874,4 +878,205 @@ object QueryExecutionErrors {
def cannotCastUTF8StringToDataTypeError(s: UTF8String, to: DataType):
Throwable = {
new DateTimeException(s"Cannot cast $s to $to.")
}
+
+ def parseJsonArraysAsStructsError(): Throwable = {
+ new RuntimeException("Parsing JSON arrays as structs is forbidden.")
+ }
+
+ def cannotParseStringAsDataTypeError(str: String, dataType: DataType):
Throwable = {
+ new RuntimeException(s"Cannot parse $str as ${dataType.catalogString}.")
+ }
+
+ def failToParseEmptyStringForDataTypeError(dataType: DataType): Throwable = {
+ new RuntimeException(
+ s"Failed to parse an empty string for data type
${dataType.catalogString}")
+ }
+
+ def failToParseValueForDataTypeError(dataType: DataType, token: JsonToken):
Throwable = {
+ new RuntimeException(
+ s"Failed to parse a value for data type ${dataType.catalogString}
(current token: $token).")
+ }
+
+ def rootConverterReturnNullError(): Throwable = {
+ new RuntimeException("Root converter returned null")
+ }
+
+ def cannotHaveCircularReferencesInBeanClassError(clazz: Class[_]): Throwable
= {
+ new UnsupportedOperationException(
+ "Cannot have circular references in bean class, but got the circular
reference " +
+ s"of class $clazz")
+ }
+
+ def cannotHaveCircularReferencesInClassError(t: String): Throwable = {
+ new UnsupportedOperationException(
+ s"cannot have circular references in class, but got the circular
reference of class $t")
+ }
+
+ def cannotUseInvalidIdentifierOfJavaAsFieldNameError(
+ fieldName: String, walkedTypePath: WalkedTypePath): Throwable = {
+ new UnsupportedOperationException(s"`$fieldName` is not a valid identifier
of " +
+ s"Java and cannot be used as field name\n$walkedTypePath")
+ }
+
+ def cannotFindEncoderForTypeError(
+ tpe: String, walkedTypePath: WalkedTypePath): Throwable = {
+ new UnsupportedOperationException(s"No Encoder found for
$tpe\n$walkedTypePath")
+ }
+
+ def attributesForTypeUnsupportedError(schema: Schema): Throwable = {
+ new UnsupportedOperationException(s"Attributes for type $schema is not
supported")
+ }
+
+ def schemaForTypeUnsupportedError(tpe: String): Throwable = {
+ new UnsupportedOperationException(s"Schema for type $tpe is not supported")
+ }
+
+ def cannotFindConstructorForTypeError(tpe: String): Throwable = {
+ new UnsupportedOperationException(
+ s"""
+ |Unable to find constructor for $tpe.
+ |This could happen if $tpe is an interface, or a trait without
companion object
+ |constructor.
+ """.stripMargin.replaceAll("\n", " "))
+ }
+
+ def paramExceedOneCharError(paramName: String): Throwable = {
+ new RuntimeException(s"$paramName cannot be more than one character")
+ }
+
+ def paramIsNotIntegerError(paramName: String, value: String): Throwable = {
+ new RuntimeException(s"$paramName should be an integer. Found $value")
+ }
+
+ def paramIsNotBooleanValueError(paramName: String): Throwable = {
+ new Exception(s"$paramName flag can be true or false")
+ }
+
+ def notNullableFieldNotAcceptNullValueError(name: String): Throwable = {
+ new RuntimeException(s"null value found but field $name is not nullable.")
+ }
+
+ def malformedCSVRecordError(): Throwable = {
+ new RuntimeException("Malformed CSV record")
+ }
+
+ def elementsOfTupleExceedLimitError(): Throwable = {
+ new UnsupportedOperationException("Due to Scala's limited support of
tuple, " +
+ "tuple with more than 22 elements are not supported.")
+ }
+
+ def expressionDecodingError(e: Exception, expressions: Seq[Expression]):
Throwable = {
+ new RuntimeException(s"Error while decoding: $e\n" +
+
s"${expressions.map(_.simpleString(SQLConf.get.maxToStringFields)).mkString("\n")}",
e)
+ }
+
+ def expressionEncodingError(e: Exception, expressions: Seq[Expression]):
Throwable = {
+ new RuntimeException(s"Error while encoding: $e\n" +
+
s"${expressions.map(_.simpleString(SQLConf.get.maxToStringFields)).mkString("\n")}",
e)
+ }
+
+ def classHasUnexpectedSerializerError(clsName: String, objSerializer:
Expression): Throwable = {
+ new RuntimeException(s"class $clsName has unexpected serializer:
$objSerializer")
+ }
+
+ def cannotGetOuterPointerForInnerClassError(innerCls: Class[_]): Throwable =
{
+ new RuntimeException(s"Failed to get outer pointer for
${innerCls.getName}")
+ }
+
+ def userDefinedTypeIsNotAnnotatedAndRegisteredError(udt:
UserDefinedType[_]): Throwable = {
Review comment:
`userDefinedTypeNotAnnotatedAndRegisteredError`
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -874,4 +878,205 @@ object QueryExecutionErrors {
def cannotCastUTF8StringToDataTypeError(s: UTF8String, to: DataType):
Throwable = {
new DateTimeException(s"Cannot cast $s to $to.")
}
+
+ def parseJsonArraysAsStructsError(): Throwable = {
+ new RuntimeException("Parsing JSON arrays as structs is forbidden.")
+ }
+
+ def cannotParseStringAsDataTypeError(str: String, dataType: DataType):
Throwable = {
+ new RuntimeException(s"Cannot parse $str as ${dataType.catalogString}.")
+ }
+
+ def failToParseEmptyStringForDataTypeError(dataType: DataType): Throwable = {
+ new RuntimeException(
+ s"Failed to parse an empty string for data type
${dataType.catalogString}")
+ }
+
+ def failToParseValueForDataTypeError(dataType: DataType, token: JsonToken):
Throwable = {
+ new RuntimeException(
+ s"Failed to parse a value for data type ${dataType.catalogString}
(current token: $token).")
+ }
+
+ def rootConverterReturnNullError(): Throwable = {
+ new RuntimeException("Root converter returned null")
+ }
+
+ def cannotHaveCircularReferencesInBeanClassError(clazz: Class[_]): Throwable
= {
+ new UnsupportedOperationException(
+ "Cannot have circular references in bean class, but got the circular
reference " +
+ s"of class $clazz")
+ }
+
+ def cannotHaveCircularReferencesInClassError(t: String): Throwable = {
+ new UnsupportedOperationException(
+ s"cannot have circular references in class, but got the circular
reference of class $t")
+ }
+
+ def cannotUseInvalidIdentifierOfJavaAsFieldNameError(
+ fieldName: String, walkedTypePath: WalkedTypePath): Throwable = {
+ new UnsupportedOperationException(s"`$fieldName` is not a valid identifier
of " +
+ s"Java and cannot be used as field name\n$walkedTypePath")
+ }
+
+ def cannotFindEncoderForTypeError(
+ tpe: String, walkedTypePath: WalkedTypePath): Throwable = {
+ new UnsupportedOperationException(s"No Encoder found for
$tpe\n$walkedTypePath")
+ }
+
+ def attributesForTypeUnsupportedError(schema: Schema): Throwable = {
+ new UnsupportedOperationException(s"Attributes for type $schema is not
supported")
+ }
+
+ def schemaForTypeUnsupportedError(tpe: String): Throwable = {
+ new UnsupportedOperationException(s"Schema for type $tpe is not supported")
+ }
+
+ def cannotFindConstructorForTypeError(tpe: String): Throwable = {
+ new UnsupportedOperationException(
+ s"""
+ |Unable to find constructor for $tpe.
+ |This could happen if $tpe is an interface, or a trait without
companion object
+ |constructor.
+ """.stripMargin.replaceAll("\n", " "))
+ }
+
+ def paramExceedOneCharError(paramName: String): Throwable = {
+ new RuntimeException(s"$paramName cannot be more than one character")
+ }
+
+ def paramIsNotIntegerError(paramName: String, value: String): Throwable = {
+ new RuntimeException(s"$paramName should be an integer. Found $value")
+ }
+
+ def paramIsNotBooleanValueError(paramName: String): Throwable = {
+ new Exception(s"$paramName flag can be true or false")
+ }
+
+ def notNullableFieldNotAcceptNullValueError(name: String): Throwable = {
+ new RuntimeException(s"null value found but field $name is not nullable.")
+ }
+
+ def malformedCSVRecordError(): Throwable = {
+ new RuntimeException("Malformed CSV record")
+ }
+
+ def elementsOfTupleExceedLimitError(): Throwable = {
+ new UnsupportedOperationException("Due to Scala's limited support of
tuple, " +
+ "tuple with more than 22 elements are not supported.")
+ }
+
+ def expressionDecodingError(e: Exception, expressions: Seq[Expression]):
Throwable = {
+ new RuntimeException(s"Error while decoding: $e\n" +
+
s"${expressions.map(_.simpleString(SQLConf.get.maxToStringFields)).mkString("\n")}",
e)
+ }
+
+ def expressionEncodingError(e: Exception, expressions: Seq[Expression]):
Throwable = {
+ new RuntimeException(s"Error while encoding: $e\n" +
+
s"${expressions.map(_.simpleString(SQLConf.get.maxToStringFields)).mkString("\n")}",
e)
+ }
+
+ def classHasUnexpectedSerializerError(clsName: String, objSerializer:
Expression): Throwable = {
+ new RuntimeException(s"class $clsName has unexpected serializer:
$objSerializer")
+ }
+
+ def cannotGetOuterPointerForInnerClassError(innerCls: Class[_]): Throwable =
{
+ new RuntimeException(s"Failed to get outer pointer for
${innerCls.getName}")
+ }
+
+ def userDefinedTypeIsNotAnnotatedAndRegisteredError(udt:
UserDefinedType[_]): Throwable = {
+ new SparkException(s"${udt.userClass.getName} is not annotated with " +
+ "SQLUserDefinedType nor registered with UDTRegistration.}")
+ }
+
+ def invalidInputSyntaxForBooleanError(s: UTF8String): Throwable = {
+ new UnsupportedOperationException(s"invalid input syntax for type boolean:
$s")
+ }
+
+ def unsupportedOperandTypeForSizeFunctionError(dataType: DataType):
Throwable = {
+ new UnsupportedOperationException(
+ s"The size function doesn't support the operand type
${dataType.getClass.getCanonicalName}")
+ }
+
+ def unexpectedValueForStartInFunctionError(prettyName: String): Throwable = {
+ new RuntimeException(
+ s"Unexpected value for start in function $prettyName: SQL array indices
start at 1.")
+ }
+
+ def unexpectedValueForLengthInFunctionError(prettyName: String): Throwable =
{
+ new RuntimeException(s"Unexpected value for length in function
$prettyName: " +
+ "length must be greater than or equal to 0.")
+ }
+
+ def sqlArrayIndexNotStartAtOneError(): Throwable = {
+ new ArrayIndexOutOfBoundsException("SQL array indices start at 1")
+ }
+
+ def concatArraysWithElementsExceedLimitError(numberOfElements: Long):
Throwable = {
+ new RuntimeException(
+ s"""
+ |Unsuccessful try to concat arrays with $numberOfElements
+ |elements due to exceeding the array size limit
+ |${ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH}.
+ """.stripMargin.replaceAll("\n", " "))
+ }
+
+ def flattenArraysWithElementsExceedLimitError(numberOfElements: Long):
Throwable = {
+ new RuntimeException(
+ s"""
+ |Unsuccessful try to flatten an array of arrays with $numberOfElements
+ |elements due to exceeding the array size limit
+ |${ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH}.
+ """.stripMargin.replaceAll("\n", " "))
+ }
+
+ def createArrayWithElementsExceedLimitError(count: Any): Throwable = {
+ new RuntimeException(
+ s"""
+ |Unsuccessful try to create array with $count elements
+ |due to exceeding the array size limit
+ |${ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH}.
+ """.stripMargin.replaceAll("\n", " "))
+ }
+
+ def unionArrayWithElementsExceedLimitError(length: Int): Throwable = {
+ new RuntimeException(
+ s"""
+ |Unsuccessful try to union arrays with $length
+ |elements due to exceeding the array size limit
+ |${ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH}.
+ """.stripMargin.replaceAll("\n", " "))
+ }
+
+ def initialTypeIsNotSuitableDataTypeError(dataType: DataType, target:
String): Throwable = {
+ new UnsupportedOperationException(s"Initial type ${dataType.catalogString}
must be a $target")
+ }
+
+ def initialTypeIsNotSuitableDataTypesError(dataType: DataType): Throwable = {
Review comment:
`initialTypeNotTargetDataTypesError`
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -874,4 +878,205 @@ object QueryExecutionErrors {
def cannotCastUTF8StringToDataTypeError(s: UTF8String, to: DataType):
Throwable = {
new DateTimeException(s"Cannot cast $s to $to.")
}
+
+ def parseJsonArraysAsStructsError(): Throwable = {
+ new RuntimeException("Parsing JSON arrays as structs is forbidden.")
+ }
+
+ def cannotParseStringAsDataTypeError(str: String, dataType: DataType):
Throwable = {
+ new RuntimeException(s"Cannot parse $str as ${dataType.catalogString}.")
+ }
+
+ def failToParseEmptyStringForDataTypeError(dataType: DataType): Throwable = {
+ new RuntimeException(
+ s"Failed to parse an empty string for data type
${dataType.catalogString}")
+ }
+
+ def failToParseValueForDataTypeError(dataType: DataType, token: JsonToken):
Throwable = {
+ new RuntimeException(
+ s"Failed to parse a value for data type ${dataType.catalogString}
(current token: $token).")
+ }
+
+ def rootConverterReturnNullError(): Throwable = {
+ new RuntimeException("Root converter returned null")
+ }
+
+ def cannotHaveCircularReferencesInBeanClassError(clazz: Class[_]): Throwable
= {
+ new UnsupportedOperationException(
+ "Cannot have circular references in bean class, but got the circular
reference " +
+ s"of class $clazz")
+ }
+
+ def cannotHaveCircularReferencesInClassError(t: String): Throwable = {
+ new UnsupportedOperationException(
+ s"cannot have circular references in class, but got the circular
reference of class $t")
+ }
+
+ def cannotUseInvalidIdentifierOfJavaAsFieldNameError(
Review comment:
`cannotUseInvalidJavaIdentifierAsFieldNameError`
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -874,4 +878,205 @@ object QueryExecutionErrors {
def cannotCastUTF8StringToDataTypeError(s: UTF8String, to: DataType):
Throwable = {
new DateTimeException(s"Cannot cast $s to $to.")
}
+
+ def parseJsonArraysAsStructsError(): Throwable = {
+ new RuntimeException("Parsing JSON arrays as structs is forbidden.")
+ }
+
+ def cannotParseStringAsDataTypeError(str: String, dataType: DataType):
Throwable = {
+ new RuntimeException(s"Cannot parse $str as ${dataType.catalogString}.")
+ }
+
+ def failToParseEmptyStringForDataTypeError(dataType: DataType): Throwable = {
+ new RuntimeException(
+ s"Failed to parse an empty string for data type
${dataType.catalogString}")
+ }
+
+ def failToParseValueForDataTypeError(dataType: DataType, token: JsonToken):
Throwable = {
+ new RuntimeException(
+ s"Failed to parse a value for data type ${dataType.catalogString}
(current token: $token).")
+ }
+
+ def rootConverterReturnNullError(): Throwable = {
+ new RuntimeException("Root converter returned null")
+ }
+
+ def cannotHaveCircularReferencesInBeanClassError(clazz: Class[_]): Throwable
= {
+ new UnsupportedOperationException(
+ "Cannot have circular references in bean class, but got the circular
reference " +
+ s"of class $clazz")
+ }
+
+ def cannotHaveCircularReferencesInClassError(t: String): Throwable = {
+ new UnsupportedOperationException(
+ s"cannot have circular references in class, but got the circular
reference of class $t")
+ }
+
+ def cannotUseInvalidIdentifierOfJavaAsFieldNameError(
+ fieldName: String, walkedTypePath: WalkedTypePath): Throwable = {
+ new UnsupportedOperationException(s"`$fieldName` is not a valid identifier
of " +
+ s"Java and cannot be used as field name\n$walkedTypePath")
+ }
+
+ def cannotFindEncoderForTypeError(
+ tpe: String, walkedTypePath: WalkedTypePath): Throwable = {
+ new UnsupportedOperationException(s"No Encoder found for
$tpe\n$walkedTypePath")
+ }
+
+ def attributesForTypeUnsupportedError(schema: Schema): Throwable = {
+ new UnsupportedOperationException(s"Attributes for type $schema is not
supported")
+ }
+
+ def schemaForTypeUnsupportedError(tpe: String): Throwable = {
+ new UnsupportedOperationException(s"Schema for type $tpe is not supported")
+ }
+
+ def cannotFindConstructorForTypeError(tpe: String): Throwable = {
+ new UnsupportedOperationException(
+ s"""
+ |Unable to find constructor for $tpe.
+ |This could happen if $tpe is an interface, or a trait without
companion object
+ |constructor.
+ """.stripMargin.replaceAll("\n", " "))
+ }
+
+ def paramExceedOneCharError(paramName: String): Throwable = {
+ new RuntimeException(s"$paramName cannot be more than one character")
+ }
+
+ def paramIsNotIntegerError(paramName: String, value: String): Throwable = {
+ new RuntimeException(s"$paramName should be an integer. Found $value")
+ }
+
+ def paramIsNotBooleanValueError(paramName: String): Throwable = {
+ new Exception(s"$paramName flag can be true or false")
+ }
+
+ def notNullableFieldNotAcceptNullValueError(name: String): Throwable = {
+ new RuntimeException(s"null value found but field $name is not nullable.")
+ }
+
+ def malformedCSVRecordError(): Throwable = {
+ new RuntimeException("Malformed CSV record")
+ }
+
+ def elementsOfTupleExceedLimitError(): Throwable = {
+ new UnsupportedOperationException("Due to Scala's limited support of
tuple, " +
+ "tuple with more than 22 elements are not supported.")
+ }
+
+ def expressionDecodingError(e: Exception, expressions: Seq[Expression]):
Throwable = {
+ new RuntimeException(s"Error while decoding: $e\n" +
+
s"${expressions.map(_.simpleString(SQLConf.get.maxToStringFields)).mkString("\n")}",
e)
+ }
+
+ def expressionEncodingError(e: Exception, expressions: Seq[Expression]):
Throwable = {
+ new RuntimeException(s"Error while encoding: $e\n" +
+
s"${expressions.map(_.simpleString(SQLConf.get.maxToStringFields)).mkString("\n")}",
e)
+ }
+
+ def classHasUnexpectedSerializerError(clsName: String, objSerializer:
Expression): Throwable = {
+ new RuntimeException(s"class $clsName has unexpected serializer:
$objSerializer")
+ }
+
+ def cannotGetOuterPointerForInnerClassError(innerCls: Class[_]): Throwable =
{
+ new RuntimeException(s"Failed to get outer pointer for
${innerCls.getName}")
+ }
+
+ def userDefinedTypeIsNotAnnotatedAndRegisteredError(udt:
UserDefinedType[_]): Throwable = {
+ new SparkException(s"${udt.userClass.getName} is not annotated with " +
+ "SQLUserDefinedType nor registered with UDTRegistration.}")
+ }
+
+ def invalidInputSyntaxForBooleanError(s: UTF8String): Throwable = {
+ new UnsupportedOperationException(s"invalid input syntax for type boolean:
$s")
+ }
+
+ def unsupportedOperandTypeForSizeFunctionError(dataType: DataType):
Throwable = {
+ new UnsupportedOperationException(
+ s"The size function doesn't support the operand type
${dataType.getClass.getCanonicalName}")
+ }
+
+ def unexpectedValueForStartInFunctionError(prettyName: String): Throwable = {
+ new RuntimeException(
+ s"Unexpected value for start in function $prettyName: SQL array indices
start at 1.")
+ }
+
+ def unexpectedValueForLengthInFunctionError(prettyName: String): Throwable =
{
+ new RuntimeException(s"Unexpected value for length in function
$prettyName: " +
+ "length must be greater than or equal to 0.")
+ }
+
+ def sqlArrayIndexNotStartAtOneError(): Throwable = {
+ new ArrayIndexOutOfBoundsException("SQL array indices start at 1")
+ }
+
+ def concatArraysWithElementsExceedLimitError(numberOfElements: Long):
Throwable = {
+ new RuntimeException(
+ s"""
+ |Unsuccessful try to concat arrays with $numberOfElements
+ |elements due to exceeding the array size limit
+ |${ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH}.
+ """.stripMargin.replaceAll("\n", " "))
+ }
+
+ def flattenArraysWithElementsExceedLimitError(numberOfElements: Long):
Throwable = {
+ new RuntimeException(
+ s"""
+ |Unsuccessful try to flatten an array of arrays with $numberOfElements
+ |elements due to exceeding the array size limit
+ |${ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH}.
+ """.stripMargin.replaceAll("\n", " "))
+ }
+
+ def createArrayWithElementsExceedLimitError(count: Any): Throwable = {
+ new RuntimeException(
+ s"""
+ |Unsuccessful try to create array with $count elements
+ |due to exceeding the array size limit
+ |${ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH}.
+ """.stripMargin.replaceAll("\n", " "))
+ }
+
+ def unionArrayWithElementsExceedLimitError(length: Int): Throwable = {
+ new RuntimeException(
+ s"""
+ |Unsuccessful try to union arrays with $length
+ |elements due to exceeding the array size limit
+ |${ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH}.
+ """.stripMargin.replaceAll("\n", " "))
+ }
+
+ def initialTypeIsNotSuitableDataTypeError(dataType: DataType, target:
String): Throwable = {
Review comment:
Maybe `initialTypeNotTargetDataTypeError`?
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -874,4 +878,205 @@ object QueryExecutionErrors {
def cannotCastUTF8StringToDataTypeError(s: UTF8String, to: DataType):
Throwable = {
new DateTimeException(s"Cannot cast $s to $to.")
}
+
+ def parseJsonArraysAsStructsError(): Throwable = {
+ new RuntimeException("Parsing JSON arrays as structs is forbidden.")
+ }
+
+ def cannotParseStringAsDataTypeError(str: String, dataType: DataType):
Throwable = {
+ new RuntimeException(s"Cannot parse $str as ${dataType.catalogString}.")
+ }
+
+ def failToParseEmptyStringForDataTypeError(dataType: DataType): Throwable = {
+ new RuntimeException(
+ s"Failed to parse an empty string for data type
${dataType.catalogString}")
+ }
+
+ def failToParseValueForDataTypeError(dataType: DataType, token: JsonToken):
Throwable = {
+ new RuntimeException(
+ s"Failed to parse a value for data type ${dataType.catalogString}
(current token: $token).")
+ }
+
+ def rootConverterReturnNullError(): Throwable = {
+ new RuntimeException("Root converter returned null")
+ }
+
+ def cannotHaveCircularReferencesInBeanClassError(clazz: Class[_]): Throwable
= {
+ new UnsupportedOperationException(
+ "Cannot have circular references in bean class, but got the circular
reference " +
+ s"of class $clazz")
+ }
+
+ def cannotHaveCircularReferencesInClassError(t: String): Throwable = {
+ new UnsupportedOperationException(
+ s"cannot have circular references in class, but got the circular
reference of class $t")
+ }
+
+ def cannotUseInvalidIdentifierOfJavaAsFieldNameError(
+ fieldName: String, walkedTypePath: WalkedTypePath): Throwable = {
+ new UnsupportedOperationException(s"`$fieldName` is not a valid identifier
of " +
+ s"Java and cannot be used as field name\n$walkedTypePath")
+ }
+
+ def cannotFindEncoderForTypeError(
+ tpe: String, walkedTypePath: WalkedTypePath): Throwable = {
+ new UnsupportedOperationException(s"No Encoder found for
$tpe\n$walkedTypePath")
+ }
+
+ def attributesForTypeUnsupportedError(schema: Schema): Throwable = {
+ new UnsupportedOperationException(s"Attributes for type $schema is not
supported")
+ }
+
+ def schemaForTypeUnsupportedError(tpe: String): Throwable = {
+ new UnsupportedOperationException(s"Schema for type $tpe is not supported")
+ }
+
+ def cannotFindConstructorForTypeError(tpe: String): Throwable = {
+ new UnsupportedOperationException(
+ s"""
+ |Unable to find constructor for $tpe.
+ |This could happen if $tpe is an interface, or a trait without
companion object
+ |constructor.
+ """.stripMargin.replaceAll("\n", " "))
+ }
+
+ def paramExceedOneCharError(paramName: String): Throwable = {
+ new RuntimeException(s"$paramName cannot be more than one character")
+ }
+
+ def paramIsNotIntegerError(paramName: String, value: String): Throwable = {
+ new RuntimeException(s"$paramName should be an integer. Found $value")
+ }
+
+ def paramIsNotBooleanValueError(paramName: String): Throwable = {
+ new Exception(s"$paramName flag can be true or false")
+ }
+
+ def notNullableFieldNotAcceptNullValueError(name: String): Throwable = {
Review comment:
`foundNullValueForNotNullableFieldError`
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
##########
@@ -1391,4 +1393,100 @@ private[spark] object QueryCompilationErrors {
def functionUnsupportedInV2CatalogError(): Throwable = {
new AnalysisException("function is only supported in v1 catalog")
}
+
+ def lookupFunctionInNonFunctionCatalogError(
+ ident: Identifier, catalog: CatalogPlugin): Throwable = {
+ new AnalysisException(s"Trying to lookup function '$ident' in " +
+ s"catalog '${catalog.name()}', but it is not a FunctionCatalog.")
+ }
+
+ def functionCannotProcessInputError(
+ unbound: UnboundFunction,
+ arguments: Seq[Expression],
+ unsupported: UnsupportedOperationException): Throwable = {
+ new AnalysisException(s"Function '${unbound.name}' cannot process " +
+ s"input: (${arguments.map(_.dataType.simpleString).mkString(", ")}): " +
+ unsupported.getMessage, cause = Some(unsupported))
+ }
+
+ def relationAliasNameIsAmbiguousInNestedCTEError(name: String): Throwable = {
+ new AnalysisException(s"Name $name is ambiguous in nested CTE. " +
+ s"Please set ${LEGACY_CTE_PRECEDENCE_POLICY.key} to CORRECTED so that
name " +
+ "defined in inner CTE takes precedence. If set it to LEGACY, outer CTE "
+
+ "definitions will take precedence. See more details in SPARK-28228.")
+ }
+
+ def commandUnsupportedInV2CatalogError(name: String): Throwable = {
Review comment:
`commandUnsupportedInV2TableError`
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]