allisonwang-db commented on a change in pull request #32367:
URL: https://github.com/apache/spark/pull/32367#discussion_r623340674



##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -823,4 +824,55 @@ object QueryExecutionErrors {
     new SparkException(s"Failed to merge incompatible data types 
${left.catalogString}" +
       s" and ${right.catalogString}")
   }
+
+  def exceedMapSizeWhenBuildMapError(size: Int): Throwable = {
+    new RuntimeException(s"Unsuccessful attempt to build maps with $size 
elements " +
+      s"due to exceeding the map size limit 
${ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH}.")
+  }
+
+  def duplicateMapKeyFoundError(key: Any): Throwable = {
+    new RuntimeException(s"Duplicate map key $key was found, please check the 
input " +
+      "data. If you want to remove the duplicated keys, you can set " +
+      s"${SQLConf.MAP_KEY_DEDUP_POLICY.key} to 
${SQLConf.MapKeyDedupPolicy.LAST_WIN} so that " +
+      "the key inserted at last takes precedence.")
+  }
+
+  def differentLengthOfKeyArrayAndValueArrayOfMapDataError(): Throwable = {

Review comment:
       mapDataKeyArrayLengthDiffersFromValueArrayLengthError
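
For reference, a quick sketch of the method with this rename applied; the body is copied unchanged from the hunk above:

```scala
// Suggested name; body as in the PR diff, inside the existing QueryExecutionErrors object.
def mapDataKeyArrayLengthDiffersFromValueArrayLengthError(): Throwable = {
  new RuntimeException("The key array and value array of MapData must have the same length.")
}
```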

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -823,4 +824,55 @@ object QueryExecutionErrors {
     new SparkException(s"Failed to merge incompatible data types 
${left.catalogString}" +
       s" and ${right.catalogString}")
   }
+
+  def exceedMapSizeWhenBuildMapError(size: Int): Throwable = {

Review comment:
       exceedMapSizeLimitError
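
As a sketch, assuming the body stays as in the diff (with `ByteArrayMethods` already imported in this file):

```scala
// Suggested name; body unchanged from the PR diff.
def exceedMapSizeLimitError(size: Int): Throwable = {
  new RuntimeException(s"Unsuccessful attempt to build maps with $size elements " +
    s"due to exceeding the map size limit ${ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH}.")
}
```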

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -823,4 +824,55 @@ object QueryExecutionErrors {
     new SparkException(s"Failed to merge incompatible data types 
${left.catalogString}" +
       s" and ${right.catalogString}")
   }
+
+  def exceedMapSizeWhenBuildMapError(size: Int): Throwable = {
+    new RuntimeException(s"Unsuccessful attempt to build maps with $size 
elements " +
+      s"due to exceeding the map size limit 
${ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH}.")
+  }
+
+  def duplicateMapKeyFoundError(key: Any): Throwable = {
+    new RuntimeException(s"Duplicate map key $key was found, please check the 
input " +
+      "data. If you want to remove the duplicated keys, you can set " +
+      s"${SQLConf.MAP_KEY_DEDUP_POLICY.key} to 
${SQLConf.MapKeyDedupPolicy.LAST_WIN} so that " +
+      "the key inserted at last takes precedence.")
+  }
+
+  def differentLengthOfKeyArrayAndValueArrayOfMapDataError(): Throwable = {
+    new RuntimeException("The key array and value array of MapData must have 
the same length.")
+  }
+
+  def fieldDiffersFromDerivedOneError(
+      field: ChronoField, actual: Int, expected: Int, candidate: LocalDate): 
Throwable = {
+    new DateTimeException(s"Conflict found: Field $field $actual differs from" 
+
+      s" $field $expected derived from $candidate")
+  }
+
+  def failedParseDateTimeInNewParserError(s: String, e: Throwable): Throwable 
= {

Review comment:
       failToParseDateTimeInNewParserError
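
With the rename applied, the method would read like this (body unchanged from the hunk above; `SparkUpgradeException` and `SQLConf` are assumed to already be in scope in this file):

```scala
// Suggested name; body unchanged from the PR diff.
def failToParseDateTimeInNewParserError(s: String, e: Throwable): Throwable = {
  new SparkUpgradeException("3.0", s"Fail to parse '$s' in the new parser. You can " +
    s"set ${SQLConf.LEGACY_TIME_PARSER_POLICY.key} to LEGACY to restore the behavior " +
    s"before Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.", e)
}
```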

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -823,4 +824,55 @@ object QueryExecutionErrors {
     new SparkException(s"Failed to merge incompatible data types 
${left.catalogString}" +
       s" and ${right.catalogString}")
   }
+
+  def exceedMapSizeWhenBuildMapError(size: Int): Throwable = {
+    new RuntimeException(s"Unsuccessful attempt to build maps with $size 
elements " +
+      s"due to exceeding the map size limit 
${ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH}.")
+  }
+
+  def duplicateMapKeyFoundError(key: Any): Throwable = {
+    new RuntimeException(s"Duplicate map key $key was found, please check the 
input " +
+      "data. If you want to remove the duplicated keys, you can set " +
+      s"${SQLConf.MAP_KEY_DEDUP_POLICY.key} to 
${SQLConf.MapKeyDedupPolicy.LAST_WIN} so that " +
+      "the key inserted at last takes precedence.")
+  }
+
+  def differentLengthOfKeyArrayAndValueArrayOfMapDataError(): Throwable = {
+    new RuntimeException("The key array and value array of MapData must have 
the same length.")
+  }
+
+  def fieldDiffersFromDerivedOneError(
+      field: ChronoField, actual: Int, expected: Int, candidate: LocalDate): 
Throwable = {
+    new DateTimeException(s"Conflict found: Field $field $actual differs from" 
+
+      s" $field $expected derived from $candidate")
+  }
+
+  def failedParseDateTimeInNewParserError(s: String, e: Throwable): Throwable 
= {
+    new SparkUpgradeException("3.0", s"Fail to parse '$s' in the new parser. 
You can " +
+      s"set ${SQLConf.LEGACY_TIME_PARSER_POLICY.key} to LEGACY to restore the 
behavior " +
+      s"before Spark 3.0, or set to CORRECTED and treat it as an invalid 
datetime string.", e)
+  }
+
+  def failedFormatDateTimeInNewFormatterError(
+      resultCandidate: String, e: Throwable): Throwable = {
+    new SparkUpgradeException("3.0",
+      s"""
+         |Fail to format it to '$resultCandidate' in the new formatter. You 
can set
+         |${SQLConf.LEGACY_TIME_PARSER_POLICY.key} to LEGACY to restore the 
behavior before
+         |Spark 3.0, or set to CORRECTED and treat it as an invalid datetime 
string.
+       """.stripMargin.replaceAll("\n", " "), e)
+  }
+
+  def failedRecognizePatternInDateTimeFormatterError(

Review comment:
       failToRecognizePatternInDateTimeFormatterError

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -823,4 +824,55 @@ object QueryExecutionErrors {
     new SparkException(s"Failed to merge incompatible data types 
${left.catalogString}" +
       s" and ${right.catalogString}")
   }
+
+  def exceedMapSizeWhenBuildMapError(size: Int): Throwable = {
+    new RuntimeException(s"Unsuccessful attempt to build maps with $size 
elements " +
+      s"due to exceeding the map size limit 
${ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH}.")
+  }
+
+  def duplicateMapKeyFoundError(key: Any): Throwable = {
+    new RuntimeException(s"Duplicate map key $key was found, please check the 
input " +
+      "data. If you want to remove the duplicated keys, you can set " +
+      s"${SQLConf.MAP_KEY_DEDUP_POLICY.key} to 
${SQLConf.MapKeyDedupPolicy.LAST_WIN} so that " +
+      "the key inserted at last takes precedence.")
+  }
+
+  def differentLengthOfKeyArrayAndValueArrayOfMapDataError(): Throwable = {
+    new RuntimeException("The key array and value array of MapData must have 
the same length.")
+  }
+
+  def fieldDiffersFromDerivedOneError(

Review comment:
       maybe `fieldDiffersFromDerivedLocalDateError`?
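
If that name is adopted, a sketch (signature and body unchanged from the hunk above; `ChronoField`, `LocalDate`, and `DateTimeException` are assumed to already be imported):

```scala
// Candidate name from the suggestion above; signature and body as in the PR diff.
def fieldDiffersFromDerivedLocalDateError(
    field: ChronoField, actual: Int, expected: Int, candidate: LocalDate): Throwable = {
  new DateTimeException(s"Conflict found: Field $field $actual differs from" +
    s" $field $expected derived from $candidate")
}
```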

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -823,4 +824,55 @@ object QueryExecutionErrors {
     new SparkException(s"Failed to merge incompatible data types 
${left.catalogString}" +
       s" and ${right.catalogString}")
   }
+
+  def exceedMapSizeWhenBuildMapError(size: Int): Throwable = {
+    new RuntimeException(s"Unsuccessful attempt to build maps with $size 
elements " +
+      s"due to exceeding the map size limit 
${ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH}.")
+  }
+
+  def duplicateMapKeyFoundError(key: Any): Throwable = {
+    new RuntimeException(s"Duplicate map key $key was found, please check the 
input " +
+      "data. If you want to remove the duplicated keys, you can set " +
+      s"${SQLConf.MAP_KEY_DEDUP_POLICY.key} to 
${SQLConf.MapKeyDedupPolicy.LAST_WIN} so that " +
+      "the key inserted at last takes precedence.")
+  }
+
+  def differentLengthOfKeyArrayAndValueArrayOfMapDataError(): Throwable = {
+    new RuntimeException("The key array and value array of MapData must have 
the same length.")
+  }
+
+  def fieldDiffersFromDerivedOneError(
+      field: ChronoField, actual: Int, expected: Int, candidate: LocalDate): 
Throwable = {
+    new DateTimeException(s"Conflict found: Field $field $actual differs from" 
+
+      s" $field $expected derived from $candidate")
+  }
+
+  def failedParseDateTimeInNewParserError(s: String, e: Throwable): Throwable 
= {
+    new SparkUpgradeException("3.0", s"Fail to parse '$s' in the new parser. 
You can " +
+      s"set ${SQLConf.LEGACY_TIME_PARSER_POLICY.key} to LEGACY to restore the 
behavior " +
+      s"before Spark 3.0, or set to CORRECTED and treat it as an invalid 
datetime string.", e)
+  }
+
+  def failedFormatDateTimeInNewFormatterError(

Review comment:
       failToFormatDateTimeInNewFormatterError
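
A sketch with the rename applied, keeping the body exactly as in the diff:

```scala
// Suggested name; body unchanged from the PR diff.
def failToFormatDateTimeInNewFormatterError(
    resultCandidate: String, e: Throwable): Throwable = {
  new SparkUpgradeException("3.0",
    s"""
       |Fail to format it to '$resultCandidate' in the new formatter. You can set
       |${SQLConf.LEGACY_TIME_PARSER_POLICY.key} to LEGACY to restore the behavior before
       |Spark 3.0, or set to CORRECTED and treat it as an invalid datetime string.
     """.stripMargin.replaceAll("\n", " "), e)
}
```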




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]


