Repository: spark
Updated Branches:
  refs/heads/master fd315f588 -> 1b1528a50


[SPARK-24366][SQL] Improving of error messages for type converting

## What changes were proposed in this pull request?

Currently, users are getting the following error messages on type conversions:

```
scala.MatchError: test (of class java.lang.String)
```

The message doesn't give any clues to the users where in the schema the error 
happened. In this PR, I would like to improve the error message like:

```
The value (test) of the type (java.lang.String) cannot be converted to 
struct<f1:int>
```

## How was this patch tested?

Added tests for converting of wrong values to `struct`, `map`, `array`, 
`string` and `decimal`.

Author: Maxim Gekk <maxim.g...@databricks.com>

Closes #21410 from MaxGekk/type-conv-error.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/1b1528a5
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/1b1528a5
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/1b1528a5

Branch: refs/heads/master
Commit: 1b1528a504febfadf6fe41fd72e657689da50525
Parents: fd315f5
Author: Maxim Gekk <maxim.g...@databricks.com>
Authored: Fri May 25 15:42:46 2018 -0700
Committer: Xiao Li <gatorsm...@gmail.com>
Committed: Fri May 25 15:42:46 2018 -0700

----------------------------------------------------------------------
 .../sql/catalyst/CatalystTypeConverters.scala   | 16 +++++++
 .../catalyst/CatalystTypeConvertersSuite.scala  | 45 ++++++++++++++++++++
 2 files changed, 61 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/1b1528a5/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala
index 474ec59..9e9105a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala
@@ -170,6 +170,9 @@ object CatalystTypeConverters {
             convertedIterable += elementConverter.toCatalyst(item)
           }
           new GenericArrayData(convertedIterable.toArray)
+        case other => throw new IllegalArgumentException(
+        case other => throw new IllegalArgumentException(
+          s"The value (${other.toString}) of the type (${other.getClass.getCanonicalName}) "
+            + s"cannot be converted to an array of ${elementType.catalogString}")
       }
     }
 
@@ -206,6 +209,10 @@ object CatalystTypeConverters {
       scalaValue match {
         case map: Map[_, _] => ArrayBasedMapData(map, keyFunction, valueFunction)
         case javaMap: JavaMap[_, _] => ArrayBasedMapData(javaMap, keyFunction, valueFunction)
+        case other => throw new IllegalArgumentException(
+          s"The value (${other.toString}) of the type (${other.getClass.getCanonicalName}) "
+            + "cannot be converted to a map type with "
+            + s"key type (${keyType.catalogString}) and value type (${valueType.catalogString})")
       }
     }
 
@@ -252,6 +259,9 @@ object CatalystTypeConverters {
           idx += 1
         }
         new GenericInternalRow(ar)
+      case other => throw new IllegalArgumentException(
+        s"The value (${other.toString}) of the type (${other.getClass.getCanonicalName}) "
+          + s"cannot be converted to ${structType.catalogString}")
     }
 
     override def toScala(row: InternalRow): Row = {
@@ -276,6 +286,9 @@ object CatalystTypeConverters {
     override def toCatalystImpl(scalaValue: Any): UTF8String = scalaValue match {
       case str: String => UTF8String.fromString(str)
       case utf8: UTF8String => utf8
+      case other => throw new IllegalArgumentException(
+        s"The value (${other.toString}) of the type (${other.getClass.getCanonicalName}) "
+          + s"cannot be converted to the string type")
     }
     override def toScala(catalystValue: UTF8String): String =
       if (catalystValue == null) null else catalystValue.toString
@@ -309,6 +322,9 @@ object CatalystTypeConverters {
         case d: JavaBigDecimal => Decimal(d)
         case d: JavaBigInteger => Decimal(d)
         case d: Decimal => d
+        case other => throw new IllegalArgumentException(
+          s"The value (${other.toString}) of the type (${other.getClass.getCanonicalName}) "
+            + s"cannot be converted to ${dataType.catalogString}")
       }
       decimal.toPrecision(dataType.precision, dataType.scale)
     }

http://git-wip-us.apache.org/repos/asf/spark/blob/1b1528a5/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala
index f3702ec..f99af9b 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersSuite.scala
@@ -94,4 +94,49 @@ class CatalystTypeConvertersSuite extends SparkFunSuite {
     assert(CatalystTypeConverters.createToCatalystConverter(doubleArrayType)(doubleArray)
       == doubleGenericArray)
   }
+
+  test("converting a wrong value to the struct type") {
+    val structType = new StructType().add("f1", IntegerType)
+    val exception = intercept[IllegalArgumentException] {
+      CatalystTypeConverters.createToCatalystConverter(structType)("test")
+    }
+    assert(exception.getMessage.contains("The value (test) of the type "
+      + "(java.lang.String) cannot be converted to struct<f1:int>"))
+  }
+
+  test("converting a wrong value to the map type") {
+    val mapType = MapType(StringType, IntegerType, false)
+    val exception = intercept[IllegalArgumentException] {
+      CatalystTypeConverters.createToCatalystConverter(mapType)("test")
+    }
+    assert(exception.getMessage.contains("The value (test) of the type "
+      + "(java.lang.String) cannot be converted to a map type with key "
+      + "type (string) and value type (int)"))
+  }
+
+  test("converting a wrong value to the array type") {
+    val arrayType = ArrayType(IntegerType, true)
+    val exception = intercept[IllegalArgumentException] {
+      CatalystTypeConverters.createToCatalystConverter(arrayType)("test")
+    }
+    assert(exception.getMessage.contains("The value (test) of the type "
+      + "(java.lang.String) cannot be converted to an array of int"))
+  }
+
+  test("converting a wrong value to the decimal type") {
+    val decimalType = DecimalType(10, 0)
+    val exception = intercept[IllegalArgumentException] {
+      CatalystTypeConverters.createToCatalystConverter(decimalType)("test")
+    }
+    assert(exception.getMessage.contains("The value (test) of the type "
+      + "(java.lang.String) cannot be converted to decimal(10,0)"))
+  }
+
+  test("converting a wrong value to the string type") {
+    val exception = intercept[IllegalArgumentException] {
+      CatalystTypeConverters.createToCatalystConverter(StringType)(0.1)
+    }
+    assert(exception.getMessage.contains("The value (0.1) of the type "
+      + "(java.lang.Double) cannot be converted to the string type"))
+  }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to