maropu commented on a change in pull request #29085:
URL: https://github.com/apache/spark/pull/29085#discussion_r456293505



##########
File path: sql/core/src/main/scala/org/apache/spark/sql/execution/BaseScriptTransformationExec.scala
##########
@@ -87,17 +131,55 @@ trait BaseScriptTransformationExec extends UnaryExecNode {
       }
     }
   }
+
+  private lazy val fieldWriters: Seq[String => Any] = output.map { attr =>
+    val converter = CatalystTypeConverters.createToCatalystConverter(attr.dataType)
+    attr.dataType match {
+      case StringType => (data: String) => converter(data)
+      case ByteType => (data: String) => converter(data.toByte)
+      case IntegerType => (data: String) => converter(data.toInt)
+      case ShortType => (data: String) => converter(data.toShort)
+      case LongType => (data: String) => converter(data.toLong)
+      case FloatType => (data: String) => converter(data.toFloat)
+      case DoubleType => (data: String) => converter(data.toDouble)
+      case dt: DecimalType => (data: String) => converter(BigDecimal(data))
+      case DateType if conf.datetimeJava8ApiEnabled => (data: String) =>

Review comment:
       I checked the `TRANSFORM` behaviours in v3.0:
   ```
   scala> sql("""
   SELECT TRANSFORM(a)
   USING 'cat' as output
   FROM VALUES (timestamp '2020-07-10 10:00:00') t(a)
   """).show()
   
   java.lang.ClassCastException: java.lang.Long cannot be cast to java.sql.Timestamp
        at org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaTimestampObjectInspector.getPrimitiveWritableObject(JavaTimestampObjectInspector.java:34)
        at org.apache.hadoop.hive.serde2.lazy.LazyUtils.writePrimitiveUTF8(LazyUtils.java:305)
        at org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.serialize(LazySimpleSerDe.java:292)
        at org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.serializeField(LazySimpleSerDe.java:247)
        at org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.doSerialize(LazySimpleSerDe.java:231)
        at org.apache.hadoop.hive.serde2.AbstractEncodingAwareSerDe.serialize(AbstractEncodingAwareSerDe.java:55)
   ```
   It seems the default SerDe supports only basic data types: `byte`, `short`, `int`, `long`, `float`, `double`, and `string`. So this PR seems to propose two things: implementing `TRANSFORM` in `sql/core` and adding more type support to the default SerDe in Spark/Hive `TRANSFORM`. Is this correct?
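   Since the diff hunk above is truncated at the `DateType` case, here is a minimal sketch of how the remaining date/timestamp writers could look; `fieldWriterFor` is a hypothetical name (not the PR's code), and it assumes the Spark 3.x `CatalystTypeConverters` and `SQLConf` APIs:
   ```
   import org.apache.spark.sql.catalyst.CatalystTypeConverters
   import org.apache.spark.sql.internal.SQLConf
   import org.apache.spark.sql.types._

   // Hypothetical sketch: parse a script's text output into the external
   // type that the Catalyst converter expects. Which type that is depends
   // on spark.sql.datetime.java8API.enabled, hence the guarded cases.
   def fieldWriterFor(dt: DataType): String => Any = {
     val converter = CatalystTypeConverters.createToCatalystConverter(dt)
     dt match {
       case DateType if SQLConf.get.datetimeJava8ApiEnabled =>
         // the converter expects java.time.LocalDate under the Java 8 API flag
         (data: String) => converter(java.time.LocalDate.parse(data))
       case DateType =>
         (data: String) => converter(java.sql.Date.valueOf(data))
       case TimestampType if SQLConf.get.datetimeJava8ApiEnabled =>
         // Timestamp.valueOf parses "yyyy-[m]m-[d]d hh:mm:ss[.f...]", the
         // format a script like 'cat' echoes back; toInstant then yields
         // the java.time value the converter expects under the flag
         (data: String) => converter(java.sql.Timestamp.valueOf(data).toInstant)
       case TimestampType =>
         (data: String) => converter(java.sql.Timestamp.valueOf(data))
       case _ =>
         // fall back to passing the raw string through the converter
         (data: String) => converter(data)
     }
   }
   ```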




