panbingkun commented on code in PR #45657:
URL: https://github.com/apache/spark/pull/45657#discussion_r1535478355


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/csv/UnivocityGenerator.scala:
##########
@@ -64,33 +65,143 @@ class UnivocityGenerator(
   private val nullAsQuotedEmptyString =
     
SQLConf.get.getConf(SQLConf.LEGACY_NULL_VALUE_WRITTEN_AS_QUOTED_EMPTY_STRING_CSV)
 
-  @scala.annotation.tailrec
   private def makeConverter(dataType: DataType): ValueConverter = dataType 
match {
+    case BinaryType =>
+      (getter, ordinal) => 
SparkStringUtils.getHexString(getter.getBinary(ordinal))
+
     case DateType =>
-      (row: InternalRow, ordinal: Int) => 
dateFormatter.format(row.getInt(ordinal))
+      (getter, ordinal) => dateFormatter.format(getter.getInt(ordinal))
 
     case TimestampType =>
-      (row: InternalRow, ordinal: Int) => 
timestampFormatter.format(row.getLong(ordinal))
+      (getter, ordinal) => timestampFormatter.format(getter.getLong(ordinal))
 
     case TimestampNTZType =>
-      (row: InternalRow, ordinal: Int) =>
-        
timestampNTZFormatter.format(DateTimeUtils.microsToLocalDateTime(row.getLong(ordinal)))
+      (getter, ordinal) =>
+        
timestampNTZFormatter.format(DateTimeUtils.microsToLocalDateTime(getter.getLong(ordinal)))
 
     case YearMonthIntervalType(start, end) =>
-      (row: InternalRow, ordinal: Int) =>
+      (getter, ordinal) =>
         IntervalUtils.toYearMonthIntervalString(
-          row.getInt(ordinal), IntervalStringStyles.ANSI_STYLE, start, end)
+          getter.getInt(ordinal), IntervalStringStyles.ANSI_STYLE, start, end)
 
     case DayTimeIntervalType(start, end) =>
-      (row: InternalRow, ordinal: Int) =>
-      IntervalUtils.toDayTimeIntervalString(
-        row.getLong(ordinal), IntervalStringStyles.ANSI_STYLE, start, end)
+      (getter, ordinal) =>
+        IntervalUtils.toDayTimeIntervalString(
+          getter.getLong(ordinal), IntervalStringStyles.ANSI_STYLE, start, end)
 
     case udt: UserDefinedType[_] => makeConverter(udt.sqlType)
 
+    case ArrayType(et, _) =>
+      val elementConverter = makeConverter(et)
+      (getter, ordinal) =>
+        val array = getter.getArray(ordinal)
+        val builder = new StringBuilder
+        builder.append("[")
+        if (array.numElements() > 0) {
+          if (array.isNullAt(0)) {
+            if (nullAsQuotedEmptyString) {
+              builder.append(options.nullValue)
+            } else {
+              builder.append(null.asInstanceOf[String])
+            }
+          } else {
+            builder.append(elementConverter(array, 0))
+          }
+          var i = 1
+          while (i < array.numElements()) {
+            builder.append(", ")
+            if (array.isNullAt(i)) {
+              if (nullAsQuotedEmptyString) {
+                builder.append(options.nullValue)
+              } else {
+                builder.append(null.asInstanceOf[String])
+              }
+            } else {
+              builder.append(elementConverter(array, i))
+            }
+            i += 1
+          }
+        }
+        builder.append("]")
+        builder.toString()
+
+    case MapType(kt, vt, _) =>
+      val keyConverter = makeConverter(kt)
+      val valueConverter = makeConverter(vt)
+      (getter, ordinal) =>
+        val map = getter.getMap(ordinal)
+        val builder = new StringBuilder
+        builder.append("{")
+        if (map.numElements() > 0) {
+          val keyArray = map.keyArray()
+          val valueArray = map.valueArray()
+          builder.append(keyConverter(keyArray, 0))
+          builder.append(" -> ")
+          if (valueArray.isNullAt(0)) {
+            if (nullAsQuotedEmptyString) {
+              builder.append(options.nullValue)
+            } else {
+              builder.append(null.asInstanceOf[String])
+            }
+          } else {
+            builder.append(valueConverter(valueArray, 0))
+          }
+          var i = 1
+          while (i < map.numElements()) {
+            builder.append(", ")
+            builder.append(keyConverter(keyArray, i))
+            builder.append(" -> ")
+            if (valueArray.isNullAt(i)) {
+              if (nullAsQuotedEmptyString) {
+                builder.append(options.nullValue)
+              } else {
+                builder.append(null.asInstanceOf[String])
+              }
+            } else {
+              builder.append(valueConverter(valueArray, i))
+            }
+            i += 1
+          }
+        }
+        builder.append("}")
+        builder.toString()
+
+    case StructType(fields) =>
+      val converters = fields.map(_.dataType).map(makeConverter)
+      (getter, ordinal) =>
+        val row = getter.getStruct(ordinal, fields.length)
+        val builder = new StringBuilder
+        builder.append("{")
+        if (row.numFields > 0) {
+          if (row.isNullAt(0)) {
+            if (nullAsQuotedEmptyString) {
+              builder.append(options.nullValue)
+            } else {
+              builder.append(null.asInstanceOf[String])
+            }
+          } else {
+            builder.append(converters(0)(row, 0))
+          }
+          var i = 1
+          while (i < row.numFields) {
+            builder.append(", ")
+            if (row.isNullAt(i)) {
+              if (nullAsQuotedEmptyString) {
+                builder.append(options.nullValue)
+              } else {
+                builder.append(null.asInstanceOf[String])
+              }
+            } else {
+              builder.append(converters(i)(row, i))

Review Comment:
   Okay, I found a bug in the original `to_csv` (i.e. before applying this PR), as
follows:
   <img width="985" alt="image"
src="https://github.com/apache/spark/assets/15246973/7491d868-3236-4518-8b27-7aed769d26e4">
   
https://github.com/apache/spark/blob/227a50a1766ac1476b0031e1c60d2604eccdb9a7/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/csv/UnivocityGenerator.scala#L100-L104
   <img width="538" alt="image"
src="https://github.com/apache/spark/assets/15246973/e576e7e0-a03b-497f-929a-cf8b1cdcfcc8">
   
   When `row.isNullAt(i)` is true and `nullAsQuotedEmptyString == false`,
   `values(i)` is never assigned, so it retains its default value of `null`, which
seems incorrect.
   
   <img width="859" alt="image" 
src="https://github.com/apache/spark/assets/15246973/68d263e9-934d-436f-b79d-1a04b6cd5b76";>
   According to this interpretation, we should assign the value `""` (empty string) instead.
   
   @cloud-fan Should we fix it in this PR or open a separate PR to handle it?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to