jackylk commented on a change in pull request #3538: [CARBONDATA-3637] Optimize insert into flow
URL: https://github.com/apache/carbondata/pull/3538#discussion_r376193657
##########
File path: integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CommonUtil.scala
##########
@@ -834,4 +843,179 @@ object CommonUtil {
displaySize
}
+ def getObjectArrayFromInternalRowAndConvertComplexType(row: InternalRow,
+ fieldTypes: Seq[DataType],
+ outputArrayLength: Int): Array[AnyRef] = {
+ val data = new Array[AnyRef](outputArrayLength)
+ var i = 0
+ val fieldTypesLen = fieldTypes.length
+ while (i < fieldTypesLen) {
+ if (!row.isNullAt(i)) {
+ fieldTypes(i) match {
+ case StringType =>
+ data(i) = row.getString(i)
+ case d: DecimalType =>
+ data(i) = row.getDecimal(i, d.precision, d.scale).toJavaBigDecimal
+ case arrayType : ArrayType =>
+           data(i) = convertSparkComplexTypeToCarbonObject(row.getArray(i),
+             arrayType)
+ case structType : StructType =>
+ data(i) = convertSparkComplexTypeToCarbonObject(row.getStruct(i,
+ structType.fields.length), structType)
+ case mapType : MapType =>
+           data(i) = convertSparkComplexTypeToCarbonObject(row.getMap(i),
+             mapType)
+ case other =>
+ data(i) = row.get(i, other)
+ }
+ }
+ i += 1
+ }
+ data
+ }
+
+ /**
+   * After converting complex objects to carbon objects, need to convert to
+   * byte array
+ *
+ * @param row
+ * @param fields
+ * @param dataFieldsWithComplexDataType
+ * @return
+ */
+ def getObjectArrayFromInternalRowAndConvertComplexTypeForGlobalSort(
+ row: InternalRow,
+ fields: Seq[StructField],
+       dataFieldsWithComplexDataType: Map[String, GenericDataType[_]]): Array[AnyRef] = {
Review comment:
return type should be Array[Byte]?
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services