Github user KanakaKumar commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2687#discussion_r216122505
--- Diff:
integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableWithAvroDataType.scala
---
@@ -884,4 +962,220 @@ class TestNonTransactionalCarbonTableWithAvroDataType
extends QueryTest with Bef
checkAnswer(sql("select * from sdkOutputTable"), Seq(Row(1728000,
Row(1728000))))
}
+ test("test logical type decimal through Json") {
+ sql("drop table if exists sdkOutputTable")
+
FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(writerPath))
+ val schema1 =
+ """{
+ | "namespace": "com.apache.schema",
+ | "type": "record",
+ | "name": "StudentActivity",
+ | "fields": [
+ | {
+ | "name": "id",
+ | "type": {"type" :
"bytes",
+ | "logicalType": "decimal",
+ | "precision": 5,
+ | "scale": 2
+ | }
+ |}
+ | ]
+ |}""".stripMargin
+ val nn = new org.apache.avro.Schema.Parser().parse(schema1)
+ val logicalType = LogicalTypes.decimal(5, 2)
+ val decimal = new java.math.BigDecimal("12.8").setScale(2)
+ //get unscaled 2's complement bytearray
+ val bytes =
+ decimalConversion.toBytes(decimal, nn.getField("id").schema,
logicalType)
+ val data = DatatypeConverter.printBase64Binary(bytes.array())
+ val json1 =
+ s"""{"id":"$data"}""".stripMargin
+ val record = testUtil.jsonToAvro(json1, schema1)
+ val data1 = new String(record.get(0).asInstanceOf[ByteBuffer].array(),
+ CarbonCommonConstants.DEFAULT_CHARSET_CLASS)
+ val bytes1 =
ByteBuffer.wrap(DatatypeConverter.parseBase64Binary(data1))
+ val avroRec = new GenericData. Record(nn)
+ avroRec.put("id", bytes1)
+ val writer = CarbonWriter.builder
+
.outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput(nn)
+ writer.write(avroRec)
+ writer.close()
+ sql(
+ s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY
+ |'carbondata' LOCATION
+ |'$writerPath' """.stripMargin)
+ checkExistence(sql("select * from sdkOutputTable"), true, "12.80")
+ }
+
+ test("test logical type decimal through Avro") {
+ sql("drop table if exists sdkOutputTable")
+
FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(writerPath))
+ val schema1 =
+ """{
+ | "namespace": "com.apache.schema",
+ | "type": "record",
+ | "name": "StudentActivity",
+ | "fields": [
+ | {
+ | "name": "id",
+ | "type": {"type" :
"bytes",
+ | "logicalType": "decimal",
+ | "precision": 5,
+ | "scale": 2
+ | }
+ |}
+ | ]
+ |}""".stripMargin
+ val nn = new org.apache.avro.Schema.Parser().parse(schema1)
+ val logicalType = LogicalTypes.decimal(5, 2)
+ val decimal = new java.math.BigDecimal("12.8").setScale(2)
+ //get unscaled 2's complement bytearray
+ val bytes =
+ decimalConversion.toBytes(decimal, nn.getField("id").schema,
logicalType)
+ val data = DatatypeConverter.printBase64Binary(bytes.array())
+ val json1 =
+ s"""{"id":"$data"}""".stripMargin
+ val avroRec = new GenericData. Record(nn)
+ avroRec.put("id", bytes)
+ val writer = CarbonWriter.builder
+
.outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput(nn)
+ writer.write(avroRec)
+ writer.close()
+ sql(
+ s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY
+ |'carbondata' LOCATION
+ |'$writerPath' """.stripMargin)
+ checkExistence(sql("select * from sdkOutputTable"), true, "12.80")
+ }
+
+ test("test logical type decimal with data having greater precision") {
--- End diff --
Please add a test to verify boundary values, such as negative and very large big
decimal values (high precision requires a long to store the unscaled value).
---