Github user KanakaKumar commented on a diff in the pull request:

    https://github.com/apache/carbondata/pull/2687#discussion_r216128016
  
    --- Diff: 
integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableWithAvroDataType.scala
 ---
    @@ -884,4 +962,310 @@ class TestNonTransactionalCarbonTableWithAvroDataType 
extends QueryTest with Bef
         checkAnswer(sql("select * from sdkOutputTable"), Seq(Row(1728000, 
Row(1728000))))
       }
     
    +  test("test logical type decimal through Json") {
    +    sql("drop table if exists sdkOutputTable")
    +    
FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(writerPath))
    +    val schema1 =
    +      """{
    +        |  "namespace": "com.apache.schema",
    +        |  "type": "record",
    +        |  "name": "StudentActivity",
    +        |  "fields": [
    +        |          {
    +        |                  "name": "id",
    +        |                                          "type": {"type" : 
"bytes",
    +        |                     "logicalType": "decimal",
    +        |                     "precision": 5,
    +        |                     "scale": 2
    +        |                    }
    +        |}
    +        |  ]
    +        |}""".stripMargin
    +    val nn = new org.apache.avro.Schema.Parser().parse(schema1)
    +    val logicalType = LogicalTypes.decimal(5, 2)
    +    val decimal = new java.math.BigDecimal("12.8").setScale(2)
    +    //get unscaled 2's complement bytearray
    +    val bytes =
    +      decimalConversion.toBytes(decimal, nn.getField("id").schema, 
logicalType)
    +    val data = DatatypeConverter.printBase64Binary(bytes.array())
    +    val json1 =
    +      s"""{"id":"$data"}""".stripMargin
    +    val record = testUtil.jsonToAvro(json1, schema1)
    +    val data1 = new String(record.get(0).asInstanceOf[ByteBuffer].array(),
    +      CarbonCommonConstants.DEFAULT_CHARSET_CLASS)
    +    val bytes1 = 
ByteBuffer.wrap(DatatypeConverter.parseBase64Binary(data1))
    +    val avroRec = new GenericData. Record(nn)
    +    avroRec.put("id", bytes1)
    +    val writer = CarbonWriter.builder
    +      
.outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput(nn)
    +    writer.write(avroRec)
    +    writer.close()
    +    sql(
    +      s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY
    +         |'carbondata' LOCATION
    +         |'$writerPath' """.stripMargin)
    +    checkExistence(sql("select * from sdkOutputTable"), true, "12.80")
    +  }
    +
    +  test("test logical type decimal through Json with big decimal value") {
    +    sql("drop table if exists sdkOutputTable")
    +    
FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(writerPath))
    +    val schema1 =
    +      """{
    +        |  "namespace": "com.apache.schema",
    +        |  "type": "record",
    +        |  "name": "StudentActivity",
    +        |  "fields": [
    +        |          {
    +        |                  "name": "dec_field",
    +        |                                          "type": {"type" : 
"bytes",
    +        |                     "logicalType": "decimal",
    +        |                     "precision": 30,
    +        |                     "scale": 10
    +        |                    }
    +        |}
    +        |  ]
    +        |}""".stripMargin
    +    val nn = new org.apache.avro.Schema.Parser().parse(schema1)
    +    val logicalType = LogicalTypes.decimal(30, 10)
    +    val decimal = new 
java.math.BigDecimal("12672346879023.845789").setScale(10)
    +    //get unscaled 2's complement bytearray
    +    val bytes =
    +      decimalConversion.toBytes(decimal, nn.getField("dec_field").schema, 
logicalType)
    +    val data = DatatypeConverter.printBase64Binary(bytes.array())
    +    val json1 =
    +      s"""{"dec_field":"$data"}""".stripMargin
    +    val record = testUtil.jsonToAvro(json1, schema1)
    +    val data1 = new String(record.get(0).asInstanceOf[ByteBuffer].array(),
    +      CarbonCommonConstants.DEFAULT_CHARSET_CLASS)
    +    val bytes1 = 
ByteBuffer.wrap(DatatypeConverter.parseBase64Binary(data1))
    +    val avroRec = new GenericData. Record(nn)
    +    avroRec.put("dec_field", bytes1)
    +    val writer = CarbonWriter.builder
    +      
.outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput(nn)
    +    writer.write(avroRec)
    +    writer.close()
    +    sql(
    +      s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY
    +         |'carbondata' LOCATION
    +         |'$writerPath' """.stripMargin)
    +    checkExistence(sql("select * from sdkOutputTable"), true, 
"12672346879023.8457890000")
    +  }
    +
    +  test("test logical type decimal through Json with negative decimal 
value") {
    +    sql("drop table if exists sdkOutputTable")
    +    
FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(writerPath))
    +    val schema1 =
    +      """{
    +        |  "namespace": "com.apache.schema",
    +        |  "type": "record",
    +        |  "name": "StudentActivity",
    +        |  "fields": [
    +        |          {
    +        |                  "name": "dec_field",
    +        |                                          "type": {"type" : 
"bytes",
    +        |                     "logicalType": "decimal",
    +        |                     "precision": 30,
    +        |                     "scale": 6
    +        |                    }
    +        |}
    +        |  ]
    +        |}""".stripMargin
    +    val nn = new org.apache.avro.Schema.Parser().parse(schema1)
    +    val logicalType = LogicalTypes.decimal(30, 6)
    +    val decimal = new 
java.math.BigDecimal("-12672346879023.845").setScale(6)
    +    //get unscaled 2's complement bytearray
    +    val bytes =
    +      decimalConversion.toBytes(decimal, nn.getField("dec_field").schema, 
logicalType)
    +    val data = DatatypeConverter.printBase64Binary(bytes.array())
    +    val json1 =
    +      s"""{"dec_field":"$data"}""".stripMargin
    +    val record = testUtil.jsonToAvro(json1, schema1)
    +    val data1 = new String(record.get(0).asInstanceOf[ByteBuffer].array(),
    +      CarbonCommonConstants.DEFAULT_CHARSET_CLASS)
    +    val bytes1 = 
ByteBuffer.wrap(DatatypeConverter.parseBase64Binary(data1))
    +    val avroRec = new GenericData. Record(nn)
    +    avroRec.put("dec_field", bytes1)
    +    val writer = CarbonWriter.builder
    +      
.outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput(nn)
    +    writer.write(avroRec)
    +    writer.close()
    +    sql(
    +      s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY
    +         |'carbondata' LOCATION
    +         |'$writerPath' """.stripMargin)
    +    checkExistence(sql("select * from sdkOutputTable"), true, 
"-12672346879023.845000")
    +  }
    +
    +  test("test logical type decimal through Avro") {
    +    sql("drop table if exists sdkOutputTable")
    +    
FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(writerPath))
    +    val schema1 =
    +      """{
    +        |  "namespace": "com.apache.schema",
    +        |  "type": "record",
    +        |  "name": "StudentActivity",
    +        |  "fields": [
    +        |          {
    +        |                  "name": "dec_field",
    +        |                                          "type": {"type" : 
"bytes",
    +        |                     "logicalType": "decimal",
    +        |                     "precision": 5,
    +        |                     "scale": 2
    +        |                    }
    +        |}
    +        |  ]
    +        |}""".stripMargin
    +    val nn = new org.apache.avro.Schema.Parser().parse(schema1)
    +    val logicalType = LogicalTypes.decimal(5, 2)
    +    val decimal = new java.math.BigDecimal("12.8").setScale(2)
    +    //get unscaled 2's complement bytearray
    +    val bytes =
    +      decimalConversion.toBytes(decimal, nn.getField("dec_field").schema, 
logicalType)
    +    val data = DatatypeConverter.printBase64Binary(bytes.array())
    +    val json1 =
    +      s"""{"dec_field":"$data"}""".stripMargin
    +    val avroRec = new GenericData. Record(nn)
    +    avroRec.put("dec_field", bytes)
    +    val writer = CarbonWriter.builder
    +      
.outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput(nn)
    +    writer.write(avroRec)
    +    writer.close()
    +    sql(
    +      s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY
    +         |'carbondata' LOCATION
    +         |'$writerPath' """.stripMargin)
    +    checkExistence(sql("select * from sdkOutputTable"), true, "12.80")
    +  }
    +
    +  test("test logical type decimal with data having greater precision than 
specified precision") {
    +    sql("drop table if exists sdkOutputTable")
    +    
FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(writerPath))
    +    val schema1 =
    +      """{
    +        |  "namespace": "com.apache.schema",
    +        |  "type": "record",
    +        |  "name": "StudentActivity",
    +        |  "fields": [
    +        |          {
    +        |                  "name": "dec_field",
    +        |                                          "type": {"type" : 
"bytes",
    +        |                     "logicalType": "decimal",
    +        |                     "precision": 5,
    +        |                     "scale": 2
    +        |                    }
    +        |}
    +        |  ]
    +        |}""".stripMargin
    +    val nn = new org.apache.avro.Schema.Parser().parse(schema1)
    +    val logicalType = LogicalTypes.decimal(5, 2)
    +    val decimal = new java.math.BigDecimal("1218").setScale(2)
    +    //get unscaled 2's complement bytearray
    +    val bytes =
    +      decimalConversion.toBytes(decimal, nn.getField("dec_field").schema, 
logicalType)
    +    val data = DatatypeConverter.printBase64Binary(bytes.array())
    +    val json1 =
    +      s"""{"dec_field":"$data"}""".stripMargin
    +    val avroRec = new GenericData. Record(nn)
    +    avroRec.put("dec_field", bytes)
    +    val exception1 = intercept[Exception] {
    +    val writer = CarbonWriter.builder
    +      
.outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput(nn)
    +    writer.write(avroRec)
    +    writer.close()
    +    }
    +    assert(exception1.getMessage
    +      .contains("Data Loading failed as value Precision 6 is greater than 
specified Precision 5 in Avro Schema"))
    +  }
    +
    +  test("test union with multiple record type") {
    +    sql("drop table if exists sdkOutputTable")
    +    
FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(writerPath))
    +    val schema1 =
    +      """{
    +        |  "namespace": "test.avro",
    +        |  "type": "record",
    +        |  "name": "NewCar2",
    +        |  "fields": [
    +        |      {
    +        |            "name": "optionalExtra",
    +        |    "type": ["null",{
    +        |       "type":"record",
    +        |       "name":"Stereo",
    +        |       "fields" :[{
    +        |       "name":"make",
    +        |       "type":"string"
    +        |       },
    +        |       {
    +        |       "name":"speakers",
    +        |       "type":"int"
    +        |       }]
    +        |       },{
    +        |       "type":"record",
    +        |       "name":"LeatherTrim",
    +        |       "fields":[{
    +        |       "name":"colour",
    +        |       "type":"string"
    +        |       }]
    +        |       }],
    +        |       "default":null
    +        |       }]
    +        |
    +        |}""".stripMargin
    +
    +    val json1 =
    +      
"""{"optionalExtra":{"test.avro.LeatherTrim":{"colour":"ab"}}}""".stripMargin
    +
    +    val nn = new org.apache.avro.Schema.Parser().parse(schema1)
    +    val record = testUtil.jsonToAvro(json1, schema1)
    +
    +    val writer = CarbonWriter.builder
    +      
.outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput(nn)
    +    writer.write(record)
    +    writer.close()
    +    sql(
    +      s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY
    +         |'carbondata' LOCATION
    +         |'$writerPath' """.stripMargin)
    +    checkAnswer(sql("select * from sdkOutputTable"),
    +      Seq(Row(Row(Row(null,null),Row("ab")))))
    +  }
    +
    +  test("test union with multiple Enum type") {
    --- End diff --
    
Can you add a test case that reads using the data source file format? (Syntax: "using
carbon" with schema — refer to SparkCarbonDataSourceTest.)
This will help users understand how to define schemas for Avro logical types and
unions.


---

Reply via email to