GitHub user KanakaKumar commented on a diff in the pull request:

    https://github.com/apache/carbondata/pull/2687#discussion_r216122672
  
    --- Diff: 
integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableWithAvroDataType.scala
 ---
    @@ -884,4 +962,220 @@ class TestNonTransactionalCarbonTableWithAvroDataType 
extends QueryTest with Bef
         checkAnswer(sql("select * from sdkOutputTable"), Seq(Row(1728000, 
Row(1728000))))
       }
     
    +  test("test logical type decimal through Json") {
    +    sql("drop table if exists sdkOutputTable")
    +    
FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(writerPath))
    +    val schema1 =
    +      """{
    +        |  "namespace": "com.apache.schema",
    +        |  "type": "record",
    +        |  "name": "StudentActivity",
    +        |  "fields": [
    +        |          {
    +        |                  "name": "id",
    +        |                                          "type": {"type" : 
"bytes",
    +        |                     "logicalType": "decimal",
    +        |                     "precision": 5,
    +        |                     "scale": 2
    +        |                    }
    +        |}
    +        |  ]
    +        |}""".stripMargin
    +    val nn = new org.apache.avro.Schema.Parser().parse(schema1)
    +    val logicalType = LogicalTypes.decimal(5, 2)
    +    val decimal = new java.math.BigDecimal("12.8").setScale(2)
    +    //get unscaled 2's complement bytearray
    +    val bytes =
    +      decimalConversion.toBytes(decimal, nn.getField("id").schema, 
logicalType)
    +    val data = DatatypeConverter.printBase64Binary(bytes.array())
    +    val json1 =
    +      s"""{"id":"$data"}""".stripMargin
    +    val record = testUtil.jsonToAvro(json1, schema1)
    +    val data1 = new String(record.get(0).asInstanceOf[ByteBuffer].array(),
    +      CarbonCommonConstants.DEFAULT_CHARSET_CLASS)
    +    val bytes1 = 
ByteBuffer.wrap(DatatypeConverter.parseBase64Binary(data1))
    +    val avroRec = new GenericData. Record(nn)
    +    avroRec.put("id", bytes1)
    +    val writer = CarbonWriter.builder
    +      
.outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput(nn)
    +    writer.write(avroRec)
    +    writer.close()
    +    sql(
    +      s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY
    +         |'carbondata' LOCATION
    +         |'$writerPath' """.stripMargin)
    +    checkExistence(sql("select * from sdkOutputTable"), true, "12.80")
    +  }
    +
    +  test("test logical type decimal through Avro") {
    +    sql("drop table if exists sdkOutputTable")
    +    
FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(writerPath))
    +    val schema1 =
    +      """{
    +        |  "namespace": "com.apache.schema",
    +        |  "type": "record",
    +        |  "name": "StudentActivity",
    +        |  "fields": [
    +        |          {
    +        |                  "name": "id",
    +        |                                          "type": {"type" : 
"bytes",
    +        |                     "logicalType": "decimal",
    +        |                     "precision": 5,
    +        |                     "scale": 2
    +        |                    }
    +        |}
    +        |  ]
    +        |}""".stripMargin
    +    val nn = new org.apache.avro.Schema.Parser().parse(schema1)
    +    val logicalType = LogicalTypes.decimal(5, 2)
    +    val decimal = new java.math.BigDecimal("12.8").setScale(2)
    +    //get unscaled 2's complement bytearray
    +    val bytes =
    +      decimalConversion.toBytes(decimal, nn.getField("id").schema, 
logicalType)
    +    val data = DatatypeConverter.printBase64Binary(bytes.array())
    +    val json1 =
    +      s"""{"id":"$data"}""".stripMargin
    +    val avroRec = new GenericData. Record(nn)
    +    avroRec.put("id", bytes)
    +    val writer = CarbonWriter.builder
    +      
.outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput(nn)
    +    writer.write(avroRec)
    +    writer.close()
    +    sql(
    +      s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY
    +         |'carbondata' LOCATION
    +         |'$writerPath' """.stripMargin)
    +    checkExistence(sql("select * from sdkOutputTable"), true, "12.80")
    +  }
    +
    +  test("test logical type decimal with data having greater precision") {
    +    sql("drop table if exists sdkOutputTable")
    +    
FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(writerPath))
    +    val schema1 =
    +      """{
    +        |  "namespace": "com.apache.schema",
    +        |  "type": "record",
    +        |  "name": "StudentActivity",
    +        |  "fields": [
    +        |          {
    +        |                  "name": "id",
    +        |                                          "type": {"type" : 
"bytes",
    +        |                     "logicalType": "decimal",
    +        |                     "precision": 5,
    +        |                     "scale": 2
    +        |                    }
    +        |}
    +        |  ]
    +        |}""".stripMargin
    +    val nn = new org.apache.avro.Schema.Parser().parse(schema1)
    +    val logicalType = LogicalTypes.decimal(5, 2)
    +    val decimal = new java.math.BigDecimal("1218").setScale(2)
    +    //get unscaled 2's complement bytearray
    +    val bytes =
    +      decimalConversion.toBytes(decimal, nn.getField("id").schema, 
logicalType)
    +    val data = DatatypeConverter.printBase64Binary(bytes.array())
    +    val json1 =
    +      s"""{"id":"$data"}""".stripMargin
    +    val avroRec = new GenericData. Record(nn)
    +    avroRec.put("id", bytes)
    +    val exception1 = intercept[Exception] {
    +    val writer = CarbonWriter.builder
    +      
.outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput(nn)
    +    writer.write(avroRec)
    +    writer.close()
    +    }
    +    assert(exception1.getMessage
    +      .contains("Data Loading failed as value Precision 6 is greater than 
specified Precision 5 in Avro Schema"))
    +  }
    +
    +  test("test union with multiple record type") {
    +    sql("drop table if exists sdkOutputTable")
    +    
FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(writerPath))
    +    val schema1 =
    +      """{
    +        |  "namespace": "test.avro",
    +        |  "type": "record",
    +        |  "name": "NewCar2",
    +        |  "fields": [
    +        |      {
    +        |            "name": "optionalExtra",
    +        |    "type": ["null",{
    +        |       "type":"record",
    +        |       "name":"Stereo",
    +        |       "fields" :[{
    +        |       "name":"make",
    +        |       "type":"string"
    +        |       },
    +        |       {
    +        |       "name":"speakers",
    +        |       "type":"int"
    +        |       }]
    +        |       },{
    +        |       "type":"record",
    +        |       "name":"LeatherTrim",
    +        |       "fields":[{
    +        |       "name":"colour",
    +        |       "type":"string"
    +        |       }]
    +        |       }],
    +        |       "default":null
    +        |       }]
    +        |
    +        |}""".stripMargin
    +
    +    val json1 =
    +      
"""{"optionalExtra":{"test.avro.LeatherTrim":{"colour":"ab"}}}""".stripMargin
    +
    +    val nn = new org.apache.avro.Schema.Parser().parse(schema1)
    +    val record = testUtil.jsonToAvro(json1, schema1)
    +
    +    val writer = CarbonWriter.builder
    +      
.outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput(nn)
    +    writer.write(record)
    +    writer.close()
    +    sql(
    +      s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY
    +         |'carbondata' LOCATION
    +         |'$writerPath' """.stripMargin)
    +    checkAnswer(sql("select * from sdkOutputTable"),
    +      Seq(Row(Row(Row(null,null),Row("ab")))))
    +  }
    +
    +  test("test union with multiple Enum type") {
    +    sql("drop table if exists sdkOutputTable")
    +    
FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(writerPath))
    +    val schema1 =
    +      """{
    +        |  "namespace": "test.avro",
    +        |  "type": "record",
    +        |  "name": "Union_data3",
    +        |  "fields": [
    +        |      {
    +        |            "name": "emp_id",
    --- End diff --
    
    Please use a name that is related to the field's content — `emp_id` and the union types are not related.


---

Reply via email to