Github user praveenmeenakshi56 commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2621#discussion_r208528095
--- Diff:
integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ComplexDataTypeTestCase.scala
---
@@ -0,0 +1,407 @@
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.cluster.sdv.generated
+
+import java.io.{ByteArrayInputStream, ByteArrayOutputStream,
DataInputStream, File, InputStream}
+import java.sql.Timestamp
+
+import scala.collection.mutable
+
+import org.apache.avro.file.DataFileWriter
+import org.apache.avro.generic.{GenericDatumReader, GenericDatumWriter,
GenericRecord}
+import org.apache.avro.io.{DecoderFactory, Encoder}
+import org.apache.commons.io.FileUtils
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.common.util.QueryTest
+import org.apache.spark.sql.test.TestQueryExecutor
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+import org.apache.carbondata.sdk.file.CarbonWriter
+
+/**
+ * Test Class for ComplexDataTypeTestCase to verify all scenarios
+ */
+
+class ComplexDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
+
+ // Root of the shared spark-common-test resource files used as load input.
+ val filePath = TestQueryExecutor.integrationPath +
"/spark-common-test/src/test/resources"
+ // Output directory for carbon files produced via the SDK CarbonWriter;
+ // deleted in both beforeAll and afterAll to keep runs isolated.
+ // NOTE(review): s"${ resourcesPath }" + "..." could be a single
+ // interpolated string, e.g. s"$resourcesPath/SparkCarbonFileFormat/...".
+ val writerPath =
+ s"${ resourcesPath }" + "/SparkCarbonFileFormat/WriterOutputComplex/"
+
+ // Ensure a clean slate before the suite runs: remove any leftover SDK
+ // writer output and drop the tables that individual tests create.
+ override def beforeAll(): Unit = {
+ FileUtils.deleteDirectory(new File(writerPath))
+ sql("DROP TABLE IF EXISTS complexcarbontable")
+ sql("DROP TABLE IF EXISTS test")
+ sql("DROP TABLE IF EXISTS sdkOutputTable")
+ }
+
+ // Clean up after the suite: remove SDK writer output, drop test tables,
+ // and restore the global timestamp/date format properties to their
+ // defaults (tests in this suite may have overridden them).
+ override def afterAll(): Unit = {
+ FileUtils.deleteDirectory(new File(writerPath))
+ sql("DROP TABLE IF EXISTS complexcarbontable")
+ sql("DROP TABLE IF EXISTS test")
+ sql("DROP TABLE IF EXISTS sdkOutputTable")
+ CarbonProperties.getInstance()
+ .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+ CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
+ .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT,
+ CarbonCommonConstants.CARBON_DATE_DEFAULT_FORMAT)
+ }
+
+ // check create table with complex data type
+ // Verifies only that CREATE TABLE succeeds for struct-of-struct and for
+ // array columns of each primitive type; no data is loaded or queried.
+ // NOTE(review): this exercises DDL only and does not use the cluster —
+ // reviewer suggests it belongs in the unit-test suite instead.
+ test("test Complex_DataType-001") {
+ sql("DROP TABLE IF EXISTS test")
+ sql(
+ "create table test(person
struct<detail:struct<id:int,name:string,height:double," +
+ "status:boolean,dob:date,dobt:timestamp>>) stored by 'carbondata'")
+ sql("DROP TABLE IF EXISTS test")
+ sql(
+ "create table test(p1 array<int>,p2 array<string>,p3
array<double>,p4 array<boolean>,p5 " +
+ "array<date>,p6 array<timestamp>) stored by 'carbondata'")
+ }
--- End diff --
Please remove this test case — it does not make use of the cluster. If it is
already covered in the unit tests, that is sufficient.
---