Github user xuchuanyin commented on a diff in the pull request:

    https://github.com/apache/carbondata/pull/2685#discussion_r214625972
  
    --- Diff: 
integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/SparkCarbonDataSourceTest.scala
 ---
    @@ -590,12 +596,60 @@ class SparkCarbonDataSourceTest extends FunSuite with 
BeforeAndAfterAll {
         }
       }
     
    +  test("test read using old data") {
    +    val store = new StoreCreator(new File(warehouse1).getAbsolutePath,
    +      new File(warehouse1 + 
"../../../../../hadoop/src/test/resources/data.csv").getCanonicalPath,
    +      false)
    +    store.createCarbonStore()
    +    FileFactory.deleteAllFilesOfDir(new 
File(warehouse1+"/testdb/testtable/Fact/Part0/Segment_0/0"))
    +    val dfread = 
spark.read.format("carbon").load(warehouse1+"/testdb/testtable/Fact/Part0/Segment_0")
    +    dfread.show(false)
    +    spark.sql("drop table if exists parquet_table")
    +  }
    +
    +  test("test read using different sort order data") {
    +    spark.sql("drop table if exists old_comp")
    +    FileFactory.deleteAllFilesOfDir(new File(warehouse1+"/testdb"))
    +    val store = new StoreCreator(new File(warehouse1).getAbsolutePath,
    +      new File(warehouse1 + 
"../../../../../hadoop/src/test/resources/data.csv").getCanonicalPath,
    +      false)
    +    store.setSortCOls(new util.ArrayList[String](Seq ("name").asJava))
    +    var model = store.createTableAndLoadModel(false)
    +    model.setSegmentId("0")
    +    store.createCarbonStore(model)
    +    FileFactory.deleteAllFilesOfDir(new 
File(warehouse1+"/testdb/testtable/Fact/Part0/Segment_0/0"))
    +    store.setSortCOls(new util.ArrayList[String](Seq 
("country,phonetype").asJava))
    +    model = store.createTableAndLoadModel(false)
    +    model.setSegmentId("1")
    +    store.createCarbonStore(model)
    +    FileFactory.deleteAllFilesOfDir(new 
File(warehouse1+"/testdb/testtable/Fact/Part0/Segment_1/0"))
    +    store.setSortCOls(new util.ArrayList[String](Seq ("date").asJava))
    +    model = store.createTableAndLoadModel(false)
    +    model.setSegmentId("2")
    +    store.createCarbonStore(model)
    +    FileFactory.deleteAllFilesOfDir(new 
File(warehouse1+"/testdb/testtable/Fact/Part0/Segment_2/0"))
    +    store.setSortCOls(new util.ArrayList[String](Seq 
("serialname").asJava))
    +    model = store.createTableAndLoadModel(false)
    +    model.setSegmentId("3")
    +    store.createCarbonStore(model)
    +    FileFactory.deleteAllFilesOfDir(new 
File(warehouse1+"/testdb/testtable/Fact/Part0/Segment_3/0"))
    +    spark.sql(s"create table old_comp(id int, date string, country string, 
name string, phonetype string, serialname string, salary int) using carbon 
options(path='$warehouse1/testdb/testtable/Fact/Part0/', 
'sort_columns'='name')")
    +    assert(spark.sql("select * from old_comp where 
country='china'").count() == 3396)
    +    assert(spark.sql("select * from old_comp ").count() == 4000)
    +    spark.sql("drop table if exists old_comp")
    +
    +    spark.sql(s"create table old_comp1 using carbon 
options(path='$warehouse1/testdb/testtable/Fact/Part0/')")
    +    assert(spark.sql("select * from old_comp1 where 
country='china'").count() == 3396)
    +    assert(spark.sql("select * from old_comp1 ").count() == 4000)
    +    spark.sql("drop table if exists old_comp1")
    +    FileFactory.deleteAllFilesOfDir(new File(warehouse1+"/testdb"))
    +  }
     
       override protected def beforeAll(): Unit = {
         drop
       }
     
    -  override def afterAll(): Unit = {
    --- End diff --
    
    This modification is unnecessary — it is unrelated to the purpose of this PR; please revert it.


---

Reply via email to