Repository: carbondata
Updated Branches:
  refs/heads/master df5d7a99e -> 01a2a893f


[HOTFIX] Add the SDV-required CSV files to the list of files downloaded from
GitHub

Fixes the CI build.

This closes #2094


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/01a2a893
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/01a2a893
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/01a2a893

Branch: refs/heads/master
Commit: 01a2a893f3fb1c8eb4436d06258ea6f1f5468e60
Parents: df5d7a9
Author: Raghunandan S <[email protected]>
Authored: Fri Mar 23 14:06:39 2018 +0530
Committer: chenliang613 <[email protected]>
Committed: Sat Mar 24 10:03:29 2018 +0800

----------------------------------------------------------------------
 .../src/test/resources/testdatafileslist.txt    |   3 +-
 .../DataRetentionConcurrencyTestCase.scala      | 112 -------------------
 2 files changed, 2 insertions(+), 113 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/01a2a893/integration/spark-common-cluster-test/src/test/resources/testdatafileslist.txt
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-cluster-test/src/test/resources/testdatafileslist.txt
 
b/integration/spark-common-cluster-test/src/test/resources/testdatafileslist.txt
index 331a89d..7c95938 100644
--- 
a/integration/spark-common-cluster-test/src/test/resources/testdatafileslist.txt
+++ 
b/integration/spark-common-cluster-test/src/test/resources/testdatafileslist.txt
@@ -234,4 +234,5 @@ Data/splchar.csv
 source.csv
 Data/v1_version/metastore_db.zip
 Data/v1_version/store.zip
-Data/timeseriestest.csv
\ No newline at end of file
+Data/timeseriestest.csv
+Data/partition/list_partition_table.csv
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/01a2a893/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataretention/DataRetentionConcurrencyTestCase.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataretention/DataRetentionConcurrencyTestCase.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataretention/DataRetentionConcurrencyTestCase.scala
deleted file mode 100644
index 82c5b7b..0000000
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataretention/DataRetentionConcurrencyTestCase.scala
+++ /dev/null
@@ -1,112 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.spark.testsuite.dataretention
-
-import java.util
-import java.util.concurrent.{Callable, Executors, Future}
-
-import scala.collection.JavaConverters._
-
-import org.apache.spark.sql.test.util.QueryTest
-import org.scalatest.BeforeAndAfterAll
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
-
-
-/**
- * This class contains DataRetention concurrency test cases
- */
-class DataRetentionConcurrencyTestCase extends QueryTest with 
BeforeAndAfterAll {
-
-  private val executorService = Executors.newFixedThreadPool(10)
-
-  override def beforeAll {
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.MAX_TIMEOUT_FOR_CONCURRENT_LOCK, "1")
-    sql("drop table if exists concurrent")
-    sql(
-      "create table concurrent (ID int, date String, country String, name " +
-      "String," +
-      "phonetype String, serialname String, salary int) stored by 
'org.apache.carbondata.format'"
-
-    )
-    sql(
-      s"LOAD DATA LOCAL INPATH '$resourcesPath/dataretention1.csv' INTO TABLE 
concurrent " +
-      "OPTIONS('DELIMITER' =  ',')")
-  }
-
-  override def afterAll {
-    executorService.shutdownNow()
-    sql("drop table if exists concurrent")
-  }
-
-  ignore("DataRetention_Concurrency_load_id") {
-
-    val tasks = new util.ArrayList[Callable[String]]()
-    tasks
-      .add(new QueryTask(s"LOAD DATA LOCAL INPATH 
'$resourcesPath/dataretention1.csv' INTO TABLE " +
-                         s"concurrent OPTIONS('DELIMITER' =  ',')"))
-    tasks.add(new QueryTask("delete from table concurrent where segment.id in 
(0)"))
-    tasks.add(new QueryTask("clean files for table concurrent"))
-    val futures = executorService.invokeAll(tasks)
-    val results = futures.asScala.map(_.get)
-    for (i <- results.indices) {
-      assert("PASS".equals(results(i)))
-    }
-  }
-
-  test("DataRetention_Concurrency_load_date") {
-
-    sql(
-      s"LOAD DATA LOCAL INPATH '$resourcesPath/dataretention1.csv' INTO TABLE 
concurrent " +
-      "OPTIONS('DELIMITER' =  ',')")
-
-    val tasks = new util.ArrayList[Callable[String]]()
-    tasks
-      .add(new QueryTask(s"LOAD DATA LOCAL INPATH 
'$resourcesPath/dataretention1.csv' INTO TABLE " +
-                         s"concurrent OPTIONS('DELIMITER' =  ',')"))
-    tasks
-      .add(new QueryTask(
-        "delete from table concurrent where segment.starttime before 
'2099-01-01 00:00:00'"))
-    tasks.add(new QueryTask("clean files for table concurrent"))
-
-    val futures: util.List[Future[String]] = executorService.invokeAll(tasks)
-
-    val results = futures.asScala.map(_.get)
-    for (i <- results.indices) {
-      assert("PASS".equals(results(i)))
-    }
-  }
-
-  class QueryTask(query: String) extends Callable[String] {
-    override def call(): String = {
-      var result = "PASS"
-      try {
-        LOGGER.info("Executing :" + Thread.currentThread().getName)
-        sql(query)
-      } catch {
-        case ex: Exception =>
-          ex.printStackTrace()
-          result = "FAIL"
-      }
-      result
-    }
-  }
-
-}

Reply via email to the mailing list.