Repository: carbondata
Updated Branches:
  refs/heads/master 3e36639ed -> 9c12f5dc8


[HOTFIX] Fixed all examples

Fixed the examples and related code paths: renamed the DataFrame example's table to carbon_df_table, dropped the FILEHEADER option from CarbonSortColumnsExample's loads, re-seeded the table between negative tests in TestLoadDataGeneral, and made CarbonLateDecodeStrategy and CarbonSpark2SqlParser handle a missing catalog table or alias without calling .get on None.

This closes #2024


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/9c12f5dc
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/9c12f5dc
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/9c12f5dc

Branch: refs/heads/master
Commit: 9c12f5dc856ca034694841ec1537eaa0366eec96
Parents: 3e36639
Author: ravipesala <ravi.pes...@gmail.com>
Authored: Fri Mar 2 21:46:50 2018 +0530
Committer: chenliang613 <chenliang...@huawei.com>
Committed: Sat Mar 3 12:11:03 2018 +0800

----------------------------------------------------------------------
 .../apache/carbondata/examples/CarbonDataFrameExample.scala  | 8 ++++----
 .../carbondata/examples/CarbonSortColumnsExample.scala       | 6 ++----
 .../spark/testsuite/dataload/TestLoadDataGeneral.scala       | 1 +
 .../sql/execution/strategy/CarbonLateDecodeStrategy.scala    | 5 ++++-
 .../org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala  | 2 +-
 5 files changed, 12 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/9c12f5dc/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonDataFrameExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonDataFrameExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonDataFrameExample.scala
index fe15659..c8f8023 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonDataFrameExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonDataFrameExample.scala
@@ -54,13 +54,13 @@ object CarbonDataFrameExample {
     // Saves dataframe to carbondata file
     df.write
       .format("carbondata")
-      .option("tableName", "carbon_table")
+      .option("tableName", "carbon_df_table")
       .option("compress", "true")
       .option("tempCSV", "false")
       .mode(SaveMode.Overwrite)
       .save()
 
-    spark.sql(""" SELECT * FROM carbon_table """).show()
+    spark.sql(""" SELECT * FROM carbon_df_table """).show()
 
     // Specify schema
     import org.apache.spark.sql.types.{StructType, StructField, StringType, IntegerType}
@@ -74,7 +74,7 @@ object CarbonDataFrameExample {
       .format("carbondata")
       .schema(customSchema)
       // .option("dbname", "db_name") the system will use "default" as dbname if not set this option
-      .option("tableName", "carbon_table")
+      .option("tableName", "carbon_df_table")
       .load()
 
     // Dataframe operations
@@ -82,7 +82,7 @@ object CarbonDataFrameExample {
     carbondf.select($"c1", $"number" + 10).show()
     carbondf.filter($"number" > 31).show()
 
-    spark.sql("DROP TABLE IF EXISTS carbon_table")
+    spark.sql("DROP TABLE IF EXISTS carbon_df_table")
 
     spark.stop()
   }
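
The hunk above only renames the example's table from carbon_table to carbon_df_table, presumably so the DataFrame example no longer collides with the carbon_table that other examples create against the same metastore. As orientation, here is a minimal sketch of the resulting round trip; it assumes a CarbonData-enabled SparkSession named spark, and the column names are illustrative rather than taken from the example:

    import org.apache.spark.sql.SaveMode
    import spark.implicits._

    // Build a small DataFrame (illustrative columns).
    val df = spark.sparkContext.parallelize(1 to 10)
      .map(x => ("a" + x, x))
      .toDF("c1", "number")

    // Write it under the renamed, example-specific table name.
    df.write
      .format("carbondata")
      .option("tableName", "carbon_df_table")
      .option("tempCSV", "false")
      .mode(SaveMode.Overwrite)
      .save()

    // Read back through SQL, then clean up.
    spark.sql("SELECT * FROM carbon_df_table").show()
    spark.sql("DROP TABLE IF EXISTS carbon_df_table")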

http://git-wip-us.apache.org/repos/asf/carbondata/blob/9c12f5dc/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSortColumnsExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSortColumnsExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSortColumnsExample.scala
index 3a9f26b..8d0eabf 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSortColumnsExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSortColumnsExample.scala
@@ -99,15 +99,13 @@ object CarbonSortColumnsExample {
       s"""
          | LOAD DATA LOCAL INPATH '$path'
          | INTO TABLE no_sort_columns_table
-         | OPTIONS('FILEHEADER'='shortField,intField,bigintField,doubleField,stringField,timestampField,decimalField,dateField,charField,floatField,complexData',
-         | 'COMPLEX_DELIMITER_LEVEL_1'='#')
+         | OPTIONS('COMPLEX_DELIMITER_LEVEL_1'='#')
        """.stripMargin)
     spark.sql(
       s"""
          | LOAD DATA LOCAL INPATH '$path'
          | INTO TABLE sort_columns_table
-         | OPTIONS('FILEHEADER'='shortField,intField,bigintField,doubleField,stringField,timestampField,decimalField,dateField,charField,floatField,complexData',
-         | 'COMPLEX_DELIMITER_LEVEL_1'='#')
+         | OPTIONS('COMPLEX_DELIMITER_LEVEL_1'='#')
        """.stripMargin)
     // scalastyle:on
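
Both loads above drop the long FILEHEADER list and keep only the complex-type delimiter. In CarbonData's LOAD DATA, FILEHEADER supplies column names for a header-less CSV, so it is presumably redundant here because the example data file carries its own header row, and the hard-coded list is easy to let drift out of sync with the table schema. A hedged sketch of the simplified load, with spark and path as in the example:

    // The CSV's header row supplies the column names, so no FILEHEADER is needed:
    spark.sql(
      s"""
         | LOAD DATA LOCAL INPATH '$path'
         | INTO TABLE sort_columns_table
         | OPTIONS('COMPLEX_DELIMITER_LEVEL_1'='#')
       """.stripMargin)

    // For a header-less CSV the column list would go back in, e.g. (illustrative):
    //   OPTIONS('FILEHEADER'='shortField,intField,...', 'COMPLEX_DELIMITER_LEVEL_1'='#')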
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/9c12f5dc/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala
index 09ca9e5..ec4e143 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala
@@ -177,6 +177,7 @@ class TestLoadDataGeneral extends QueryTest with BeforeAndAfterEach {
     intercept[Exception] {
       sql("insert into load32000chardata_dup select dim1,concat(load32000chardata.dim2,'aaaa'),mes1 from load32000chardata").show()
     }
+    sql(s"LOAD DATA LOCAL INPATH '$testdata' into table load32000chardata_dup OPTIONS('FILEHEADER'='dim1,dim2,mes1')")
     intercept[Exception] {
       sql("update load32000chardata_dup set(load32000chardata_dup.dim2)=(select concat(load32000chardata.dim2,'aaaa') from load32000chardata)").show()
     }
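
The single added line re-seeds load32000chardata_dup between the two negative tests: the preceding insert is expected to fail, so the extra LOAD DATA presumably guarantees the following update has valid rows to rewrite and fails on the oversized value rather than for some other reason. A commented sketch of the resulting flow, assuming the QueryTest helpers (sql, intercept) and the testdata path are in scope as in the test:

    // Oversized string: the insert is expected to fail.
    intercept[Exception] {
      sql("insert into load32000chardata_dup select dim1,concat(load32000chardata.dim2,'aaaa'),mes1 from load32000chardata").show()
    }
    // Re-seed the duplicate table with valid rows before the next negative test.
    sql(s"LOAD DATA LOCAL INPATH '$testdata' into table load32000chardata_dup OPTIONS('FILEHEADER'='dim1,dim2,mes1')")
    // The update should now fail because of the oversized value, not an empty table.
    intercept[Exception] {
      sql("update load32000chardata_dup set(load32000chardata_dup.dim2)=(select concat(load32000chardata.dim2,'aaaa') from load32000chardata)").show()
    }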

http://git-wip-us.apache.org/repos/asf/carbondata/blob/9c12f5dc/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala
index 48679b1..668c4cc 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala
@@ -145,7 +145,10 @@ private[sql] class CarbonLateDecodeStrategy extends SparkStrategy {
       filterPredicates: Seq[Expression],
       scanBuilder: (Seq[Attribute], Array[Filter],
         ArrayBuffer[AttributeReference], Seq[PartitionSpec]) => RDD[InternalRow]) = {
-    val names = relation.catalogTable.get.partitionColumnNames
+    val names = relation.catalogTable match {
+      case Some(table) => table.partitionColumnNames
+      case _ => Seq.empty
+    }
     // Get the current partitions from table.
     var partitions: Seq[PartitionSpec] = null
     if (names.nonEmpty) {
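
The change above swaps relation.catalogTable.get for a pattern match: .get on a None throws NoSuchElementException, so a relation without a catalog table would previously crash here instead of simply being treated as unpartitioned. A small self-contained sketch of the same Option handling; CatalogTableLike is a stand-in for Spark's CatalogTable, not the real type:

    // Stand-in for the relevant slice of Spark's CatalogTable.
    case class CatalogTableLike(partitionColumnNames: Seq[String])

    // Safe: a missing catalog table just means "no partition columns".
    def partitionNames(catalogTable: Option[CatalogTableLike]): Seq[String] =
      catalogTable match {
        case Some(table) => table.partitionColumnNames
        case _           => Seq.empty
      }

    // Equivalent, more compact form of the same fallback.
    def partitionNamesCompact(catalogTable: Option[CatalogTableLike]): Seq[String] =
      catalogTable.map(_.partitionColumnNames).getOrElse(Seq.empty)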

http://git-wip-us.apache.org/repos/asf/carbondata/blob/9c12f5dc/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
index 7addd26..86790ba 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
@@ -227,7 +227,7 @@ class CarbonSpark2SqlParser extends CarbonDDLSqlParser {
             }
 
           } else {
-            (sel, updateRelation(tab._1, tab._2, tab._4, Some(tab._3.get)))
+            (sel, updateRelation(tab._1, tab._2, tab._4, tab._3))
           }
         val rel = tab._3 match {
          case Some(a) => UpdateTable(relation, columns, selectStmt, Some(tab._3.get), where)
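
The parser change is the same kind of Option fix: tab._3 (the optional table alias) was being forced open with .get and re-wrapped in Some, which throws NoSuchElementException if the alias is absent, whereas passing tab._3 through unchanged keeps the no-alias case valid. An illustrative sketch; updateRelationLike is a hypothetical stand-in for the parser's updateRelation:

    // Hypothetical helper standing in for updateRelation(table, ..., alias).
    def updateRelationLike(table: String, alias: Option[String]): String =
      alias.map(a => s"$table AS $a").getOrElse(table)

    val alias: Option[String] = None   // an UPDATE written without an alias

    // Before the fix: Some(alias.get) throws NoSuchElementException when alias is None.
    // updateRelationLike("t", Some(alias.get))

    // After the fix: the Option is passed through as-is.
    updateRelationLike("t", alias)     // "t"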
