Repository: incubator-carbondata
Updated Branches:
  refs/heads/branch-1.1 d3bb59749 -> 950a6d0f5 (forced update)


Adding a decimal column without scale and precision is failing.


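For context, a hedged illustration of the DDL this fix covers (it mirrors the new test added below; the table and column names come from that test and are examples only, not part of the parser change):

    // Before this commit the Carbon DDL parser rejected a bare "decimal" type.
    // With the fix, the type falls back to decimal(10,0), the usual SQL default.
    sql("alter table restructure add columns(dcmldefault decimal)")
    // "desc restructure" now reports the new column as decimal(10,0)
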
Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/f0fe41b9
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/f0fe41b9
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/f0fe41b9

Branch: refs/heads/branch-1.1
Commit: f0fe41b9cc1b547062ede9557d6e397e1ce38974
Parents: 3b62d25
Author: nareshpr <prnaresh.nar...@gmail.com>
Authored: Thu Mar 30 16:52:51 2017 +0530
Committer: ravipesala <ravi.pes...@gmail.com>
Committed: Tue Apr 4 16:27:34 2017 +0530

----------------------------------------------------------------------
 .../org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala  | 9 ++++++---
 .../restructure/AlterTableValidationTestCase.scala          | 8 ++++++++
 2 files changed, 14 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f0fe41b9/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
index 8120942..9d2c245 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
@@ -873,9 +873,12 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser {
    * Matching the decimal(10,0) data type and returning the same.
    */
   private lazy val decimalType =
-  DECIMAL ~ ("(" ~> numericLit <~ ",") ~ (numericLit <~ ")") ^^ {
-    case decimal ~ precision ~ scale =>
-      s"$decimal($precision, $scale)"
+  DECIMAL ~ (("(" ~> numericLit <~ ",") ~ (numericLit <~ ")")).? ^^ {
+    case decimal ~ precisionAndScale => if (precisionAndScale.isDefined) {
+      s"$decimal(${ precisionAndScale.get._1 }, ${ precisionAndScale.get._2 })"
+    } else {
+      s"$decimal(10,0)"
+    }
   }
 
   protected lazy val nestedType: Parser[Field] = structFieldType | arrayFieldType |

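For readers unfamiliar with Scala parser combinators, here is a minimal, self-contained sketch of the same pattern used in the hunk above (it is not CarbonData code; it uses a case-sensitive "decimal" literal and JavaTokenParsers for brevity). The precision/scale group is wrapped in .?, making it optional, and the None branch supplies the (10,0) default:

    import scala.util.parsing.combinator.JavaTokenParsers

    // Standalone sketch of the optional precision/scale grammar
    // (requires the scala-parser-combinators module).
    object DecimalTypeSketch extends JavaTokenParsers {
      // "decimal", optionally followed by "(<precision>,<scale>)"
      lazy val decimalType: Parser[String] =
        "decimal" ~ (("(" ~> wholeNumber <~ ",") ~ (wholeNumber <~ ")")).? ^^ {
          case decimal ~ Some(precision ~ scale) => s"$decimal($precision,$scale)"
          case decimal ~ None                    => s"$decimal(10,0)"
        }

      def main(args: Array[String]): Unit = {
        println(parseAll(decimalType, "decimal(5,4)")) // parsed: decimal(5,4)
        println(parseAll(decimalType, "decimal"))      // parsed: decimal(10,0)
      }
    }
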
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f0fe41b9/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
index 0a59497..bd34913 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
@@ -104,6 +104,14 @@ class AlterTableValidationTestCase extends QueryTest with BeforeAndAfterAll {
     checkExistence(sql("desc restructure"), true, "dcmldecimal(5,4)")
   }
 
+  test(
+    "test add decimal without scale and precision, default precision and scale 
(10,0) should be " +
+    "used")
+  {
+    sql("alter table restructure add columns(dcmldefault decimal)")
+    checkExistence(sql("desc restructure"), true, "dcmldefaultdecimal(10,0)")
+  }
+
   test("test adding existing measure as dimension") {
     sql("alter table restructure add columns(dcmlfld decimal(5,4))")
     try {
