Repository: incubator-impala
Updated Branches:
  refs/heads/master 60414f063 -> bb36433b1


http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/3db5ced4/fe/src/main/jflex/sql-scanner.flex
----------------------------------------------------------------------
diff --git a/fe/src/main/jflex/sql-scanner.flex 
b/fe/src/main/jflex/sql-scanner.flex
index 099ceda..3d3b24d 100644
--- a/fe/src/main/jflex/sql-scanner.flex
+++ b/fe/src/main/jflex/sql-scanner.flex
@@ -68,6 +68,7 @@ import org.apache.impala.analysis.SqlParserSymbols;
     keywordMap.put("between", new Integer(SqlParserSymbols.KW_BETWEEN));
     keywordMap.put("bigint", new Integer(SqlParserSymbols.KW_BIGINT));
     keywordMap.put("binary", new Integer(SqlParserSymbols.KW_BINARY));
+    keywordMap.put("block_size", new Integer(SqlParserSymbols.KW_BLOCKSIZE));
     keywordMap.put("boolean", new Integer(SqlParserSymbols.KW_BOOLEAN));
     keywordMap.put("buckets", new Integer(SqlParserSymbols.KW_BUCKETS));
     keywordMap.put("by", new Integer(SqlParserSymbols.KW_BY));
@@ -82,6 +83,7 @@ import org.apache.impala.analysis.SqlParserSymbols;
     keywordMap.put("column", new Integer(SqlParserSymbols.KW_COLUMN));
     keywordMap.put("columns", new Integer(SqlParserSymbols.KW_COLUMNS));
     keywordMap.put("comment", new Integer(SqlParserSymbols.KW_COMMENT));
+    keywordMap.put("compression", new Integer(SqlParserSymbols.KW_COMPRESSION));
     keywordMap.put("compute", new Integer(SqlParserSymbols.KW_COMPUTE));
     keywordMap.put("create", new Integer(SqlParserSymbols.KW_CREATE));
     keywordMap.put("cross", new Integer(SqlParserSymbols.KW_CROSS));
@@ -92,6 +94,7 @@ import org.apache.impala.analysis.SqlParserSymbols;
     keywordMap.put("date", new Integer(SqlParserSymbols.KW_DATE));
     keywordMap.put("datetime", new Integer(SqlParserSymbols.KW_DATETIME));
     keywordMap.put("decimal", new Integer(SqlParserSymbols.KW_DECIMAL));
+    keywordMap.put("default", new Integer(SqlParserSymbols.KW_DEFAULT));
     keywordMap.put("delete", new Integer(SqlParserSymbols.KW_DELETE));
     keywordMap.put("delimited", new Integer(SqlParserSymbols.KW_DELIMITED));
     keywordMap.put("desc", new Integer(SqlParserSymbols.KW_DESC));
@@ -102,6 +105,7 @@ import org.apache.impala.analysis.SqlParserSymbols;
     keywordMap.put("double", new Integer(SqlParserSymbols.KW_DOUBLE));
     keywordMap.put("drop", new Integer(SqlParserSymbols.KW_DROP));
     keywordMap.put("else", new Integer(SqlParserSymbols.KW_ELSE));
+    keywordMap.put("encoding", new Integer(SqlParserSymbols.KW_ENCODING));
     keywordMap.put("end", new Integer(SqlParserSymbols.KW_END));
     keywordMap.put("escaped", new Integer(SqlParserSymbols.KW_ESCAPED));
     keywordMap.put("exists", new Integer(SqlParserSymbols.KW_EXISTS));

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/3db5ced4/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java 
b/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java
index 0a78dd5..ed900bf 100644
--- a/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java
@@ -48,6 +48,8 @@ import org.apache.impala.common.FrontendTestBase;
 import org.apache.impala.common.RuntimeEnv;
 import org.apache.impala.testutil.TestUtils;
 import org.apache.impala.util.MetaStoreUtil;
+import org.apache.kudu.ColumnSchema.CompressionAlgorithm;
+import org.apache.kudu.ColumnSchema.Encoding;
 import org.junit.Test;
 
 import com.google.common.base.Joiner;
@@ -1615,6 +1617,9 @@ public class AnalyzeDDLTest extends FrontendTestBase {
     // Supported file formats. Exclude Avro since it is tested separately.
     String [] fileFormats =
         {"TEXTFILE", "SEQUENCEFILE", "PARQUET", "PARQUETFILE", "RCFILE"};
+    String [] fileFormatsStr =
+        {"TEXT", "SEQUENCE_FILE", "PARQUET", "PARQUET", "RC_FILE"};
+    int formatIndx = 0;
     for (String format: fileFormats) {
       for (String create: ImmutableList.of("create table", "create external 
table")) {
         AnalyzesOk(String.format("%s new_table (i int) " +
@@ -1625,9 +1630,11 @@ public class AnalyzeDDLTest extends FrontendTestBase {
             "Table requires at least 1 column");
       }
       AnalysisError(String.format("create table t (i int primary key) stored 
as %s",
-          format), "Only Kudu tables can specify a PRIMARY KEY");
+          format), String.format("Unsupported column options for file format " +
+              "'%s': 'i INT PRIMARY KEY'", fileFormatsStr[formatIndx]));
       AnalysisError(String.format("create table t (i int, primary key(i)) 
stored as %s",
           format), "Only Kudu tables can specify a PRIMARY KEY");
+      formatIndx++;
     }
 
     // Note: Backslashes need to be escaped twice - once for Java and once for 
Impala.
@@ -1986,7 +1993,7 @@ public class AnalyzeDDLTest extends FrontendTestBase {
         "partition 10 < values <= 30, partition 30 < values) " +
         "stored as kudu tblproperties('kudu.master_addresses'='127.0.0.1')");
     // Not using the STORED AS KUDU syntax to specify a Kudu table
-    AnalysisError("create table tab (x int primary key) tblproperties (" +
+    AnalysisError("create table tab (x int) tblproperties (" +
         "'storage_handler'='com.cloudera.kudu.hive.KuduStorageHandler')",
         CreateTableStmt.KUDU_STORAGE_HANDLER_ERROR_MESSAGE);
     AnalysisError("create table tab (x int primary key) stored as kudu 
tblproperties (" +
@@ -2035,6 +2042,86 @@ public class AnalyzeDDLTest extends FrontendTestBase {
           "distribute by hash(x) into 3 buckets stored as kudu", t);
       AnalysisError(stmt, expectedError);
     }
+
+    // Test column options
+    String[] nullability = {"not null", "null", ""};
+    String[] defaultVal = {"default 10", ""};
+    String[] blockSize = {"block_size 4096", ""};
+    for (Encoding enc: Encoding.values()) {
+      for (CompressionAlgorithm comp: CompressionAlgorithm.values()) {
+        for (String nul: nullability) {
+          for (String def: defaultVal) {
+            for (String block: blockSize) {
+              AnalyzesOk(String.format("create table tab (x int primary key " +
+                  "not null encoding %s compression %s %s %s, y int encoding 
%s " +
+                  "compression %s %s %s %s) distribute by hash (x) " +
+                  "into 3 buckets stored as kudu", enc, comp, def, block, enc,
+                  comp, def, nul, block));
+            }
+          }
+        }
+      }
+    }
+    // Primary key specified using the PRIMARY KEY clause
+    AnalyzesOk("create table tab (x int not null encoding plain_encoding " +
+        "compression snappy block_size 1, y int null encoding rle compression 
lz4 " +
+        "default 1, primary key(x)) distribute by hash (x) into 3 buckets " +
+        "stored as kudu");
+    // Primary keys can't be null
+    AnalysisError("create table tab (x int primary key null, y int not null) " +
+        "distribute by hash (x) into 3 buckets stored as kudu", "Primary key columns " +
+        "cannot be nullable: x INT PRIMARY KEY NULL");
+    AnalysisError("create table tab (x int not null, y int null, primary key 
(x, y)) " +
+        "distribute by hash (x) into 3 buckets stored as kudu", "Primary key 
columns " +
+        "cannot be nullable: y INT NULL");
+    // Unsupported encoding value
+    AnalysisError("create table tab (x int primary key, y int encoding 
invalid_enc) " +
+        "distribute by hash (x) into 3 buckets stored as kudu", "Unsupported 
encoding " +
+        "value 'INVALID_ENC'. Supported encoding values are: " +
+        Joiner.on(", ").join(Encoding.values()));
+    // Unsupported compression algorithm
+    AnalysisError("create table tab (x int primary key, y int compression " +
+        "invalid_comp) distribute by hash (x) into 3 buckets stored as kudu",
+        "Unsupported compression algorithm 'INVALID_COMP'. Supported 
compression " +
+        "algorithms are: " + Joiner.on(", 
").join(CompressionAlgorithm.values()));
+    // Default values
+    AnalyzesOk("create table tab (i1 tinyint default 1, i2 smallint default 
10, " +
+        "i3 int default 100, i4 bigint default 1000, vals string default 
'test', " +
+        "valf float default cast(1.2 as float), vald double default " +
+        "cast(3.1452 as double), valb boolean default true, " +
+        "primary key (i1, i2, i3, i4, vals)) distribute by hash (i1) into 3 " +
+        "buckets stored as kudu");
+    AnalyzesOk("create table tab (i int primary key default 1+1+1) " +
+        "distribute by hash (i) into 3 buckets stored as kudu");
+    AnalyzesOk("create table tab (i int primary key default factorial(5)) " +
+        "distribute by hash (i) into 3 buckets stored as kudu");
+    AnalyzesOk("create table tab (i int primary key, x int null default " +
+        "isnull(null, null)) distribute by hash (i) into 3 buckets stored as 
kudu");
+    // Invalid default values
+    AnalysisError("create table tab (i int primary key default 'string_val') " 
+
+        "distribute by hash (i) into 3 buckets stored as kudu", "Default value 
" +
+        "'string_val' (type: STRING) is not compatible with column 'i' (type: 
INT).");
+    AnalysisError("create table tab (i int primary key, x int default 1.1) " +
+        "distribute by hash (i) into 3 buckets stored as kudu",
+        "Default value 1.1 (type: DECIMAL(2,1)) is not compatible with column 
" +
+        "'x' (type: INT).");
+    AnalysisError("create table tab (i tinyint primary key default 128) " +
+        "distribute by hash (i) into 3 buckets stored as kudu", "Default value 
" +
+        "128 (type: SMALLINT) is not compatible with column 'i' (type: 
TINYINT).");
+    AnalysisError("create table tab (i int primary key default isnull(null, 
null)) " +
+        "distribute by hash (i) into 3 buckets stored as kudu", "Default value 
of " +
+        "NULL not allowed on non-nullable column: 'i'");
+    AnalysisError("create table tab (i int primary key, x int not null " +
+        "default isnull(null, null)) distribute by hash (i) into 3 buckets " +
+        "stored as kudu", "Default value of NULL not allowed on non-nullable 
column: " +
+        "'x'");
+    // Invalid block_size values
+    AnalysisError("create table tab (i int primary key block_size 1.1) " +
+        "distribute by hash (i) into 3 buckets stored as kudu", "Invalid value 
" +
+        "for BLOCK_SIZE: 1.1. A positive INTEGER value is expected.");
+    AnalysisError("create table tab (i int primary key block_size 'val') " +
+        "distribute by hash (i) into 3 buckets stored as kudu", "Invalid value 
" +
+        "for BLOCK_SIZE: 'val'. A positive INTEGER value is expected.");
   }
 
   @Test
@@ -2279,11 +2366,12 @@ public class AnalyzeDDLTest extends FrontendTestBase {
         "Type 'STRUCT<f1:INT>' is not supported as partition-column type in 
column: x");
 
     // Kudu specific clauses used in an Avro table.
-    AnalysisError("create table functional.new_table (i int primary key) " +
+    AnalysisError("create table functional.new_table (i int) " +
         "distribute by hash(i) into 3 buckets stored as avro",
         "Only Kudu tables can use the DISTRIBUTE BY clause.");
     AnalysisError("create table functional.new_table (i int primary key) " +
-        "stored as avro", "Only Kudu tables can specify a PRIMARY KEY.");
+        "stored as avro", "Unsupported column options for file format 'AVRO': 
" +
+        "'i INT PRIMARY KEY'");
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/3db5ced4/fe/src/test/java/org/apache/impala/analysis/ParserTest.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/analysis/ParserTest.java 
b/fe/src/test/java/org/apache/impala/analysis/ParserTest.java
index 69c90da..3cef4ff 100644
--- a/fe/src/test/java/org/apache/impala/analysis/ParserTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/ParserTest.java
@@ -929,8 +929,8 @@ public class ParserTest extends FrontendTestBase {
   @Test
   public void TestIdentQuoting() {
     ParsesOk("select a from `t`");
-    ParsesOk("select a from `default`.`t`");
-    ParsesOk("select a from `default`.t");
+    ParsesOk("select a from `default`.`t`");
+    ParsesOk("select a from default.t");
     ParsesOk("select a from default.`t`");
     ParsesOk("select 01a from default.`01_t`");
 
@@ -962,7 +962,7 @@ public class ParserTest extends FrontendTestBase {
 
     // Quoted identifiers can contain any characters except "`".
     ParsesOk("select a from `all types`");
-    ParsesOk("select a from `default`.`all types`");
+    ParsesOk("select a from default.`all types`");
     ParsesOk("select a from `~!@#$%^&*()-_=+|;:'\",<.>/?`");
     // Quoted identifiers do not unescape escape sequences.
     ParsesOk("select a from `ab\rabc`");
@@ -1676,7 +1676,7 @@ public class ParserTest extends FrontendTestBase {
 
   @Test
   public void TestKuduUpdate() {
     TestUtils.assumeKuduIsSupported();
     ParserError("update (select * from functional_kudu.testtbl) a set name = 
'10'");
   }
 
@@ -2456,6 +2456,51 @@ public class ParserTest extends FrontendTestBase {
         "(PARTITION VALUES = 10) STORED AS KUDU");
     ParserError("CREATE TABLE Foo (a int) DISTRIBUTE BY RANGE (a) " +
         "(PARTITION 10 < VALUE < 20) STORED AS KUDU");
+
+    // Column options for Kudu tables
+    String[] encodings = {"encoding auto_encoding", "encoding plain_encoding",
+        "encoding prefix_encoding", "encoding group_varint", "encoding rle",
+        "encoding dict_encoding", "encoding bit_shuffle", "encoding unknown", 
""};
+    String[] compression = {"compression default_compression",
+        "compression no_compression", "compression snappy", "compression lz4",
+        "compression zlib", "compression unknown", ""};
+
+    String[] nullability = {"not null", "null", ""};
+    String[] defaultVal = {"default 10", ""};
+    String[] blockSize = {"block_size 4096", ""};
+    for (String enc: encodings) {
+      for (String comp: compression) {
+        for (String nul: nullability) {
+          for (String def: defaultVal) {
+            for (String block: blockSize) {
+              ParsesOk(String.format("CREATE TABLE Foo (i int PRIMARY KEY " +
+                  "%s %s %s %s %s) STORED AS KUDU", nul, enc, comp, def, 
block));
+              ParsesOk(String.format("CREATE TABLE Foo (i int PRIMARY KEY " +
+                  "%s %s %s %s %s) STORED AS KUDU", block, nul, enc, comp, 
def));
+              ParsesOk(String.format("CREATE TABLE Foo (i int PRIMARY KEY " +
+                  "%s %s %s %s %s) STORED AS KUDU", def, block, nul, enc, 
comp));
+              ParsesOk(String.format("CREATE TABLE Foo (i int PRIMARY KEY " +
+                  "%s %s %s %s %s) STORED AS KUDU", comp, def, block, nul, 
enc));
+              ParsesOk(String.format("CREATE TABLE Foo (i int PRIMARY KEY " +
+                  "%s %s %s %s %s) STORED AS KUDU", enc, comp, def, block, 
nul));
+              ParsesOk(String.format("CREATE TABLE Foo (i int PRIMARY KEY " +
+                  "%s %s %s %s %s) STORED AS KUDU", enc, comp, block, def, 
nul));
+            }
+          }
+        }
+      }
+    }
+    // Column option is specified multiple times for the same column
+    ParserError("CREATE TABLE Foo(a int PRIMARY KEY ENCODING RLE ENCODING 
PLAIN) " +
+        "STORED AS KUDU");
+    // Constant expr used in DEFAULT
+    ParsesOk("CREATE TABLE Foo(a int PRIMARY KEY, b int DEFAULT 1+1) STORED AS 
KUDU");
+    ParsesOk("CREATE TABLE Foo(a int PRIMARY KEY, b float DEFAULT cast(1.1 as 
float)) " +
+        "STORED AS KUDU");
+    // Non-literal value used in BLOCK_SIZE
+    ParserError("CREATE TABLE Foo(a int PRIMARY KEY, b int BLOCK_SIZE 1+1) " +
+        "STORED AS KUDU");
+    ParserError("CREATE TABLE Foo(a int PRIMARY KEY BLOCK_SIZE -1) STORED AS 
KUDU");
   }
 
   @Test
@@ -2886,7 +2931,7 @@ public class ParserTest extends FrontendTestBase {
         "select from t\n" +
         "       ^\n" +
         "Encountered: FROM\n" +
-        "Expected: ALL, CASE, CAST, DISTINCT, EXISTS, " +
+        "Expected: ALL, CASE, CAST, DEFAULT, DISTINCT, EXISTS, " +
         "FALSE, IF, INTERVAL, NOT, NULL, " +
         "STRAIGHT_JOIN, TRUNCATE, TRUE, IDENTIFIER\n");
 
@@ -2896,8 +2941,8 @@ public class ParserTest extends FrontendTestBase {
         "select c, b, c where a = 5\n" +
         "               ^\n" +
         "Encountered: WHERE\n" +
-        "Expected: AND, AS, BETWEEN, DIV, FROM, ILIKE, IN, IREGEXP, IS, LIKE, 
LIMIT, NOT, OR, " +
-        "ORDER, REGEXP, RLIKE, UNION, COMMA, IDENTIFIER\n");
+        "Expected: AND, AS, BETWEEN, DEFAULT, DIV, FROM, ILIKE, IN, IREGEXP, 
IS, LIKE, " +
+        "LIMIT, NOT, OR, ORDER, REGEXP, RLIKE, UNION, COMMA, IDENTIFIER\n");
 
     // missing table list
     ParserError("select c, b, c from where a = 5",
@@ -2905,7 +2950,7 @@ public class ParserTest extends FrontendTestBase {
         "select c, b, c from where a = 5\n" +
         "                    ^\n" +
         "Encountered: WHERE\n" +
-        "Expected: IDENTIFIER\n");
+        "Expected: DEFAULT, IDENTIFIER\n");
 
     // missing predicate in where clause (no group by)
     ParserError("select c, b, c from t where",
@@ -2913,7 +2958,7 @@ public class ParserTest extends FrontendTestBase {
         "select c, b, c from t where\n" +
         "                           ^\n" +
         "Encountered: EOF\n" +
-        "Expected: CASE, CAST, EXISTS, FALSE, " +
+        "Expected: CASE, CAST, DEFAULT, EXISTS, FALSE, " +
         "IF, INTERVAL, NOT, NULL, TRUNCATE, TRUE, IDENTIFIER\n");
 
     // missing predicate in where clause (group by)
@@ -2922,7 +2967,7 @@ public class ParserTest extends FrontendTestBase {
         "select c, b, c from t where group by a, b\n" +
         "                            ^\n" +
         "Encountered: GROUP\n" +
-        "Expected: CASE, CAST, EXISTS, FALSE, " +
+        "Expected: CASE, CAST, DEFAULT, EXISTS, FALSE, " +
         "IF, INTERVAL, NOT, NULL, TRUNCATE, TRUE, IDENTIFIER\n");
 
     // unmatched string literal starting with "
@@ -2983,7 +3028,7 @@ public class ParserTest extends FrontendTestBase {
         "...c,c,c,c,c,c,c,c,cd,c,d,d, ,c, from t\n" +
         "                             ^\n" +
         "Encountered: COMMA\n" +
-        "Expected: CASE, CAST, EXISTS, FALSE, " +
+        "Expected: CASE, CAST, DEFAULT, EXISTS, FALSE, " +
         "IF, INTERVAL, NOT, NULL, TRUNCATE, TRUE, IDENTIFIER\n");
 
     // Parsing identifiers that have different names printed as EXPECTED
@@ -3004,7 +3049,7 @@ public class ParserTest extends FrontendTestBase {
         "USE ` `\n" +
         "    ^\n" +
         "Encountered: EMPTY IDENTIFIER\n" +
-        "Expected: IDENTIFIER\n");
+        "Expected: DEFAULT, IDENTIFIER\n");
 
     // Expecting = token
     ParserError("SET foo",

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/3db5ced4/testdata/datasets/functional/functional_schema_template.sql
----------------------------------------------------------------------
diff --git a/testdata/datasets/functional/functional_schema_template.sql 
b/testdata/datasets/functional/functional_schema_template.sql
index be6b232..1940c31 100644
--- a/testdata/datasets/functional/functional_schema_template.sql
+++ b/testdata/datasets/functional/functional_schema_template.sql
@@ -551,20 +551,20 @@ DROP TABLE IF EXISTS 
{db_name}{db_suffix}.{table_name}_idx;
 
 CREATE TABLE {db_name}{db_suffix}.{table_name}_idx (
   kudu_idx BIGINT PRIMARY KEY,
-  id INT,
-  bool_col BOOLEAN,
-  tinyint_col TINYINT,
-  smallint_col SMALLINT,
-  int_col INT,
-  bigint_col BIGINT,
-  float_col FLOAT,
-  double_col DOUBLE,
-  date_string_col STRING,
-  string_col STRING,
-  timestamp_col STRING,
-  year INT,
-  month INT,
-  day INT
+  id INT NULL,
+  bool_col BOOLEAN NULL,
+  tinyint_col TINYINT NULL,
+  smallint_col SMALLINT NULL,
+  int_col INT NULL,
+  bigint_col BIGINT NULL,
+  float_col FLOAT NULL,
+  double_col DOUBLE NULL,
+  date_string_col STRING NULL,
+  string_col STRING NULL,
+  timestamp_col STRING NULL,
+  year INT NULL,
+  month INT NULL,
+  day INT NULL
 )
 DISTRIBUTE BY HASH (kudu_idx) INTO 3 BUCKETS STORED AS KUDU;
 CREATE VIEW {db_name}{db_suffix}.{table_name} AS
@@ -763,8 +763,8 @@ delimited fields terminated by ','  escaped by '\\'
 DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name};
 create table {db_name}{db_suffix}.{table_name} (
   id bigint primary key,
-  name string,
-  zip int
+  name string null,
+  zip int null
 )
 distribute by range(id) (partition values <= 1003, partition 1003 < values <= 
1007,
 partition 1007 < values) stored as kudu;
@@ -1310,7 +1310,8 @@ OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
 ---- CREATE_KUDU
 DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name};
 create table {db_name}{db_suffix}.{table_name} (
-  a string primary key, b string, c string, d int, e double, f string, g string
+  a string primary key, b string null, c string null, d int null, e double 
null,
+  f string null, g string null
 )
 distribute by hash(a) into 3 buckets stored as kudu;
 ====
@@ -1412,10 +1413,10 @@ LOAD DATA LOCAL INPATH 
'{impala_home}/testdata/ImpalaDemoDataset/DEC_00_SF3_P077
 DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name};
 create table {db_name}{db_suffix}.{table_name} (
   id string primary key,
-  zip string,
-  description1 string,
-  description2 string,
-  income int)
+  zip string null,
+  description1 string null,
+  description2 string null,
+  income int null)
 distribute by range(id)
 (partition values <= '8600000US01475',
  partition '8600000US01475' < values <= '8600000US63121',

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/3db5ced4/testdata/workloads/functional-query/queries/QueryTest/kudu_delete.test
----------------------------------------------------------------------
diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/kudu_delete.test 
b/testdata/workloads/functional-query/queries/QueryTest/kudu_delete.test
index ac1bcc4..9e2a924 100644
--- a/testdata/workloads/functional-query/queries/QueryTest/kudu_delete.test
+++ b/testdata/workloads/functional-query/queries/QueryTest/kudu_delete.test
@@ -1,8 +1,8 @@
 ====
 ---- QUERY
 create table tdata
-  (id int primary key, valf float, vali bigint, valv string, valb boolean, 
valt tinyint,
-      vals smallint, vald double)
+  (id int primary key, valf float null, vali bigint null, valv string null,
+   valb boolean null, valt tinyint null, vals smallint null, vald double null)
   DISTRIBUTE BY RANGE (PARTITION VALUES < 100, PARTITION 100 <= VALUES < 1000,
   PARTITION 1000 <= VALUES <= 10000) STORED AS KUDU
 ---- RESULTS
@@ -297,8 +297,9 @@ INT,FLOAT,BIGINT,STRING,BOOLEAN,TINYINT,SMALLINT,DOUBLE
 ====
 ---- QUERY
 create table multiple_key_cols
-  (string_col string, bigint_col bigint, tinyint_col tinyint, smallint_col 
smallint,
-   bool_col boolean, int_col int, double_col double, float_col float,
+  (string_col string, bigint_col bigint, tinyint_col tinyint,
+   smallint_col smallint, bool_col boolean null, int_col int null,
+   double_col double null, float_col float null,
    primary key (string_col, bigint_col, tinyint_col, smallint_col))
   DISTRIBUTE BY HASH (string_col) INTO 16 BUCKETS STORED AS KUDU
 ====

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/3db5ced4/testdata/workloads/functional-query/queries/QueryTest/kudu_insert.test
----------------------------------------------------------------------
diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/kudu_insert.test 
b/testdata/workloads/functional-query/queries/QueryTest/kudu_insert.test
index 44cae4a..759dc5e 100644
--- a/testdata/workloads/functional-query/queries/QueryTest/kudu_insert.test
+++ b/testdata/workloads/functional-query/queries/QueryTest/kudu_insert.test
@@ -1,8 +1,8 @@
 ====
 ---- QUERY
 create table tdata
-  (id int primary key, valf float, vali bigint, valv string, valb boolean, 
valt tinyint,
-   vals smallint, vald double)
+  (id int primary key, valf float null, vali bigint null, valv string null,
+   valb boolean null, valt tinyint null, vals smallint null, vald double null)
   DISTRIBUTE BY RANGE (PARTITION VALUES < 10, PARTITION 10 <= VALUES < 30,
   PARTITION 30 <= VALUES) STORED AS KUDU
 ---- RESULTS

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/3db5ced4/testdata/workloads/functional-query/queries/QueryTest/kudu_update.test
----------------------------------------------------------------------
diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/kudu_update.test 
b/testdata/workloads/functional-query/queries/QueryTest/kudu_update.test
index fe0a6b1..27120f5 100644
--- a/testdata/workloads/functional-query/queries/QueryTest/kudu_update.test
+++ b/testdata/workloads/functional-query/queries/QueryTest/kudu_update.test
@@ -1,8 +1,9 @@
 ====
 ---- QUERY
 create table tdata
-  (id int primary key, name string, valf float, vali bigint, valv string, valb 
boolean,
-   valt tinyint, vals smallint, vald double)
+  (id int primary key, name string null, valf float null, vali bigint null,
+   valv string null, valb boolean null, valt tinyint null, vals smallint null,
+   vald double null)
   DISTRIBUTE BY RANGE (PARTITION VALUES < 10, PARTITION 10 <= VALUES < 30,
   PARTITION 30 <= VALUES <= 10000) STORED AS KUDU
 ---- RESULTS
@@ -337,4 +338,4 @@ update tdata set vali = -1
 ---- RUNTIME_PROFILE
 NumModifiedRows: 7300
 NumRowErrors: 0
-====
\ No newline at end of file
+====

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/3db5ced4/testdata/workloads/functional-query/queries/QueryTest/kudu_upsert.test
----------------------------------------------------------------------
diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/kudu_upsert.test 
b/testdata/workloads/functional-query/queries/QueryTest/kudu_upsert.test
index 5b5752a..0f117f1 100644
--- a/testdata/workloads/functional-query/queries/QueryTest/kudu_upsert.test
+++ b/testdata/workloads/functional-query/queries/QueryTest/kudu_upsert.test
@@ -1,8 +1,9 @@
 ====
 ---- QUERY
 create table tdata
-  (id int primary key, name string, valf float, vali bigint, valv string, valb 
boolean,
-   valt tinyint, vals smallint, vald double)
+  (id int primary key, name string null, valf float null, vali bigint null,
+   valv string null, valb boolean null, valt tinyint null, vals smallint null,
+   vald double null)
   DISTRIBUTE BY RANGE (PARTITION VALUES < 10, PARTITION 10 <= VALUES < 30,
   PARTITION 30 <= VALUES) STORED AS KUDU
 ---- RESULTS
@@ -389,8 +390,8 @@ NumRowErrors: 0
 ---- QUERY
 create table multiple_key_cols
   (string_col string, bigint_col bigint, tinyint_col tinyint, smallint_col 
smallint,
-   bool_col boolean, int_col int, double_col double, float_col float,
-   primary key (string_col, bigint_col, tinyint_col, smallint_col))
+   bool_col boolean null, int_col int null, double_col double null,
+   float_col float null, primary key (string_col, bigint_col, tinyint_col, 
smallint_col))
   DISTRIBUTE BY HASH (string_col) INTO 16 BUCKETS STORED AS KUDU
 ====
 ---- QUERY
@@ -488,4 +489,4 @@ upsert into table multiple_key_cols
 (string_col, tinyint_col, smallint_col) values ('a', 1, 1)
 ---- CATCH
 All primary key columns must be specified for UPSERTing into Kudu tables. 
Missing columns are: bigint_col
-====
\ No newline at end of file
+====

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/3db5ced4/tests/query_test/test_kudu.py
----------------------------------------------------------------------
diff --git a/tests/query_test/test_kudu.py b/tests/query_test/test_kudu.py
index 908fc54..996931d 100644
--- a/tests/query_test/test_kudu.py
+++ b/tests/query_test/test_kudu.py
@@ -64,6 +64,27 @@ class TestKuduOperations(KuduTestSuite):
   def test_kudu_stats(self, vector, unique_database):
     self.run_test_case('QueryTest/kudu_stats', vector, use_db=unique_database)
 
+  def test_kudu_column_options(self, cursor, kudu_client, unique_database):
+    encodings = ["ENCODING PLAIN_ENCODING", ""]
+    compressions = ["COMPRESSION SNAPPY", ""]
+    nullability = ["NOT NULL", "NULL", ""]
+    defaults = ["DEFAULT 1", ""]
+    blocksizes = ["BLOCK_SIZE 32768", ""]
+    indx = 1
+    for encoding in encodings:
+      for compression in compressions:
+        for default in defaults:
+          for blocksize in blocksizes:
+            for nullable in nullability:
+              impala_tbl_name = "test_column_options_%s" % str(indx)
+              cursor.execute("""CREATE TABLE %s.%s (a INT PRIMARY KEY
+                  %s %s %s %s, b INT %s %s %s %s %s) DISTRIBUTE BY HASH (a) 
INTO 3
+                  BUCKETS STORED AS KUDU""" % (unique_database, 
impala_tbl_name,
+                  encoding, compression, default, blocksize, nullable, 
encoding,
+                  compression, default, blocksize))
+              indx = indx + 1
+              kudu_tbl_name = "impala::%s.%s" % (unique_database, 
impala_tbl_name)
+              assert kudu_client.table_exists(kudu_tbl_name)
 
 class TestCreateExternalTable(KuduTestSuite):
 
@@ -228,13 +249,14 @@ class TestShowCreateTable(KuduTestSuite):
 
   def test_primary_key_and_distribution(self, cursor):
     # TODO: Add test cases with column comments once KUDU-1711 is fixed.
+    # TODO: Add case with BLOCK_SIZE
     self.assert_show_create_equals(cursor,
         """
         CREATE TABLE {table} (c INT PRIMARY KEY)
         DISTRIBUTE BY HASH (c) INTO 3 BUCKETS STORED AS KUDU""",
         """
         CREATE TABLE {db}.{{table}} (
-          c INT,
+          c INT NOT NULL ENCODING AUTO_ENCODING COMPRESSION 
DEFAULT_COMPRESSION,
           PRIMARY KEY (c)
         )
         DISTRIBUTE BY HASH (c) INTO 3 BUCKETS
@@ -243,14 +265,14 @@ class TestShowCreateTable(KuduTestSuite):
             db=cursor.conn.db_name, kudu_addr=KUDU_MASTER_HOSTS))
     self.assert_show_create_equals(cursor,
         """
-        CREATE TABLE {table} (c INT PRIMARY KEY, d STRING)
+        CREATE TABLE {table} (c INT PRIMARY KEY, d STRING NULL)
         DISTRIBUTE BY HASH (c) INTO 3 BUCKETS, RANGE (c)
         (PARTITION VALUES <= 1, PARTITION 1 < VALUES <= 2,
          PARTITION 2 < VALUES) STORED AS KUDU""",
         """
         CREATE TABLE {db}.{{table}} (
-          c INT,
-          d STRING,
+          c INT NOT NULL ENCODING AUTO_ENCODING COMPRESSION 
DEFAULT_COMPRESSION,
+          d STRING NULL ENCODING AUTO_ENCODING COMPRESSION DEFAULT_COMPRESSION,
           PRIMARY KEY (c)
         )
         DISTRIBUTE BY HASH (c) INTO 3 BUCKETS, RANGE (c) (...)
@@ -259,11 +281,11 @@ class TestShowCreateTable(KuduTestSuite):
             db=cursor.conn.db_name, kudu_addr=KUDU_MASTER_HOSTS))
     self.assert_show_create_equals(cursor,
         """
-        CREATE TABLE {table} (c INT, PRIMARY KEY (c))
+        CREATE TABLE {table} (c INT ENCODING PLAIN_ENCODING, PRIMARY KEY (c))
         DISTRIBUTE BY HASH (c) INTO 3 BUCKETS STORED AS KUDU""",
         """
         CREATE TABLE {db}.{{table}} (
-          c INT,
+          c INT NOT NULL ENCODING PLAIN_ENCODING COMPRESSION 
DEFAULT_COMPRESSION,
           PRIMARY KEY (c)
         )
         DISTRIBUTE BY HASH (c) INTO 3 BUCKETS
@@ -272,14 +294,14 @@ class TestShowCreateTable(KuduTestSuite):
             db=cursor.conn.db_name, kudu_addr=KUDU_MASTER_HOSTS))
     self.assert_show_create_equals(cursor,
         """
-        CREATE TABLE {table} (c INT, d STRING, PRIMARY KEY(c, d))
+        CREATE TABLE {table} (c INT COMPRESSION LZ4, d STRING, PRIMARY KEY(c, 
d))
         DISTRIBUTE BY HASH (c) INTO 3 BUCKETS, HASH (d) INTO 3 BUCKETS,
         RANGE (c, d) (PARTITION VALUE = (1, 'aaa'), PARTITION VALUE = (2, 
'bbb'))
         STORED AS KUDU""",
         """
         CREATE TABLE {db}.{{table}} (
-          c INT,
-          d STRING,
+          c INT NOT NULL ENCODING AUTO_ENCODING COMPRESSION LZ4,
+          d STRING NOT NULL ENCODING AUTO_ENCODING COMPRESSION 
DEFAULT_COMPRESSION,
           PRIMARY KEY (c, d)
         )
         DISTRIBUTE BY HASH (c) INTO 3 BUCKETS, HASH (d) INTO 3 BUCKETS, RANGE 
(c, d) (...)
@@ -288,14 +310,14 @@ class TestShowCreateTable(KuduTestSuite):
             db=cursor.conn.db_name, kudu_addr=KUDU_MASTER_HOSTS))
     self.assert_show_create_equals(cursor,
         """
-        CREATE TABLE {table} (c INT, d STRING, e INT, PRIMARY KEY(c, d))
+        CREATE TABLE {table} (c INT, d STRING, e INT NULL DEFAULT 10, PRIMARY 
KEY(c, d))
         DISTRIBUTE BY RANGE (c) (PARTITION VALUES <= 1, PARTITION 1 < VALUES 
<= 2,
         PARTITION 2 < VALUES <= 3, PARTITION 3 < VALUES) STORED AS KUDU""",
         """
         CREATE TABLE {db}.{{table}} (
-          c INT,
-          d STRING,
-          e INT,
+          c INT NOT NULL ENCODING AUTO_ENCODING COMPRESSION 
DEFAULT_COMPRESSION,
+          d STRING NOT NULL ENCODING AUTO_ENCODING COMPRESSION 
DEFAULT_COMPRESSION,
+          e INT NULL ENCODING AUTO_ENCODING COMPRESSION DEFAULT_COMPRESSION 
DEFAULT 10,
           PRIMARY KEY (c, d)
         )
         DISTRIBUTE BY RANGE (c) (...)
@@ -316,7 +338,7 @@ class TestShowCreateTable(KuduTestSuite):
         TBLPROPERTIES ({props})""".format(props=props),
         """
         CREATE TABLE {db}.{{table}} (
-          c INT,
+          c INT NOT NULL ENCODING AUTO_ENCODING COMPRESSION 
DEFAULT_COMPRESSION,
           PRIMARY KEY (c)
         )
         DISTRIBUTE BY HASH (c) INTO 3 BUCKETS
@@ -335,7 +357,7 @@ class TestShowCreateTable(KuduTestSuite):
         TBLPROPERTIES ({props})""".format(props=props),
         """
         CREATE TABLE {db}.{{table}} (
-          c INT,
+          c INT NOT NULL ENCODING AUTO_ENCODING COMPRESSION 
DEFAULT_COMPRESSION,
           PRIMARY KEY (c)
         )
         DISTRIBUTE BY HASH (c) INTO 3 BUCKETS

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/3db5ced4/tests/shell/test_shell_commandline.py
----------------------------------------------------------------------
diff --git a/tests/shell/test_shell_commandline.py 
b/tests/shell/test_shell_commandline.py
index d880c2d..5b9859c 100644
--- a/tests/shell/test_shell_commandline.py
+++ b/tests/shell/test_shell_commandline.py
@@ -478,7 +478,7 @@ class TestImpalaShell(ImpalaTestSuite):
   def test_kudu_dml_reporting(self, unique_database):
     db = unique_database
     run_impala_shell_cmd('--query="create table %s.dml_test (id int primary 
key, '\
-        'age int) distribute by hash(id) into 2 buckets stored as kudu"' % db)
+        'age int null) distribute by hash(id) into 2 buckets stored as kudu"' 
% db)
 
     self._validate_dml_stmt("insert into %s.dml_test (id) values (7), (7)" % 
db, 1, 1)
     self._validate_dml_stmt("insert into %s.dml_test (id) values (7)" % db, 0, 
1)

Reply via email to