This is an automated email from the ASF dual-hosted git repository.

krisztiankasa pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new c7fdd45  HIVE-25652: Add constraints in result of “SHOW CREATE TABLE” 
(Soumyakanti Das, reviewed by Krisztian Kasa)
c7fdd45 is described below

commit c7fdd459305f4bf6913dc4bed7e8df8c7bf9e458
Author: Soumyakanti Das <soumyakanti.das...@gmail.com>
AuthorDate: Sun Dec 5 23:23:46 2021 -0800

    HIVE-25652: Add constraints in result of “SHOW CREATE TABLE” (Soumyakanti 
Das, reviewed by Krisztian Kasa)
---
 .../create/show/ShowCreateTableOperation.java      |  24 +-
 .../desc/formatter/JsonDescTableFormatter.java     |  12 +-
 .../desc/formatter/TextDescTableFormatter.java     |  28 +-
 .../apache/hadoop/hive/ql/exec/DDLPlanUtils.java   | 333 +++++++++++----------
 .../apache/hadoop/hive/ql/exec/ExplainTask.java    |   3 +-
 .../hadoop/hive/ql/metadata/CheckConstraint.java   |  56 +++-
 .../hadoop/hive/ql/metadata/DefaultConstraint.java |  19 +-
 .../hadoop/hive/ql/metadata/ForeignKeyInfo.java    |  17 +-
 .../org/apache/hadoop/hive/ql/metadata/Hive.java   |  12 +-
 .../hadoop/hive/ql/metadata/NotNullConstraint.java |  16 +-
 .../hadoop/hive/ql/metadata/PrimaryKeyInfo.java    |  46 ++-
 .../hive/ql/metadata/TableConstraintsInfo.java     |  12 +-
 .../hadoop/hive/ql/metadata/UniqueConstraint.java  |  17 +-
 .../queries/clientpositive/show_create_table.q     |  44 +++
 .../llap/constraints_explain_ddl.q.out             | 172 +++++------
 .../clientpositive/llap/show_create_table.q.out    | 163 ++++++++++
 16 files changed, 663 insertions(+), 311 deletions(-)

diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableOperation.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableOperation.java
index 4788b65..456ba0b 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableOperation.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableOperation.java
@@ -22,6 +22,13 @@ package org.apache.hadoop.hive.ql.ddl.table.create.show;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.ddl.DDLOperation;
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
@@ -45,9 +52,20 @@ public class ShowCreateTableOperation extends 
DDLOperation<ShowCreateTableDesc>
     try (DataOutputStream outStream = ShowUtils.getOutputStream(new 
Path(desc.getResFile()), context)) {
       Table table = context.getDb().getTable(desc.getDatabaseName(), 
desc.getTableName());
       DDLPlanUtils ddlObj = new DDLPlanUtils();
-      String command = table.isView() ? ddlObj.getCreateViewCommand(table, 
desc.isRelative())
-          : ddlObj.getCreateTableCommand(table, desc.isRelative());
-
+      String command;
+      if (table.isView()) {
+        command = ddlObj.getCreateViewCommand(table, desc.isRelative());
+      } else {
+        List<String> commands = new ArrayList<>();
+        commands.add(ddlObj.getCreateTableCommand(table, desc.isRelative()));
+        String primaryKeyStmt = 
ddlObj.getAlterTableStmtPrimaryKeyConstraint(table.getPrimaryKeyInfo());
+        if (primaryKeyStmt != null) {
+          commands.add(primaryKeyStmt);
+        }
+        commands.addAll(ddlObj.populateConstraints(table,
+          new HashSet<>(Collections.singletonList(table.getTableName()))));
+        command = String.join("\n", commands);
+      }
       outStream.write(command.getBytes(StandardCharsets.UTF_8));
       return 0;
     } catch (IOException e) {
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/desc/formatter/JsonDescTableFormatter.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/desc/formatter/JsonDescTableFormatter.java
index 3318463..1444eb2 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/desc/formatter/JsonDescTableFormatter.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/desc/formatter/JsonDescTableFormatter.java
@@ -258,22 +258,22 @@ public class JsonDescTableFormatter extends 
DescTableFormatter {
         }).collect(Collectors.toList()));
       }
     }
-    if (PrimaryKeyInfo.isPrimaryKeyInfoNotEmpty(table.getPrimaryKeyInfo())) {
+    if (PrimaryKeyInfo.isNotEmpty(table.getPrimaryKeyInfo())) {
       builder.put("primaryKeyInfo", table.getPrimaryKeyInfo());
     }
-    if (ForeignKeyInfo.isForeignKeyInfoNotEmpty(table.getForeignKeyInfo())) {
+    if (ForeignKeyInfo.isNotEmpty(table.getForeignKeyInfo())) {
       builder.put("foreignKeyInfo", table.getForeignKeyInfo());
     }
-    if (UniqueConstraint.isUniqueConstraintNotEmpty(table.getUniqueKeyInfo())) 
{
+    if (UniqueConstraint.isNotEmpty(table.getUniqueKeyInfo())) {
       builder.put("uniqueConstraintInfo", table.getUniqueKeyInfo());
     }
-    if 
(NotNullConstraint.isNotNullConstraintNotEmpty(table.getNotNullConstraint())) {
+    if (NotNullConstraint.isNotEmpty(table.getNotNullConstraint())) {
       builder.put("notNullConstraintInfo", table.getNotNullConstraint());
     }
-    if 
(DefaultConstraint.isCheckConstraintNotEmpty(table.getDefaultConstraint())) {
+    if (DefaultConstraint.isNotEmpty(table.getDefaultConstraint())) {
       builder.put("defaultConstraintInfo", table.getDefaultConstraint());
     }
-    if (CheckConstraint.isCheckConstraintNotEmpty(table.getCheckConstraint())) 
{
+    if (CheckConstraint.isNotEmpty(table.getCheckConstraint())) {
       builder.put("checkConstraintInfo", table.getCheckConstraint());
     }
     if (table.getStorageHandlerInfo() != null) {
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/desc/formatter/TextDescTableFormatter.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/desc/formatter/TextDescTableFormatter.java
index 1976849..a0d0ee8 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/desc/formatter/TextDescTableFormatter.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/desc/formatter/TextDescTableFormatter.java
@@ -389,27 +389,27 @@ class TextDescTableFormatter extends DescTableFormatter {
     StringBuilder constraintsInfo = new 
StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
 
     constraintsInfo.append(LINE_DELIM).append("# 
Constraints").append(LINE_DELIM);
-    if (PrimaryKeyInfo.isPrimaryKeyInfoNotEmpty(table.getPrimaryKeyInfo())) {
+    if (PrimaryKeyInfo.isNotEmpty(table.getPrimaryKeyInfo())) {
       constraintsInfo.append(LINE_DELIM).append("# Primary 
Key").append(LINE_DELIM);
       getPrimaryKeyInformation(constraintsInfo, table.getPrimaryKeyInfo());
     }
-    if (ForeignKeyInfo.isForeignKeyInfoNotEmpty(table.getForeignKeyInfo())) {
+    if (ForeignKeyInfo.isNotEmpty(table.getForeignKeyInfo())) {
       constraintsInfo.append(LINE_DELIM).append("# Foreign 
Keys").append(LINE_DELIM);
       getForeignKeysInformation(constraintsInfo, table.getForeignKeyInfo());
     }
-    if (UniqueConstraint.isUniqueConstraintNotEmpty(table.getUniqueKeyInfo())) 
{
+    if (UniqueConstraint.isNotEmpty(table.getUniqueKeyInfo())) {
       constraintsInfo.append(LINE_DELIM).append("# Unique 
Constraints").append(LINE_DELIM);
       getUniqueConstraintsInformation(constraintsInfo, 
table.getUniqueKeyInfo());
     }
-    if 
(NotNullConstraint.isNotNullConstraintNotEmpty(table.getNotNullConstraint())) {
+    if (NotNullConstraint.isNotEmpty(table.getNotNullConstraint())) {
       constraintsInfo.append(LINE_DELIM).append("# Not Null 
Constraints").append(LINE_DELIM);
       getNotNullConstraintsInformation(constraintsInfo, 
table.getNotNullConstraint());
     }
-    if 
(DefaultConstraint.isCheckConstraintNotEmpty(table.getDefaultConstraint())) {
+    if (DefaultConstraint.isNotEmpty(table.getDefaultConstraint())) {
       constraintsInfo.append(LINE_DELIM).append("# Default 
Constraints").append(LINE_DELIM);
       getDefaultConstraintsInformation(constraintsInfo, 
table.getDefaultConstraint());
     }
-    if (CheckConstraint.isCheckConstraintNotEmpty(table.getCheckConstraint())) 
{
+    if (CheckConstraint.isNotEmpty(table.getCheckConstraint())) {
       constraintsInfo.append(LINE_DELIM).append("# Check 
Constraints").append(LINE_DELIM);
       getCheckConstraintsInformation(constraintsInfo, 
table.getCheckConstraint());
     }
@@ -529,8 +529,8 @@ class TextDescTableFormatter extends DescTableFormatter {
     if (CollectionUtils.isNotEmpty(columns)) {
       for (CheckConstraintCol column : columns) {
         String[] fields = new String[2];
-        fields[0] = "Column Name:" + column.colName;
-        fields[1] = "Check Value:" + column.checkExpression;
+        fields[0] = "Column Name:" + column.getColName();
+        fields[1] = "Check Value:" + column.getCheckExpression();
         formatOutput(fields, constraintsInfo);
       }
     }
@@ -559,27 +559,27 @@ class TextDescTableFormatter extends DescTableFormatter {
     if (table.getTableConstraintsInfo().isTableConstraintsInfoNotEmpty()) {
       out.write(("Constraints").getBytes(StandardCharsets.UTF_8));
       out.write(Utilities.tabCode);
-      if (PrimaryKeyInfo.isPrimaryKeyInfoNotEmpty(table.getPrimaryKeyInfo())) {
+      if (PrimaryKeyInfo.isNotEmpty(table.getPrimaryKeyInfo())) {
         
out.write(table.getPrimaryKeyInfo().toString().getBytes(StandardCharsets.UTF_8));
         out.write(Utilities.newLineCode);
       }
-      if (ForeignKeyInfo.isForeignKeyInfoNotEmpty(table.getForeignKeyInfo())) {
+      if (ForeignKeyInfo.isNotEmpty(table.getForeignKeyInfo())) {
         
out.write(table.getForeignKeyInfo().toString().getBytes(StandardCharsets.UTF_8));
         out.write(Utilities.newLineCode);
       }
-      if 
(UniqueConstraint.isUniqueConstraintNotEmpty(table.getUniqueKeyInfo())) {
+      if (UniqueConstraint.isNotEmpty(table.getUniqueKeyInfo())) {
         
out.write(table.getUniqueKeyInfo().toString().getBytes(StandardCharsets.UTF_8));
         out.write(Utilities.newLineCode);
       }
-      if 
(NotNullConstraint.isNotNullConstraintNotEmpty(table.getNotNullConstraint())) {
+      if (NotNullConstraint.isNotEmpty(table.getNotNullConstraint())) {
         
out.write(table.getNotNullConstraint().toString().getBytes(StandardCharsets.UTF_8));
         out.write(Utilities.newLineCode);
       }
-      if 
(DefaultConstraint.isCheckConstraintNotEmpty(table.getDefaultConstraint())) {
+      if (DefaultConstraint.isNotEmpty(table.getDefaultConstraint())) {
         
out.write(table.getDefaultConstraint().toString().getBytes(StandardCharsets.UTF_8));
         out.write(Utilities.newLineCode);
       }
-      if 
(CheckConstraint.isCheckConstraintNotEmpty(table.getCheckConstraint())) {
+      if (CheckConstraint.isNotEmpty(table.getCheckConstraint())) {
         
out.write(table.getCheckConstraint().toString().getBytes(StandardCharsets.UTF_8));
         out.write(Utilities.newLineCode);
       }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLPlanUtils.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLPlanUtils.java
index 7a7d2a5..bbd342a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLPlanUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLPlanUtils.java
@@ -52,6 +52,7 @@ import 
org.apache.hadoop.hive.ql.metadata.DefaultConstraint.DefaultConstraintCol
 import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.apache.hadoop.hive.ql.metadata.NotNullConstraint;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo;
@@ -75,7 +76,6 @@ import java.util.Base64;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
@@ -115,6 +115,9 @@ public class DDLPlanUtils {
   private static final String COL_TYPE = "COL_TYPE";
   private static final String SQL = "SQL";
   private static final String COMMENT_SQL = "COMMENT_SQL";
+  private static final String ENABLE = "ENABLE";
+  private static final String RELY = "RELY";
+  private static final String VALIDATE = "VALIDATE";
   private static final String HIVE_DEFAULT_PARTITION = 
"__HIVE_DEFAULT_PARTITION__";
   private static final String BASE_64_VALUE = "BASE_64";
   private static final String numNulls = "'numNulls'='";
@@ -131,89 +134,94 @@ public class DDLPlanUtils {
   private static final String CREATE_DATABASE_STMT = "CREATE DATABASE IF NOT 
EXISTS <" + DATABASE_NAME + ">;";
 
   private final String CREATE_TABLE_TEMPLATE =
-      "CREATE <" + TEMPORARY + "><" + EXTERNAL + ">TABLE <if(" + DATABASE_NAME 
+ ")>`<" + DATABASE_NAME + ">`.<endif>"
-          + "`<" + TABLE_NAME + ">`(\n" +
-          "<" + LIST_COLUMNS + ">)\n" +
-          "<" + COMMENT + ">\n" +
-          "<" + PARTITIONS + ">\n" +
-          "<" + PARTITIONS_BY_SPEC + ">\n" +
-          "<" + BUCKETS + ">\n" +
-          "<" + SKEWED + ">\n" +
-          "<" + ROW_FORMAT + ">\n" +
-          "<" + LOCATION_BLOCK + ">" +
-          "TBLPROPERTIES (\n" +
-          "<" + PROPERTIES + ">)";
+    "CREATE <" + TEMPORARY + "><" + EXTERNAL + ">TABLE <if(" + DATABASE_NAME + 
")>`<" + DATABASE_NAME + ">`.<endif>"
+      + "`<" + TABLE_NAME + ">`(\n" +
+      "<" + LIST_COLUMNS + ">)\n" +
+      "<" + COMMENT + ">\n" +
+      "<" + PARTITIONS + ">\n" +
+      "<" + PARTITIONS_BY_SPEC + ">\n" +
+      "<" + BUCKETS + ">\n" +
+      "<" + SKEWED + ">\n" +
+      "<" + ROW_FORMAT + ">\n" +
+      "<" + LOCATION_BLOCK + ">" +
+      "TBLPROPERTIES (\n" +
+      "<" + PROPERTIES + ">)";
 
   private static final String CREATE_VIEW_TEMPLATE =
-      "CREATE VIEW <if(" + DATABASE_NAME + ")>`<" + DATABASE_NAME + 
">`.<endif>`<" + TABLE_NAME +
-          ">`<" + PARTITIONS + "> AS <" + SQL +">";
+    "CREATE VIEW <if(" + DATABASE_NAME + ")>`<" + DATABASE_NAME + 
">`.<endif>`<" + TABLE_NAME +
+      ">`<" + PARTITIONS + "> AS <" + SQL + ">";
 
   private final String CREATE_TABLE_TEMPLATE_LOCATION = "LOCATION\n" +
-      "<" + LOCATION + ">\n";
+    "<" + LOCATION + ">\n";
 
   private final Set<String> PROPERTIES_TO_IGNORE_AT_TBLPROPERTIES = Sets.union(
-      ImmutableSet.of("TEMPORARY", "EXTERNAL", "comment", 
"SORTBUCKETCOLSPREFIX", META_TABLE_STORAGE, TABLE_IS_CTAS, CTAS_LEGACY_CONFIG),
-      new HashSet<String>(StatsSetupConst.TABLE_PARAMS_STATS_KEYS));
+    ImmutableSet.of("TEMPORARY", "EXTERNAL", "comment", 
"SORTBUCKETCOLSPREFIX", META_TABLE_STORAGE, TABLE_IS_CTAS, CTAS_LEGACY_CONFIG),
+    new HashSet<String>(StatsSetupConst.TABLE_PARAMS_STATS_KEYS));
 
   private final String ALTER_TABLE_CREATE_PARTITION = "<if(" + COMMENT_SQL + 
")><" + COMMENT_SQL + "> <endif>" + "ALTER TABLE <"
-      + DATABASE_NAME + ">.<" + TABLE_NAME +
-      "> ADD IF NOT EXISTS PARTITION (<" + PARTITION +
-      ">);";
+    + DATABASE_NAME + ">.<" + TABLE_NAME +
+    "> ADD IF NOT EXISTS PARTITION (<" + PARTITION +
+    ">);";
 
   private final String ALTER_TABLE_UPDATE_STATISTICS_TABLE_COLUMN = "ALTER 
TABLE <"
-      + DATABASE_NAME + ">.<" +
-      TABLE_NAME + "> UPDATE STATISTICS FOR COLUMN <"
-      + COLUMN_NAME + "> SET(<" + TBLPROPERTIES + "> );";
+    + DATABASE_NAME + ">.<" +
+    TABLE_NAME + "> UPDATE STATISTICS FOR COLUMN <"
+    + COLUMN_NAME + "> SET(<" + TBLPROPERTIES + "> );";
 
-  private final String ALTER_TABLE_UPDATE_STATISTICS_PARTITION_COLUMN = "<if(" 
+ COMMENT_SQL + ")><" + COMMENT_SQL + "> <endif>" + "ALTER TABLE <"
-      + DATABASE_NAME + ">.<" + TABLE_NAME +
-      "> PARTITION (<" + PARTITION_NAME +
-      ">) UPDATE STATISTICS FOR COLUMN <"
-      + COLUMN_NAME + "> SET(<" + TBLPROPERTIES + "> );";
+  private final String ALTER_TABLE_UPDATE_STATISTICS_PARTITION_COLUMN =
+    "<if(" + COMMENT_SQL + ")><" + COMMENT_SQL + "> <endif>" + "ALTER TABLE <"
+    + DATABASE_NAME + ">.<" + TABLE_NAME +
+    "> PARTITION (<" + PARTITION_NAME +
+    ">) UPDATE STATISTICS FOR COLUMN <"
+    + COLUMN_NAME + "> SET(<" + TBLPROPERTIES + "> );";
 
   private final String ALTER_TABLE_UPDATE_STATISTICS_TABLE_BASIC = "ALTER 
TABLE <"
-      + DATABASE_NAME + ">.<" + TABLE_NAME +
-      "> UPDATE STATISTICS SET(<" + TBLPROPERTIES + "> );";
+    + DATABASE_NAME + ">.<" + TABLE_NAME +
+    "> UPDATE STATISTICS SET(<" + TBLPROPERTIES + "> );";
 
-  private final String ALTER_TABLE_UPDATE_STATISTICS_PARTITION_BASIC = "<if(" 
+ COMMENT_SQL + ")><" + COMMENT_SQL + "> <endif>" + "ALTER TABLE <"
+  private final String ALTER_TABLE_UPDATE_STATISTICS_PARTITION_BASIC =
+    "<if(" + COMMENT_SQL + ")><" + COMMENT_SQL + "> <endif>" + "ALTER TABLE <"
       + DATABASE_NAME + ">.<" + TABLE_NAME + "> PARTITION (<" +
       PARTITION_NAME + ">) UPDATE STATISTICS SET(<" + TBLPROPERTIES + "> );";
   private final String ALTER_TABLE_ADD_PRIMARY_KEY = "ALTER TABLE <"
-      + DATABASE_NAME + ">.<" + TABLE_NAME + "> ADD CONSTRAINT <" +
-      CONSTRAINT_NAME + "> PRIMARY KEY (<" + COL_NAMES + ">) DISABLE 
NOVALIDATE;";
+    + DATABASE_NAME + ">.<" + TABLE_NAME + "> ADD CONSTRAINT <" +
+    CONSTRAINT_NAME + "> PRIMARY KEY (<" + COL_NAMES + ">) <" + ENABLE + "> <" 
+ VALIDATE + "> <" + RELY + ">;";
 
   private final String ALTER_TABLE_ADD_FOREIGN_KEY = "ALTER TABLE <"
-      + DATABASE_NAME + ">.<" + CHILD_TABLE_NAME + "> ADD CONSTRAINT <"
-      + CONSTRAINT_NAME + "> FOREIGN KEY (<" + CHILD_COL_NAME + ">) REFERENCES 
<"
-      + DATABASE_NAME_FR + ">.<" + PARENT_TABLE_NAME + ">(<" + PARENT_COL_NAME 
+ ">) DISABLE NOVALIDATE RELY;";
+    + DATABASE_NAME + ">.<" + CHILD_TABLE_NAME + "> ADD CONSTRAINT <"
+    + CONSTRAINT_NAME + "> FOREIGN KEY (<" + CHILD_COL_NAME + ">) REFERENCES <"
+    + DATABASE_NAME_FR + ">.<" + PARENT_TABLE_NAME + ">(<" + PARENT_COL_NAME + 
">) <"
+    + ENABLE + "> <" + VALIDATE + "> <" + RELY + ">;";
 
   private final String ALTER_TABLE_ADD_UNIQUE_CONSTRAINT = "ALTER TABLE <"
-      + DATABASE_NAME + ">.<" + TABLE_NAME + "> ADD CONSTRAINT <" +
-      CONSTRAINT_NAME + "> UNIQUE (<" + COLUMN_NAME + ">) DISABLE NOVALIDATE;";
+    + DATABASE_NAME + ">.<" + TABLE_NAME + "> ADD CONSTRAINT <" +
+    CONSTRAINT_NAME + "> UNIQUE (<" + COLUMN_NAME + ">) <" + ENABLE + "> <" + 
VALIDATE + "> <" + RELY + ">;";
 
   private final String ALTER_TABLE_ADD_CHECK_CONSTRAINT = "ALTER TABLE <"
-      + DATABASE_NAME + ">.<" + TABLE_NAME +
-      "> ADD CONSTRAINT <" + CONSTRAINT_NAME + "> CHECK (<" +
-      CHECK_EXPRESSION + ">) DISABLE;";
+    + DATABASE_NAME + ">.<" + TABLE_NAME +
+    "> ADD CONSTRAINT <" + CONSTRAINT_NAME + "> CHECK (<" +
+    CHECK_EXPRESSION + ">) <" + ENABLE + "> <" + VALIDATE + "> <" + RELY + 
">;";
 
   private final String ALTER_TABLE_ADD_NOT_NULL_CONSTRAINT = "ALTER TABLE <"
-      + DATABASE_NAME + ">.<" + TABLE_NAME + "> CHANGE COLUMN < "
-      + COLUMN_NAME + "> <" + COLUMN_NAME +
-      "> <" + COL_TYPE + "> CONSTRAINT <" + CONSTRAINT_NAME + "> NOT NULL 
DISABLE;";
+    + DATABASE_NAME + ">.<" + TABLE_NAME + "> CHANGE COLUMN < "
+    + COLUMN_NAME + "> <" + COLUMN_NAME +
+    "> <" + COL_TYPE + "> CONSTRAINT <" + CONSTRAINT_NAME + "> NOT NULL <" + 
ENABLE + "> <" + VALIDATE
+    + "> <" + RELY + ">;";
 
   private final String ALTER_TABLE_ADD_DEFAULT_CONSTRAINT = "ALTER TABLE <"
-      + DATABASE_NAME + ">.<" + TABLE_NAME + "> CHANGE COLUMN < "
-      + COLUMN_NAME + "> <" + COLUMN_NAME +
-      "> <" + COL_TYPE + "> CONSTRAINT <" + CONSTRAINT_NAME + "> DEFAULT <" + 
DEFAULT_VALUE + "> DISABLE;";
+    + DATABASE_NAME + ">.<" + TABLE_NAME + "> CHANGE COLUMN < "
+    + COLUMN_NAME + "> <" + COLUMN_NAME +
+    "> <" + COL_TYPE + "> CONSTRAINT <" + CONSTRAINT_NAME + "> DEFAULT <" + 
DEFAULT_VALUE + "> <"
+    + ENABLE + "> <" + VALIDATE + "> <" + RELY + ">;";
 
   private final String EXIST_BIT_VECTORS = "-- BIT VECTORS PRESENT FOR <" + 
DATABASE_NAME + ">.<" + TABLE_NAME + "> " +
-      "FOR COLUMN <" + COLUMN_NAME + "> BUT THEY ARE NOT SUPPORTED YET. THE 
BASE64 VALUE FOR THE BITVECTOR IS <" +
-      BASE_64_VALUE +"> ";
+    "FOR COLUMN <" + COLUMN_NAME + "> BUT THEY ARE NOT SUPPORTED YET. THE 
BASE64 VALUE FOR THE BITVECTOR IS <" +
+    BASE_64_VALUE + "> ";
 
   private final String EXIST_BIT_VECTORS_PARTITIONED = "-- BIT VECTORS PRESENT 
FOR <" + DATABASE_NAME + ">.<" +
-      TABLE_NAME + "> PARTITION <" + PARTITION_NAME + "> FOR COLUMN <"
-      + COLUMN_NAME + "> BUT THEY ARE NOT SUPPORTED YET.THE BASE64 VALUE FOR 
THE BITVECTOR IS <" +
-      BASE_64_VALUE +"> ";
+    TABLE_NAME + "> PARTITION <" + PARTITION_NAME + "> FOR COLUMN <"
+    + COLUMN_NAME + "> BUT THEY ARE NOT SUPPORTED YET.THE BASE64 VALUE FOR THE 
BITVECTOR IS <" +
+    BASE_64_VALUE + "> ";
 
   /**
    * Returns the create database query for a give database name.
@@ -263,11 +271,10 @@ public class DDLPlanUtils {
     return StringUtils.join(ptParam, ",");
   }
 
-  public boolean checkIfDefaultPartition(String pt){
-    if(pt.contains(HIVE_DEFAULT_PARTITION)){
+  public boolean checkIfDefaultPartition(String pt) {
+    if (pt.contains(HIVE_DEFAULT_PARTITION)) {
       return true;
-    }
-    else {
+    } else {
       return false;
     }
   }
@@ -286,7 +293,7 @@ public class DDLPlanUtils {
     command.add(DATABASE_NAME, tb.getDbName());
     command.add(TABLE_NAME, tb.getTableName());
     command.add(PARTITION, getPartitionActualName(pt));
-    if(checkIfDefaultPartition(pt.getName())){
+    if (checkIfDefaultPartition(pt.getName())) {
       command.add(COMMENT_SQL, "--");
     }
     return command.render();
@@ -366,11 +373,11 @@ public class DDLPlanUtils {
       return;
     }
     DecimalColumnStatsData dc = cd.getDecimalStats();
-    if(dc.isSetHighValue()) {
+    if (dc.isSetHighValue()) {
       byte[] highValArr = 
setByteArrayToLongSize(dc.getHighValue().getUnscaled());
       ls.add(highValue + ByteBuffer.wrap(highValArr).getLong() + "E" + 
dc.getHighValue().getScale() + "'");
     }
-    if(dc.isSetLowValue()) {
+    if (dc.isSetLowValue()) {
       byte[] lowValArr = 
setByteArrayToLongSize(dc.getLowValue().getUnscaled());
       ls.add(lowValue + ByteBuffer.wrap(lowValArr).getLong() + "E" + 
dc.getLowValue().getScale() + "'");
     }
@@ -414,7 +421,7 @@ public class DDLPlanUtils {
     return null;
   }
 
-  public String addAllColStats(ColumnStatisticsData columnStatisticsData){
+  public String addAllColStats(ColumnStatisticsData columnStatisticsData) {
     List<String> temp = new ArrayList<>();
     addBinaryStats(columnStatisticsData, temp);
     addLongStats(columnStatisticsData, temp);
@@ -451,18 +458,18 @@ public class DDLPlanUtils {
    * @param tbl
    */
   public List<String> getAlterTableStmtTableStatsColsAll(Table tbl)
-      throws HiveException {
+    throws HiveException {
     List<String> alterTblStmt = new ArrayList<String>();
     List<String> accessedColumns = getTableColumnNames(tbl);
     List<ColumnStatisticsObj> tableColumnStatistics = 
Hive.get().getTableColumnStatistics(tbl.getDbName(),
-        tbl.getTableName(),
-        accessedColumns,
-        true);
+      tbl.getTableName(),
+      accessedColumns,
+      true);
     ColumnStatisticsObj[] columnStatisticsObj = 
tableColumnStatistics.toArray(new ColumnStatisticsObj[0]);
     for (int i = 0; i < columnStatisticsObj.length; i++) {
       
alterTblStmt.add(getAlterTableStmtCol(columnStatisticsObj[i].getStatsData(),
-          columnStatisticsObj[i].getColName(),
-          tbl.getTableName(), tbl.getDbName()));
+        columnStatisticsObj[i].getColName(),
+        tbl.getTableName(), tbl.getDbName()));
       String base64 = checkBitVectors(columnStatisticsObj[i].getStatsData());
       if (base64 != null) {
         ST command = new ST(EXIST_BIT_VECTORS);
@@ -487,14 +494,14 @@ public class DDLPlanUtils {
    * @return
    */
   public String getAlterTableStmtPartitionColStat(ColumnStatisticsData 
columnStatisticsData, String colName,
-      String tblName, String ptName, String dbName) {
+                                                  String tblName, String 
ptName, String dbName) {
     ST command = new ST(ALTER_TABLE_UPDATE_STATISTICS_PARTITION_COLUMN);
     command.add(DATABASE_NAME, dbName);
     command.add(TABLE_NAME, tblName);
     command.add(COLUMN_NAME, colName);
     command.add(PARTITION_NAME, ptName);
     command.add(TBLPROPERTIES, addAllColStats(columnStatisticsData));
-    if(checkIfDefaultPartition(ptName)){
+    if (checkIfDefaultPartition(ptName)) {
       command.add(COMMENT_SQL, "--");
     }
     return command.render();
@@ -510,17 +517,17 @@ public class DDLPlanUtils {
    * @param dbName
    */
   public List<String> 
getAlterTableStmtPartitionStatsColsAll(List<ColumnStatisticsObj> 
columnStatisticsObjList,
-      String tblName,
-      String ptName,
-      String dbName) {
+                                                             String tblName,
+                                                             String ptName,
+                                                             String dbName) {
     List<String> alterTableStmt = new ArrayList<String>();
     ColumnStatisticsObj[] columnStatisticsObj = 
columnStatisticsObjList.toArray(new ColumnStatisticsObj[0]);
     for (int i = 0; i < columnStatisticsObj.length; i++) {
       
alterTableStmt.add(getAlterTableStmtPartitionColStat(columnStatisticsObj[i].getStatsData(),
-          columnStatisticsObj[i].getColName(),
-          tblName,
-          ptName,
-          dbName));
+        columnStatisticsObj[i].getColName(),
+        tblName,
+        ptName,
+        dbName));
       String base64 = checkBitVectors(columnStatisticsObj[i].getStatsData());
       if (base64 != null) {
         ST command = new ST(EXIST_BIT_VECTORS_PARTITIONED);
@@ -535,7 +542,7 @@ public class DDLPlanUtils {
     return alterTableStmt;
   }
 
-  public String paramToValues(Map<String, String> parameters){
+  public String paramToValues(Map<String, String> parameters) {
     List<String> paramsToValue = new ArrayList<>();
     for (String s : req) {
       String p = parameters.get(s);
@@ -560,15 +567,15 @@ public class DDLPlanUtils {
     command.add(TABLE_NAME, pt.getTable().getTableName());
     command.add(PARTITION_NAME, getPartitionActualName(pt));
     command.add(TBLPROPERTIES, paramToValues(parameters));
-    if(checkIfDefaultPartition(pt.getName())){
+    if (checkIfDefaultPartition(pt.getName())) {
       command.add(COMMENT_SQL, "--");
     }
     return command.render();
   }
 
   public List<String> getDDLPlanForPartitionWithStats(Table table,
-      Map<String, List<Partition>> tableToPartitionList
-                                                     ) throws MetaException, 
HiveException {
+                                                      Map<String, 
List<Partition>> tableToPartitionList
+  ) throws MetaException, HiveException {
     List<String> alterTableStmt = new ArrayList<String>();
     String tableName = table.getTableName();
     for (Partition pt : tableToPartitionList.get(tableName)) {
@@ -581,16 +588,16 @@ public class DDLPlanUtils {
     List<String> columnNames = getTableColumnNames(table);
     tableToPartitionList.get(tableName).stream().forEach(p -> 
partNames.add(p.getName()));
     Map<String, List<ColumnStatisticsObj>> partitionColStats =
-        Hive.get().getPartitionColumnStatistics(databaseName,
-            tableName, partNames, columnNames,
-            true);
+      Hive.get().getPartitionColumnStatistics(databaseName,
+        tableName, partNames, columnNames,
+        true);
     Map<String, String> partitionToActualName = new HashMap<>();
     tableToPartitionList.get(tableName).stream().forEach(p -> 
partitionToActualName.put(p.getName(),
-        getPartitionActualName(p)));
+      getPartitionActualName(p)));
     for (String partitionName : partitionColStats.keySet()) {
       
alterTableStmt.addAll(getAlterTableStmtPartitionStatsColsAll(partitionColStats.get(partitionName),
-          tableName, partitionToActualName.get(partitionName),
-          databaseName));
+        tableName, partitionToActualName.get(partitionName),
+        databaseName));
     }
     return alterTableStmt;
   }
@@ -612,7 +619,7 @@ public class DDLPlanUtils {
   }
 
   public String getAlterTableStmtPrimaryKeyConstraint(PrimaryKeyInfo pr) {
-    if (!PrimaryKeyInfo.isPrimaryKeyInfoNotEmpty(pr)) {
+    if (!PrimaryKeyInfo.isNotEmpty(pr)) {
       return null;
     }
     ST command = new ST(ALTER_TABLE_ADD_PRIMARY_KEY);
@@ -620,19 +627,19 @@ public class DDLPlanUtils {
     command.add(DATABASE_NAME, pr.getDatabaseName());
     command.add(CONSTRAINT_NAME, pr.getConstraintName());
     command.add(COL_NAMES, String.join(",", pr.getColNames().values()));
+    command.add(ENABLE, pr.getEnable());
+    command.add(VALIDATE, pr.getValidate());
+    command.add(RELY, pr.getRely());
     return command.render();
   }
 
   public void getAlterTableStmtForeignKeyConstraint(ForeignKeyInfo fr, 
List<String> constraints, Set<String> allTableNames) {
-    if (!ForeignKeyInfo.isForeignKeyInfoNotEmpty(fr)) {
+    if (!ForeignKeyInfo.isNotEmpty(fr)) {
       return;
     }
     Map<String, List<ForeignKeyInfo.ForeignKeyCol>> all = fr.getForeignKeys();
     for (String key : all.keySet()) {
       for (ForeignKeyInfo.ForeignKeyCol fkc : all.get(key)) {
-        if (!allTableNames.contains(fkc.parentTableName)) {
-          continue;
-        }
         ST command = new ST(ALTER_TABLE_ADD_FOREIGN_KEY);
         command.add(CHILD_TABLE_NAME, fr.getChildTableName());
         command.add(DATABASE_NAME, fr.getChildDatabaseName());
@@ -641,13 +648,16 @@ public class DDLPlanUtils {
         command.add(DATABASE_NAME_FR, fkc.parentDatabaseName);
         command.add(PARENT_TABLE_NAME, fkc.parentTableName);
         command.add(PARENT_COL_NAME, fkc.parentColName);
+        command.add(ENABLE, fkc.enable);
+        command.add(VALIDATE, fkc.validate);
+        command.add(RELY, fkc.rely);
         constraints.add(command.render());
       }
     }
   }
 
   public void getAlterTableStmtUniqueConstraint(UniqueConstraint uq, 
List<String> constraints) {
-    if (!UniqueConstraint.isUniqueConstraintNotEmpty(uq)) {
+    if (!UniqueConstraint.isNotEmpty(uq)) {
       return;
     }
     Map<String, List<UniqueConstraint.UniqueConstraintCol>> uniqueConstraints 
= uq.getUniqueConstraints();
@@ -661,12 +671,15 @@ public class DDLPlanUtils {
         colNames.add(col.colName);
       }
       command.add(COLUMN_NAME, Joiner.on(",").join(colNames));
+      command.add(ENABLE, uniqueConstraints.get(key).get(0).enable);
+      command.add(VALIDATE, uniqueConstraints.get(key).get(0).validate);
+      command.add(RELY, uniqueConstraints.get(key).get(0).rely);
       constraints.add(command.render());
     }
   }
 
   public void getAlterTableStmtDefaultConstraint(DefaultConstraint dc, Table 
tb, List<String> constraints) {
-    if (!DefaultConstraint.isCheckConstraintNotEmpty(dc)) {
+    if (!DefaultConstraint.isNotEmpty(dc)) {
       return;
     }
     Map<String, String> colType = getTableColumnsToType(tb);
@@ -680,13 +693,16 @@ public class DDLPlanUtils {
         command.add(COLUMN_NAME, col.colName);
         command.add(COL_TYPE, colType.get(col.colName));
         command.add(DEFAULT_VALUE, col.defaultVal);
+        command.add(ENABLE, col.enable);
+        command.add(VALIDATE, col.validate);
+        command.add(RELY, col.rely);
         constraints.add(command.render());
       }
     }
   }
 
   public void getAlterTableStmtCheckConstraint(CheckConstraint ck, 
List<String> constraints) {
-    if (!CheckConstraint.isCheckConstraintNotEmpty(ck)) {
+    if (!CheckConstraint.isNotEmpty(ck)) {
       return;
     }
     Map<String, List<CheckConstraint.CheckConstraintCol>> checkConstraints = 
ck.getCheckConstraints();
@@ -698,7 +714,10 @@ public class DDLPlanUtils {
           command.add(DATABASE_NAME, ck.getDatabaseName());
           command.add(TABLE_NAME, ck.getTableName());
           command.add(CONSTRAINT_NAME, constraintName);
-          command.add(CHECK_EXPRESSION, col.checkExpression);
+          command.add(CHECK_EXPRESSION, col.getCheckExpression());
+          command.add(ENABLE, col.getEnable());
+          command.add(VALIDATE, col.getValidate());
+          command.add(RELY, col.getRely());
           constraints.add(command.render());
         }
       }
@@ -707,11 +726,12 @@ public class DDLPlanUtils {
 
 
   public void getAlterTableStmtNotNullConstraint(NotNullConstraint nc, Table 
tb, List<String> constraints) {
-    if (!NotNullConstraint.isNotNullConstraintNotEmpty(nc)) {
+    if (!NotNullConstraint.isNotEmpty(nc)) {
       return;
     }
     Map<String, String> colType = getTableColumnsToType(tb);
     Map<String, String> notNullConstraints = nc.getNotNullConstraints();
+    Map<String, List<String>> enableValidateRely = nc.getEnableValidateRely();
     for (String constraintName : notNullConstraints.keySet()) {
       ST command = new ST(ALTER_TABLE_ADD_NOT_NULL_CONSTRAINT);
       command.add(DATABASE_NAME, nc.getDatabaseName());
@@ -719,6 +739,9 @@ public class DDLPlanUtils {
       command.add(COLUMN_NAME, notNullConstraints.get(constraintName));
       command.add(COL_TYPE, 
colType.get(notNullConstraints.get(constraintName)));
       command.add(CONSTRAINT_NAME, constraintName);
+      command.add(ENABLE, enableValidateRely.get(constraintName).get(0));
+      command.add(VALIDATE, enableValidateRely.get(constraintName).get(1));
+      command.add(RELY, enableValidateRely.get(constraintName).get(2));
       constraints.add(command.render());
     }
   }
@@ -738,9 +761,9 @@ public class DDLPlanUtils {
     return constraints;
   }
 
-  public List<String> addExplainPlans(String sql){
+  public List<String> addExplainPlans(String sql) {
     List<String> exp = new ArrayList<String>();
-    for(String ex : explain_plans){
+    for (String ex : explain_plans) {
       exp.add(sql.replaceAll("(?i)explain ddl", ex) + ";");
     }
     return exp;
@@ -770,7 +793,7 @@ public class DDLPlanUtils {
   }
 
 
-  public String getCreateTableCommand(Table table, boolean isRelative) {
+  public String getCreateTableCommand(Table table, boolean isRelative) throws 
HiveException {
     ST command = new ST(CREATE_TABLE_TEMPLATE);
 
     if (!isRelative) {
@@ -800,7 +823,7 @@ public class DDLPlanUtils {
     return table.getTableType() == TableType.EXTERNAL_TABLE ? "EXTERNAL " : "";
   }
 
-  private String getColumns(Table table) {
+  private String getColumns(Table table) throws HiveException {
     List<String> columnDescs = new ArrayList<String>();
     for (FieldSchema column : table.getCols()) {
       String columnType = 
formatType(TypeInfoUtils.getTypeInfoFromTypeString(column.getType()));
@@ -813,50 +836,52 @@ public class DDLPlanUtils {
     return StringUtils.join(columnDescs, ", \n");
   }
 
-  /** Struct fields are identifiers, need to be put between ``. */
-  private String formatType(TypeInfo typeInfo) {
+  /**
+   * Struct fields are identifiers, need to be put between ``.
+   */
+  private String formatType(TypeInfo typeInfo) throws HiveException {
     switch (typeInfo.getCategory()) {
-    case PRIMITIVE:
-      return typeInfo.getTypeName();
-    case STRUCT:
-      StringBuilder structFormattedType = new StringBuilder();
-
-      StructTypeInfo structTypeInfo = (StructTypeInfo)typeInfo;
-      for (int i = 0; i < structTypeInfo.getAllStructFieldNames().size(); i++) 
{
-        if (structFormattedType.length() != 0) {
-          structFormattedType.append(", ");
-        }
+      case PRIMITIVE:
+        return typeInfo.getTypeName();
+      case STRUCT:
+        StringBuilder structFormattedType = new StringBuilder();
 
-        String structElementName = 
structTypeInfo.getAllStructFieldNames().get(i);
-        String structElementType = 
formatType(structTypeInfo.getAllStructFieldTypeInfos().get(i));
+        StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
+        for (int i = 0; i < structTypeInfo.getAllStructFieldNames().size(); 
i++) {
+          if (structFormattedType.length() != 0) {
+            structFormattedType.append(", ");
+          }
 
-        structFormattedType.append("`" + structElementName + "`:" + 
structElementType);
-      }
-      return "struct<" + structFormattedType.toString() + ">";
-    case LIST:
-      ListTypeInfo listTypeInfo = (ListTypeInfo)typeInfo;
-      String elementType = formatType(listTypeInfo.getListElementTypeInfo());
-      return "array<" + elementType + ">";
-    case MAP:
-      MapTypeInfo mapTypeInfo = (MapTypeInfo)typeInfo;
-      String keyTypeInfo = mapTypeInfo.getMapKeyTypeInfo().getTypeName();
-      String valueTypeInfo = formatType(mapTypeInfo.getMapValueTypeInfo());
-      return "map<" + keyTypeInfo + "," + valueTypeInfo + ">";
-    case UNION:
-      StringBuilder unionFormattedType = new StringBuilder();
-
-      UnionTypeInfo unionTypeInfo = (UnionTypeInfo)typeInfo;
-      for (TypeInfo unionElementTypeInfo : 
unionTypeInfo.getAllUnionObjectTypeInfos()) {
-        if (unionFormattedType.length() != 0) {
-          unionFormattedType.append(", ");
-        }
+          String structElementName = 
structTypeInfo.getAllStructFieldNames().get(i);
+          String structElementType = 
formatType(structTypeInfo.getAllStructFieldTypeInfos().get(i));
 
-        String unionElementType = formatType(unionElementTypeInfo);
-        unionFormattedType.append(unionElementType);
-      }
-      return "uniontype<" + unionFormattedType.toString() + ">";
-    default:
-      throw new RuntimeException("Unknown type: " + typeInfo.getCategory());
+          structFormattedType.append("`" + structElementName + "`:" + 
structElementType);
+        }
+        return "struct<" + structFormattedType.toString() + ">";
+      case LIST:
+        ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
+        String elementType = formatType(listTypeInfo.getListElementTypeInfo());
+        return "array<" + elementType + ">";
+      case MAP:
+        MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
+        String keyTypeInfo = mapTypeInfo.getMapKeyTypeInfo().getTypeName();
+        String valueTypeInfo = formatType(mapTypeInfo.getMapValueTypeInfo());
+        return "map<" + keyTypeInfo + "," + valueTypeInfo + ">";
+      case UNION:
+        StringBuilder unionFormattedType = new StringBuilder();
+
+        UnionTypeInfo unionTypeInfo = (UnionTypeInfo) typeInfo;
+        for (TypeInfo unionElementTypeInfo : 
unionTypeInfo.getAllUnionObjectTypeInfos()) {
+          if (unionFormattedType.length() != 0) {
+            unionFormattedType.append(", ");
+          }
+
+          String unionElementType = formatType(unionElementTypeInfo);
+          unionFormattedType.append(unionElementType);
+        }
+        return "uniontype<" + unionFormattedType.toString() + ">";
+      default:
+        throw new RuntimeException("Unknown type: " + typeInfo.getCategory());
     }
   }
 
@@ -871,7 +896,7 @@ public class DDLPlanUtils {
       return "";
     }
     List<String> partitionCols = new ArrayList<String>();
-    for(String col:table.getPartColNames()) {
+    for (String col : table.getPartColNames()) {
       partitionCols.add('`' + col + '`');
     }
     return " PARTITIONED ON (" + StringUtils.join(partitionCols, ", ") + ")";
@@ -896,7 +921,7 @@ public class DDLPlanUtils {
 
   private String getPartitionsBySpec(Table table) {
     if (table.isNonNative() && table.getStorageHandler() != null &&
-        table.getStorageHandler().supportsPartitionTransform()) {
+      table.getStorageHandler().supportsPartitionTransform()) {
       List<PartitionTransformSpec> specs = 
table.getStorageHandler().getPartitionTransformSpec(table);
       if (specs.isEmpty()) {
         return "";
@@ -907,8 +932,8 @@ public class DDLPlanUtils {
           partitionTransforms.add(spec.getColumnName());
         } else {
           partitionTransforms.add(spec.getTransformType().name() + "(" +
-              (spec.getTransformParam().isPresent() ? 
spec.getTransformParam().get() + ", " : "") +
-              spec.getColumnName() + ")");
+            (spec.getTransformParam().isPresent() ? 
spec.getTransformParam().get() + ", " : "") +
+            spec.getColumnName() + ")");
         }
       }
       return "PARTITIONED BY SPEC ( \n" + 
StringUtils.join(partitionTransforms, ", \n") + ")";
@@ -950,8 +975,8 @@ public class DDLPlanUtils {
     }
 
     String skewed =
-        "SKEWED BY (" + StringUtils.join(skewedInfo.getSkewedColNames(), ",") 
+ ")\n" +
-            "  ON (" + StringUtils.join(columnValuesList, ",") + ")";
+      "SKEWED BY (" + StringUtils.join(skewedInfo.getSkewedColNames(), ",") + 
")\n" +
+        "  ON (" + StringUtils.join(columnValuesList, ",") + ")";
     if (table.isStoredAsSubDirectories()) {
       skewed += "\n  STORED AS DIRECTORIES";
     }
@@ -965,8 +990,8 @@ public class DDLPlanUtils {
     SerDeInfo serdeInfo = sd.getSerdeInfo();
 
     rowFormat
-        .append("ROW FORMAT SERDE \n")
-        .append("  '" + 
HiveStringUtils.escapeHiveCommand(serdeInfo.getSerializationLib()) + "' \n");
+      .append("ROW FORMAT SERDE \n")
+      .append("  '" + 
HiveStringUtils.escapeHiveCommand(serdeInfo.getSerializationLib()) + "' \n");
 
     Map<String, String> serdeParams = serdeInfo.getParameters();
     if (table.getStorageHandler() == null) {
@@ -979,8 +1004,8 @@ public class DDLPlanUtils {
         rowFormat.append(" \n");
       }
       rowFormat
-          .append("STORED AS INPUTFORMAT \n  '" + 
HiveStringUtils.escapeHiveCommand(sd.getInputFormat()) + "' \n")
-          .append("OUTPUTFORMAT \n  '" + 
HiveStringUtils.escapeHiveCommand(sd.getOutputFormat()) + "'");
+        .append("STORED AS INPUTFORMAT \n  '" + 
HiveStringUtils.escapeHiveCommand(sd.getInputFormat()) + "' \n")
+        .append("OUTPUTFORMAT \n  '" + 
HiveStringUtils.escapeHiveCommand(sd.getOutputFormat()) + "'");
     } else {
       String metaTableStorage = table.getParameters().get(META_TABLE_STORAGE);
       rowFormat.append("STORED BY \n  '" + 
HiveStringUtils.escapeHiveCommand(metaTableStorage) + "' \n");
@@ -997,13 +1022,13 @@ public class DDLPlanUtils {
     List<String> serdeCols = new ArrayList<String>();
     for (Entry<String, String> entry : sortedSerdeParams.entrySet()) {
       serdeCols.add("  '" + entry.getKey() + "'='" +
-          
HiveStringUtils.escapeUnicode(HiveStringUtils.escapeHiveCommand(entry.getValue()))
 + "'");
+        
HiveStringUtils.escapeUnicode(HiveStringUtils.escapeHiveCommand(entry.getValue()))
 + "'");
     }
 
     builder
-        .append("WITH SERDEPROPERTIES ( \n")
-        .append(StringUtils.join(serdeCols, ", \n"))
-        .append(')');
+      .append("WITH SERDEPROPERTIES ( \n")
+      .append(StringUtils.join(serdeCols, ", \n"))
+      .append(')');
   }
 
   private String getLocationBlock(Table table) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
index 423583d..c59f44f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
@@ -430,7 +430,8 @@ public class ExplainTask extends Task<ExplainWork> 
implements Serializable {
     return jsonObject;
   }
 
-  public void addCreateTableStatement(Table table, List<String> 
tableCreateStmt , DDLPlanUtils ddlPlanUtils){
+  public void addCreateTableStatement(Table table, List<String> 
tableCreateStmt , DDLPlanUtils ddlPlanUtils)
+    throws HiveException {
     tableCreateStmt.add(ddlPlanUtils.getCreateTableCommand(table, false) + 
";");
   }
   
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/metadata/CheckConstraint.java 
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/CheckConstraint.java
index 5eb986e..7649180 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/CheckConstraint.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/CheckConstraint.java
@@ -34,13 +34,40 @@ import 
org.apache.hadoop.hive.metastore.api.SQLCheckConstraint;
 @SuppressWarnings("serial")
 public class CheckConstraint implements Serializable {
 
-  public class CheckConstraintCol {
-    public String colName;
-    public String checkExpression;
-
-    public CheckConstraintCol(String colName, String checkExpression) {
+  public static class CheckConstraintCol {
+    private final String colName;
+    private final String checkExpression;
+    private final String enable;
+    private final String validate;
+    private final String rely;
+
+    public CheckConstraintCol(String colName, String checkExpression, String 
enable,
+                              String validate, String rely) {
       this.colName = colName;
-      this.checkExpression= checkExpression;
+      this.checkExpression = checkExpression;
+      this.enable = enable;
+      this.validate = validate;
+      this.rely = rely;
+    }
+
+    public String getColName() {
+      return colName;
+    }
+
+    public String getCheckExpression() {
+      return checkExpression;
+    }
+
+    public String getEnable() {
+      return enable;
+    }
+
+    public String getValidate() {
+      return validate;
+    }
+
+    public String getRely() {
+      return rely;
     }
   }
 
@@ -56,7 +83,7 @@ public class CheckConstraint implements Serializable {
   public CheckConstraint() {}
 
   public CheckConstraint(List<SQLCheckConstraint> checkConstraintsList) {
-    checkConstraints = new TreeMap<String, List<CheckConstraintCol>>();
+    checkConstraints = new TreeMap<>();
     checkExpressionList = new ArrayList<>();
     if (checkConstraintsList == null) {
       return;
@@ -65,13 +92,16 @@ public class CheckConstraint implements Serializable {
       this.tableName = checkConstraintsList.get(0).getTable_name();
       this.databaseName= checkConstraintsList.get(0).getTable_db();
     }
-    for (SQLCheckConstraint uk : checkConstraintsList) {
-      String colName = uk.getColumn_name();
-      String check_expression = uk.getCheck_expression();
+    for (SQLCheckConstraint constraint : checkConstraintsList) {
+      String colName = constraint.getColumn_name();
+      String check_expression = constraint.getCheck_expression();
+      String enable = constraint.isEnable_cstr()? "ENABLE": "DISABLE";
+      String validate = constraint.isValidate_cstr()? "VALIDATE": "NOVALIDATE";
+      String rely = constraint.isRely_cstr()? "RELY": "NORELY";
       checkExpressionList.add(check_expression);
       CheckConstraintCol currCol = new CheckConstraintCol(
-          colName, check_expression);
-      String constraintName = uk.getDc_name();
+        colName, check_expression, enable, validate, rely);
+      String constraintName = constraint.getDc_name();
       if (checkConstraints.containsKey(constraintName)) {
         checkConstraints.get(constraintName).add(currCol);
       } else {
@@ -119,7 +149,7 @@ public class CheckConstraint implements Serializable {
     return sb.toString();
   }
 
-  public static boolean isCheckConstraintNotEmpty(CheckConstraint info) {
+  public static boolean isNotEmpty(CheckConstraint info) {
     return info != null && !info.getCheckConstraints().isEmpty();
   }
 }
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultConstraint.java 
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultConstraint.java
index c101f3d..c0d12ef 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultConstraint.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultConstraint.java
@@ -36,10 +36,16 @@ public class DefaultConstraint implements Serializable {
   public class DefaultConstraintCol {
     public String colName;
     public String defaultVal;
+    public String enable;
+    public String validate;
+    public String rely;
 
-    public DefaultConstraintCol(String colName, String defaultVal) {
+    public DefaultConstraintCol(String colName, String defaultVal, String 
enable, String validate, String rely) {
       this.colName = colName;
       this.defaultVal = defaultVal;
+      this.enable = enable;
+      this.validate = validate;
+      this.rely = rely;
     }
   }
 
@@ -56,8 +62,8 @@ public class DefaultConstraint implements Serializable {
   public DefaultConstraint(List<SQLDefaultConstraint> defaultConstraintList, 
String tableName, String databaseName) {
     this.tableName = tableName;
     this.databaseName = databaseName;
-    defaultConstraints = new TreeMap<String, List<DefaultConstraintCol>>();
-    colNameToDefaultValueMap = new TreeMap<String, String>();
+    defaultConstraints = new TreeMap<>();
+    colNameToDefaultValueMap = new TreeMap<>();
     if (defaultConstraintList == null) {
       return;
     }
@@ -67,8 +73,11 @@ public class DefaultConstraint implements Serializable {
         String colName = uk.getColumn_name();
         String defVal = uk.getDefault_value();
         colNameToDefaultValueMap.put(colName, defVal);
+        String enable = uk.isEnable_cstr()? "ENABLE": "DISABLE";
+        String validate = uk.isValidate_cstr()? "VALIDATE": "NOVALIDATE";
+        String rely = uk.isRely_cstr()? "RELY": "NORELY";
         DefaultConstraintCol currCol = new DefaultConstraintCol(
-                colName, defVal);
+                colName, defVal, enable, validate, rely);
         String constraintName = uk.getDc_name();
         if (defaultConstraints.containsKey(constraintName)) {
           defaultConstraints.get(constraintName).add(currCol);
@@ -119,7 +128,7 @@ public class DefaultConstraint implements Serializable {
     return sb.toString();
   }
 
-  public static boolean isCheckConstraintNotEmpty(DefaultConstraint info) {
+  public static boolean isNotEmpty(DefaultConstraint info) {
     return info != null && !info.getDefaultConstraints().isEmpty();
   }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java 
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java
index f2c978a..890edb2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java
@@ -43,14 +43,20 @@ public class ForeignKeyInfo implements Serializable {
     public String parentColName;
     public String childColName;
     public Integer position;
+    public String enable;
+    public String validate;
+    public String rely;
 
     public ForeignKeyCol(String parentTableName, String parentDatabaseName, 
String parentColName,
-      String childColName, Integer position) {
+      String childColName, Integer position, String enable, String validate, 
String rely) {
       this.parentTableName = parentTableName;
       this.parentDatabaseName = parentDatabaseName;
       this.parentColName = parentColName;
       this.childColName = childColName;
       this.position = position;
+      this.enable = enable;
+      this.validate = validate;
+      this.rely = rely;
     }
   }
 
@@ -64,15 +70,18 @@ public class ForeignKeyInfo implements Serializable {
   public ForeignKeyInfo(List<SQLForeignKey> fks, String childTableName, String 
childDatabaseName) {
     this.childTableName = childTableName;
     this.childDatabaseName = childDatabaseName;
-    foreignKeys = new TreeMap<String, List<ForeignKeyCol>>();
+    foreignKeys = new TreeMap<>();
     if (fks == null) {
       return;
     }
     for (SQLForeignKey fk : fks) {
       if (fk.getFktable_db().equalsIgnoreCase(childDatabaseName) &&
           fk.getFktable_name().equalsIgnoreCase(childTableName)) {
+        String enable = fk.isEnable_cstr()? "ENABLE": "DISABLE";
+        String validate = fk.isValidate_cstr()? "VALIDATE": "NOVALIDATE";
+        String rely = fk.isRely_cstr()? "RELY": "NORELY";
         ForeignKeyCol currCol = new ForeignKeyCol(fk.getPktable_name(), 
fk.getPktable_db(),
-          fk.getPkcolumn_name(), fk.getFkcolumn_name(), fk.getKey_seq());
+          fk.getPkcolumn_name(), fk.getFkcolumn_name(), fk.getKey_seq(), 
enable, validate, rely);
         String constraintName = fk.getFk_name();
         if (foreignKeys.containsKey(constraintName)) {
           foreignKeys.get(constraintName).add(currCol);
@@ -134,7 +143,7 @@ public class ForeignKeyInfo implements Serializable {
     return sb.toString();
   }
 
-  public static boolean isForeignKeyInfoNotEmpty(ForeignKeyInfo info) {
+  public static boolean isNotEmpty(ForeignKeyInfo info) {
     return info != null && !info.getForeignKeys().isEmpty();
   }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java 
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index b5a8d36..aeaaf58 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -6058,33 +6058,33 @@ private void constructOneLBLocationMap(FileStatus fSta,
       if (fetchReliable && tableConstraints != null) {
         if (CollectionUtils.isNotEmpty(tableConstraints.getPrimaryKeys())) {
           tableConstraints.setPrimaryKeys(
-              tableConstraints.getPrimaryKeys().stream().filter(primaryKey -> 
primaryKey.isRely_cstr())
+              
tableConstraints.getPrimaryKeys().stream().filter(SQLPrimaryKey::isRely_cstr)
                   .collect(Collectors.toList()));
         }
         if (CollectionUtils.isNotEmpty(tableConstraints.getForeignKeys())) {
           tableConstraints.setForeignKeys(
-              tableConstraints.getForeignKeys().stream().filter(foreignKey -> 
foreignKey.isRely_cstr())
+              
tableConstraints.getForeignKeys().stream().filter(SQLForeignKey::isRely_cstr)
                   .collect(Collectors.toList()));
         }
         if 
(CollectionUtils.isNotEmpty(tableConstraints.getUniqueConstraints())) {
           
tableConstraints.setUniqueConstraints(tableConstraints.getUniqueConstraints().stream()
-              .filter(uniqueConstraint -> 
uniqueConstraint.isRely_cstr()).collect(Collectors.toList()));
+              
.filter(SQLUniqueConstraint::isRely_cstr).collect(Collectors.toList()));
         }
         if 
(CollectionUtils.isNotEmpty(tableConstraints.getNotNullConstraints())) {
           
tableConstraints.setNotNullConstraints(tableConstraints.getNotNullConstraints().stream()
-              .filter(notNullConstraint -> 
notNullConstraint.isRely_cstr()).collect(Collectors.toList()));
+              
.filter(SQLNotNullConstraint::isRely_cstr).collect(Collectors.toList()));
         }
       }
 
       if (fetchEnabled && tableConstraints != null) {
         if 
(CollectionUtils.isNotEmpty(tableConstraints.getCheckConstraints())) {
           tableConstraints.setCheckConstraints(
-              
tableConstraints.getCheckConstraints().stream().filter(checkConstraint -> 
checkConstraint.isEnable_cstr())
+              
tableConstraints.getCheckConstraints().stream().filter(SQLCheckConstraint::isEnable_cstr)
                   .collect(Collectors.toList()));
         }
         if 
(CollectionUtils.isNotEmpty(tableConstraints.getDefaultConstraints())) {
           
tableConstraints.setDefaultConstraints(tableConstraints.getDefaultConstraints().stream()
-              .filter(defaultConstraint -> 
defaultConstraint.isEnable_cstr()).collect(Collectors.toList()));
+              
.filter(SQLDefaultConstraint::isEnable_cstr).collect(Collectors.toList()));
         }
       }
       return new TableConstraintsInfo(new 
PrimaryKeyInfo(tableConstraints.getPrimaryKeys(), tblName, dbName),
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/metadata/NotNullConstraint.java 
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/NotNullConstraint.java
index 8b50b7c..43439dd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/NotNullConstraint.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/NotNullConstraint.java
@@ -19,10 +19,12 @@
 package org.apache.hadoop.hive.ql.metadata;
 
 import java.io.Serializable;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
 
+import com.google.common.collect.ImmutableList;
 import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
 
 /**
@@ -36,19 +38,25 @@ public class NotNullConstraint implements Serializable {
   Map<String, String> notNullConstraints;
   String databaseName;
   String tableName;
+  Map<String, List<String>> enableValidateRely;
 
   public NotNullConstraint() {}
 
   public NotNullConstraint(List<SQLNotNullConstraint> nns, String tableName, 
String databaseName) {
     this.databaseName = databaseName;
     this.tableName = tableName;
-    this.notNullConstraints = new TreeMap<String, String>();
+    this.notNullConstraints = new TreeMap<>();
+    enableValidateRely = new HashMap<>();
     if (nns ==null) {
       return;
     }
     for (SQLNotNullConstraint pk : nns) {
       if (pk.getTable_db().equalsIgnoreCase(databaseName) &&
           pk.getTable_name().equalsIgnoreCase(tableName)) {
+        String enable = pk.isEnable_cstr()? "ENABLE": "DISABLE";
+        String validate = pk.isValidate_cstr()? "VALIDATE": "NOVALIDATE";
+        String rely = pk.isRely_cstr()? "RELY": "NORELY";
+        enableValidateRely.put(pk.getNn_name(), ImmutableList.of(enable, 
validate, rely));
         notNullConstraints.put(pk.getNn_name(), pk.getColumn_name());
       }
     }
@@ -66,6 +74,10 @@ public class NotNullConstraint implements Serializable {
     return notNullConstraints;
   }
 
+  public Map<String, List<String>> getEnableValidateRely() {
+    return enableValidateRely;
+  }
+
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
@@ -83,7 +95,7 @@ public class NotNullConstraint implements Serializable {
     return sb.toString();
   }
 
-  public static boolean isNotNullConstraintNotEmpty(NotNullConstraint info) {
+  public static boolean isNotEmpty(NotNullConstraint info) {
     return info != null && !info.getNotNullConstraints().isEmpty();
   }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/PrimaryKeyInfo.java 
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/PrimaryKeyInfo.java
index f9348c6..677b446 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/PrimaryKeyInfo.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/PrimaryKeyInfo.java
@@ -34,20 +34,28 @@ import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
 @SuppressWarnings("serial")
 public class PrimaryKeyInfo implements Serializable {
 
-  Map<Integer, String> colNames;
-  String constraintName;
-  String tableName;
-  String databaseName;
+  private Map<Integer, String> colNames;
+  private String constraintName;
+  private String tableName;
+  private String databaseName;
+  private String enable;
+  private String validate;
+  private String rely;
 
   public PrimaryKeyInfo() {}
 
   public PrimaryKeyInfo(List<SQLPrimaryKey> pks, String tableName, String 
databaseName) {
     this.tableName = tableName;
     this.databaseName = databaseName;
-    this.colNames = new TreeMap<Integer, String>();
+    this.colNames = new TreeMap<>();
     if (pks ==null) {
       return;
     }
+    if (!pks.isEmpty()) {
+      this.enable = pks.get(0).isEnable_cstr()? "ENABLE": "DISABLE";
+      this.validate = pks.get(0).isValidate_cstr()? "VALIDATE": "NOVALIDATE";
+      this.rely = pks.get(0).isRely_cstr()? "RELY": "NORELY";
+    }
     for (SQLPrimaryKey pk : pks) {
       if (pk.getTable_db().equalsIgnoreCase(databaseName) &&
           pk.getTable_name().equalsIgnoreCase(tableName)) {
@@ -88,7 +96,31 @@ public class PrimaryKeyInfo implements Serializable {
   public void setColNames(Map<Integer, String> colNames) {
     this.colNames = colNames;
   }
-  
+
+  public String getEnable() {
+    return enable;
+  }
+
+  public void setEnable(String enable) {
+    this.enable = enable;
+  }
+
+  public String getValidate() {
+    return validate;
+  }
+
+  public void setValidate(String validate) {
+    this.validate = validate;
+  }
+
+  public String getRely() {
+    return rely;
+  }
+
+  public void setRely(String rely) {
+    this.rely = rely;
+  }
+
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
@@ -104,7 +136,7 @@ public class PrimaryKeyInfo implements Serializable {
     return sb.toString();
   }
 
-  public static boolean isPrimaryKeyInfoNotEmpty(PrimaryKeyInfo info) {
+  public static boolean isNotEmpty(PrimaryKeyInfo info) {
     return info != null && !info.getColNames().isEmpty();
   }
 }
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/metadata/TableConstraintsInfo.java 
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/TableConstraintsInfo.java
index 247098d..cfe2038 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/TableConstraintsInfo.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/TableConstraintsInfo.java
@@ -89,11 +89,11 @@ public class TableConstraintsInfo {
   }
 
   public boolean isTableConstraintsInfoNotEmpty() {
-    return PrimaryKeyInfo.isPrimaryKeyInfoNotEmpty(this.getPrimaryKeyInfo()) 
|| ForeignKeyInfo
-        .isForeignKeyInfoNotEmpty(this.getForeignKeyInfo()) || UniqueConstraint
-        .isUniqueConstraintNotEmpty(this.getUniqueConstraint()) || 
NotNullConstraint
-        .isNotNullConstraintNotEmpty(this.getNotNullConstraint()) || 
CheckConstraint
-        .isCheckConstraintNotEmpty(this.getCheckConstraint()) || 
DefaultConstraint
-        .isCheckConstraintNotEmpty(this.getDefaultConstraint());
+    return PrimaryKeyInfo.isNotEmpty(this.getPrimaryKeyInfo()) ||
+      ForeignKeyInfo.isNotEmpty(this.getForeignKeyInfo()) ||
+      UniqueConstraint.isNotEmpty(this.getUniqueConstraint()) ||
+      NotNullConstraint.isNotEmpty(this.getNotNullConstraint()) ||
+      CheckConstraint.isNotEmpty(this.getCheckConstraint()) ||
+      DefaultConstraint.isNotEmpty(this.getDefaultConstraint());
   }
 }
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/metadata/UniqueConstraint.java 
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/UniqueConstraint.java
index 1fbe76d..502f3a5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/UniqueConstraint.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/UniqueConstraint.java
@@ -36,10 +36,16 @@ public class UniqueConstraint implements Serializable {
   public class UniqueConstraintCol {
     public String colName;
     public Integer position;
+    public String enable;
+    public String validate;
+    public String rely;
 
-    public UniqueConstraintCol(String colName, Integer position) {
+    public UniqueConstraintCol(String colName, Integer position, String 
enable, String validate, String rely) {
       this.colName = colName;
       this.position = position;
+      this.enable = enable;
+      this.validate = validate;
+      this.rely = rely;
     }
   }
 
@@ -53,15 +59,18 @@ public class UniqueConstraint implements Serializable {
   public UniqueConstraint(List<SQLUniqueConstraint> uks, String tableName, 
String databaseName) {
     this.tableName = tableName;
     this.databaseName = databaseName;
-    uniqueConstraints = new TreeMap<String, List<UniqueConstraintCol>>();
+    uniqueConstraints = new TreeMap<>();
     if (uks == null) {
       return;
     }
     for (SQLUniqueConstraint uk : uks) {
       if (uk.getTable_db().equalsIgnoreCase(databaseName) &&
           uk.getTable_name().equalsIgnoreCase(tableName)) {
+        String enable = uk.isEnable_cstr()? "ENABLE": "DISABLE";
+        String validate = uk.isValidate_cstr()? "VALIDATE": "NOVALIDATE";
+        String rely = uk.isRely_cstr()? "RELY": "NORELY";
         UniqueConstraintCol currCol = new UniqueConstraintCol(
-                uk.getColumn_name(), uk.getKey_seq());
+                uk.getColumn_name(), uk.getKey_seq(), enable, validate, rely);
         String constraintName = uk.getUk_name();
         if (uniqueConstraints.containsKey(constraintName)) {
           uniqueConstraints.get(constraintName).add(currCol);
@@ -109,7 +118,7 @@ public class UniqueConstraint implements Serializable {
     return sb.toString();
   }
 
-  public static boolean isUniqueConstraintNotEmpty(UniqueConstraint info) {
+  public static boolean isNotEmpty(UniqueConstraint info) {
     return info != null && !info.getUniqueConstraints().isEmpty();
   }
 }
diff --git a/ql/src/test/queries/clientpositive/show_create_table.q 
b/ql/src/test/queries/clientpositive/show_create_table.q
new file mode 100644
index 0000000..0262b1e1
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/show_create_table.q
@@ -0,0 +1,44 @@
+CREATE TABLE TEST(
+  col1 varchar(100) NOT NULL COMMENT "comment for column 1",
+  col2 timestamp DEFAULT CURRENT_TIMESTAMP() COMMENT "comment for column 2",
+  col3 decimal CHECK (col3 + col4 > 1) enable novalidate rely,
+  col4 decimal NOT NULL,
+  col5 varchar(100),
+  primary key(col1, col2) disable novalidate rely,
+  constraint c3_c4_check CHECK((col3 + col4)/(col3 - col4) > 3) enable 
novalidate norely,
+  constraint c4_unique UNIQUE(col4) disable novalidate rely)
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
+STORED AS INPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
+OUTPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat';
+
+CREATE TABLE TEST2(
+ col varchar(100),
+ primary key(col) disable novalidate rely)
+ROW FORMAT SERDE
+'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
+STORED AS INPUTFORMAT
+'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
+OUTPUTFORMAT
+'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat';
+
+CREATE TABLE TEST3(
+  col1 varchar(100) COMMENT "comment",
+  col2 timestamp,
+  col3 varchar(100),
+  foreign key(col1, col2) references TEST(col1, col2) disable novalidate rely,
+  foreign key(col3) references TEST2(col) disable novalidate norely)
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
+STORED AS INPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
+OUTPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat';
+
+SHOW CREATE TABLE TEST;
+
+SHOW CREATE TABLE TEST2;
+
+SHOW CREATE TABLE TEST3;
diff --git 
a/ql/src/test/results/clientpositive/llap/constraints_explain_ddl.q.out 
b/ql/src/test/results/clientpositive/llap/constraints_explain_ddl.q.out
index c38adaa..28063e8 100644
--- a/ql/src/test/results/clientpositive/llap/constraints_explain_ddl.q.out
+++ b/ql/src/test/results/clientpositive/llap/constraints_explain_ddl.q.out
@@ -99,7 +99,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.customer_removal_n0 ADD CONSTRAINT #### A masked pattern 
was here #### PRIMARY KEY (c_custkey) DISABLE NOVALIDATE;
+ALTER TABLE default.customer_removal_n0 ADD CONSTRAINT #### A masked pattern 
was here #### PRIMARY KEY (c_custkey) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.customer_removal_n0 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 
 
@@ -170,7 +170,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.customer_removal_n0 ADD CONSTRAINT #### A masked pattern 
was here #### PRIMARY KEY (c_custkey) DISABLE NOVALIDATE;
+ALTER TABLE default.customer_removal_n0 ADD CONSTRAINT #### A masked pattern 
was here #### PRIMARY KEY (c_custkey) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.customer_removal_n0 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 
 
@@ -251,7 +251,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.dates_removal_n0 ADD CONSTRAINT #### A masked pattern was 
here #### PRIMARY KEY (d_datekey,d_id) DISABLE NOVALIDATE;
+ALTER TABLE default.dates_removal_n0 ADD CONSTRAINT #### A masked pattern was 
here #### PRIMARY KEY (d_datekey,d_id) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.dates_removal_n0 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 
 
@@ -334,7 +334,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.dates_removal_n0 ADD CONSTRAINT #### A masked pattern was 
here #### PRIMARY KEY (d_datekey,d_id) DISABLE NOVALIDATE;
+ALTER TABLE default.dates_removal_n0 ADD CONSTRAINT #### A masked pattern was 
here #### PRIMARY KEY (d_datekey,d_id) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.dates_removal_n0 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 
 
@@ -480,7 +480,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.dates_removal_n0 ADD CONSTRAINT #### A masked pattern was 
here #### PRIMARY KEY (d_datekey,d_id) DISABLE NOVALIDATE;
+ALTER TABLE default.dates_removal_n0 ADD CONSTRAINT #### A masked pattern was 
here #### PRIMARY KEY (d_datekey,d_id) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.dates_removal_n0 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 
 
@@ -626,7 +626,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.dates_removal_n0 ADD CONSTRAINT #### A masked pattern was 
here #### PRIMARY KEY (d_datekey,d_id) DISABLE NOVALIDATE;
+ALTER TABLE default.dates_removal_n0 ADD CONSTRAINT #### A masked pattern was 
here #### PRIMARY KEY (d_datekey,d_id) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.dates_removal_n0 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 
 
@@ -897,9 +897,9 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.customer_removal_n0 ADD CONSTRAINT #### A masked pattern 
was here #### PRIMARY KEY (c_custkey) DISABLE NOVALIDATE;
+ALTER TABLE default.customer_removal_n0 ADD CONSTRAINT #### A masked pattern 
was here #### PRIMARY KEY (c_custkey) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.customer_removal_n0 UPDATE STATISTICS 
SET('numRows'='1','rawDataSize'='22' );
-ALTER TABLE default.dates_removal_n0 ADD CONSTRAINT #### A masked pattern was 
here #### PRIMARY KEY (d_datekey,d_id) DISABLE NOVALIDATE;
+ALTER TABLE default.dates_removal_n0 ADD CONSTRAINT #### A masked pattern was 
here #### PRIMARY KEY (d_datekey,d_id) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.dates_removal_n0 UPDATE STATISTICS 
SET('numRows'='2','rawDataSize'='102' );
 ALTER TABLE default.customer_removal_n0 UPDATE STATISTICS FOR COLUMN c_address 
SET('avgColLen'='0.0','maxColLen'='0','numNulls'='1','numDVs'='1' );
 -- BIT VECTORS PRESENT FOR default.customer_removal_n0 FOR COLUMN c_address 
BUT THEY ARE NOT SUPPORTED YET. THE BASE64 VALUE FOR THE BITVECTOR IS SExMoAEA 
@@ -1107,7 +1107,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.dates_removal_n0 ADD CONSTRAINT #### A masked pattern was 
here #### PRIMARY KEY (d_datekey,d_id) DISABLE NOVALIDATE;
+ALTER TABLE default.dates_removal_n0 ADD CONSTRAINT #### A masked pattern was 
here #### PRIMARY KEY (d_datekey,d_id) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.dates_removal_n0 UPDATE STATISTICS 
SET('numRows'='2','rawDataSize'='102' );
 ALTER TABLE default.dates_removal_n0 UPDATE STATISTICS FOR COLUMN d_date 
SET('avgColLen'='0.0','maxColLen'='0','numNulls'='2','numDVs'='1' );
 -- BIT VECTORS PRESENT FOR default.dates_removal_n0 FOR COLUMN d_date BUT THEY 
ARE NOT SUPPORTED YET. THE BASE64 VALUE FOR THE BITVECTOR IS SExMoAEA 
@@ -1310,7 +1310,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.customer_removal_n0 ADD CONSTRAINT #### A masked pattern 
was here #### PRIMARY KEY (c_custkey) DISABLE NOVALIDATE;
+ALTER TABLE default.customer_removal_n0 ADD CONSTRAINT #### A masked pattern 
was here #### PRIMARY KEY (c_custkey) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.customer_removal_n0 UPDATE STATISTICS 
SET('numRows'='1','rawDataSize'='22' );
 ALTER TABLE default.customer_removal_n0 UPDATE STATISTICS FOR COLUMN c_address 
SET('avgColLen'='0.0','maxColLen'='0','numNulls'='1','numDVs'='1' );
 -- BIT VECTORS PRESENT FOR default.customer_removal_n0 FOR COLUMN c_address 
BUT THEY ARE NOT SUPPORTED YET. THE BASE64 VALUE FOR THE BITVECTOR IS SExMoAEA 
@@ -1496,7 +1496,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.dest_g21 ADD CONSTRAINT #### A masked pattern was here 
#### PRIMARY KEY (key1) DISABLE NOVALIDATE;
+ALTER TABLE default.dest_g21 ADD CONSTRAINT #### A masked pattern was here 
#### PRIMARY KEY (key1) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.dest_g21 UPDATE STATISTICS 
SET('numRows'='6','rawDataSize'='28' );
 ALTER TABLE default.dest_g21 UPDATE STATISTICS FOR COLUMN key1 
SET('lowValue'='1','highValue'='6','numNulls'='0','numDVs'='6' );
 -- BIT VECTORS PRESENT FOR default.dest_g21 FOR COLUMN key1 BUT THEY ARE NOT 
SUPPORTED YET. THE BASE64 VALUE FOR THE BITVECTOR IS 
SExMoAYGwfO+SIGmu+f//////wHC9+jHAf6diLP//////wG/9IJOg97xwAI= 
@@ -1560,7 +1560,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.dest_g21 ADD CONSTRAINT #### A masked pattern was here 
#### PRIMARY KEY (key1) DISABLE NOVALIDATE;
+ALTER TABLE default.dest_g21 ADD CONSTRAINT #### A masked pattern was here 
#### PRIMARY KEY (key1) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.dest_g21 UPDATE STATISTICS 
SET('numRows'='6','rawDataSize'='28' );
 ALTER TABLE default.dest_g21 UPDATE STATISTICS FOR COLUMN key1 
SET('lowValue'='1','highValue'='6','numNulls'='0','numDVs'='6' );
 -- BIT VECTORS PRESENT FOR default.dest_g21 FOR COLUMN key1 BUT THEY ARE NOT 
SUPPORTED YET. THE BASE64 VALUE FOR THE BITVECTOR IS 
SExMoAYGwfO+SIGmu+f//////wHC9+jHAf6diLP//////wG/9IJOg97xwAI= 
@@ -1625,7 +1625,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.dest_g21 ADD CONSTRAINT #### A masked pattern was here 
#### PRIMARY KEY (key1) DISABLE NOVALIDATE;
+ALTER TABLE default.dest_g21 ADD CONSTRAINT #### A masked pattern was here 
#### PRIMARY KEY (key1) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.dest_g21 UPDATE STATISTICS 
SET('numRows'='6','rawDataSize'='28' );
 ALTER TABLE default.dest_g21 UPDATE STATISTICS FOR COLUMN key1 
SET('lowValue'='1','highValue'='6','numNulls'='0','numDVs'='6' );
 -- BIT VECTORS PRESENT FOR default.dest_g21 FOR COLUMN key1 BUT THEY ARE NOT 
SUPPORTED YET. THE BASE64 VALUE FOR THE BITVECTOR IS 
SExMoAYGwfO+SIGmu+f//////wHC9+jHAf6diLP//////wG/9IJOg97xwAI= 
@@ -1690,7 +1690,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.dest_g21 ADD CONSTRAINT #### A masked pattern was here 
#### PRIMARY KEY (key1) DISABLE NOVALIDATE;
+ALTER TABLE default.dest_g21 ADD CONSTRAINT #### A masked pattern was here 
#### PRIMARY KEY (key1) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.dest_g21 UPDATE STATISTICS 
SET('numRows'='6','rawDataSize'='28' );
 ALTER TABLE default.dest_g21 UPDATE STATISTICS FOR COLUMN key1 
SET('lowValue'='1','highValue'='6','numNulls'='0','numDVs'='6' );
 -- BIT VECTORS PRESENT FOR default.dest_g21 FOR COLUMN key1 BUT THEY ARE NOT 
SUPPORTED YET. THE BASE64 VALUE FOR THE BITVECTOR IS 
SExMoAYGwfO+SIGmu+f//////wHC9+jHAf6diLP//////wG/9IJOg97xwAI= 
@@ -1819,7 +1819,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.dest_g21 ADD CONSTRAINT #### A masked pattern was here 
#### PRIMARY KEY (key1) DISABLE NOVALIDATE;
+ALTER TABLE default.dest_g21 ADD CONSTRAINT #### A masked pattern was here 
#### PRIMARY KEY (key1) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.dest_g21 UPDATE STATISTICS 
SET('numRows'='6','rawDataSize'='28' );
 ALTER TABLE default.dest_g21 UPDATE STATISTICS FOR COLUMN key1 
SET('lowValue'='1','highValue'='6','numNulls'='0','numDVs'='6' );
 -- BIT VECTORS PRESENT FOR default.dest_g21 FOR COLUMN key1 BUT THEY ARE NOT 
SUPPORTED YET. THE BASE64 VALUE FOR THE BITVECTOR IS 
SExMoAYGwfO+SIGmu+f//////wHC9+jHAf6diLP//////wG/9IJOg97xwAI= 
@@ -1950,7 +1950,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.dest_g21 ADD CONSTRAINT #### A masked pattern was here 
#### PRIMARY KEY (key1) DISABLE NOVALIDATE;
+ALTER TABLE default.dest_g21 ADD CONSTRAINT #### A masked pattern was here 
#### PRIMARY KEY (key1) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.dest_g21 UPDATE STATISTICS 
SET('numRows'='6','rawDataSize'='28' );
 ALTER TABLE default.dest_g21 UPDATE STATISTICS FOR COLUMN key1 
SET('lowValue'='1','highValue'='6','numNulls'='0','numDVs'='6' );
 -- BIT VECTORS PRESENT FOR default.dest_g21 FOR COLUMN key1 BUT THEY ARE NOT 
SUPPORTED YET. THE BASE64 VALUE FOR THE BITVECTOR IS 
SExMoAYGwfO+SIGmu+f//////wHC9+jHAf6diLP//////wG/9IJOg97xwAI= 
@@ -2100,7 +2100,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.dest_g21 ADD CONSTRAINT #### A masked pattern was here 
#### PRIMARY KEY (key1) DISABLE NOVALIDATE;
+ALTER TABLE default.dest_g21 ADD CONSTRAINT #### A masked pattern was here 
#### PRIMARY KEY (key1) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.dest_g21 UPDATE STATISTICS 
SET('numRows'='6','rawDataSize'='28' );
 ALTER TABLE default.dest_g21 UPDATE STATISTICS FOR COLUMN key1 
SET('lowValue'='1','highValue'='6','numNulls'='0','numDVs'='6' );
 -- BIT VECTORS PRESENT FOR default.dest_g21 FOR COLUMN key1 BUT THEY ARE NOT 
SUPPORTED YET. THE BASE64 VALUE FOR THE BITVECTOR IS 
SExMoAYGwfO+SIGmu+f//////wHC9+jHAf6diLP//////wG/9IJOg97xwAI= 
@@ -2282,7 +2282,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.dest_g21 ADD CONSTRAINT #### A masked pattern was here 
#### PRIMARY KEY (key1) DISABLE NOVALIDATE;
+ALTER TABLE default.dest_g21 ADD CONSTRAINT #### A masked pattern was here 
#### PRIMARY KEY (key1) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.dest_g21 UPDATE STATISTICS 
SET('numRows'='6','rawDataSize'='28' );
 ALTER TABLE default.dest_g21 UPDATE STATISTICS FOR COLUMN key1 
SET('lowValue'='1','highValue'='6','numNulls'='0','numDVs'='6' );
 -- BIT VECTORS PRESENT FOR default.dest_g21 FOR COLUMN key1 BUT THEY ARE NOT 
SUPPORTED YET. THE BASE64 VALUE FOR THE BITVECTOR IS 
SExMoAYGwfO+SIGmu+f//////wHC9+jHAf6diLP//////wG/9IJOg97xwAI= 
@@ -2350,7 +2350,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.dest_g21 ADD CONSTRAINT #### A masked pattern was here 
#### PRIMARY KEY (key1) DISABLE NOVALIDATE;
+ALTER TABLE default.dest_g21 ADD CONSTRAINT #### A masked pattern was here 
#### PRIMARY KEY (key1) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.dest_g21 UPDATE STATISTICS 
SET('numRows'='6','rawDataSize'='28' );
 ALTER TABLE default.dest_g21 UPDATE STATISTICS FOR COLUMN key1 
SET('lowValue'='1','highValue'='6','numNulls'='0','numDVs'='6' );
 -- BIT VECTORS PRESENT FOR default.dest_g21 FOR COLUMN key1 BUT THEY ARE NOT 
SUPPORTED YET. THE BASE64 VALUE FOR THE BITVECTOR IS 
SExMoAYGwfO+SIGmu+f//////wHC9+jHAf6diLP//////wG/9IJOg97xwAI= 
@@ -2508,7 +2508,7 @@ TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
 ALTER TABLE default.tconst UPDATE STATISTICS 
SET('numRows'='3','rawDataSize'='25' );
-ALTER TABLE default.tconst CHANGE COLUMN i i int CONSTRAINT #### A masked 
pattern was here #### NOT NULL DISABLE;
+ALTER TABLE default.tconst CHANGE COLUMN i i int CONSTRAINT #### A masked 
pattern was here #### NOT NULL DISABLE NOVALIDATE RELY;
 ALTER TABLE default.tconst UPDATE STATISTICS FOR COLUMN d_year 
SET('avgColLen'='4.0','maxColLen'='4','numNulls'='0','numDVs'='3' );
 -- BIT VECTORS PRESENT FOR default.tconst FOR COLUMN d_year BUT THEY ARE NOT 
SUPPORTED YET. THE BASE64 VALUE FOR THE BITVECTOR IS 
SExMoAMDwrjb8gb/vrr2+f////8BgaCT+///////AQ== 
 ALTER TABLE default.tconst UPDATE STATISTICS FOR COLUMN i 
SET('lowValue'='1','highValue'='3','numNulls'='0','numDVs'='3' );
@@ -2637,7 +2637,7 @@ TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
 ALTER TABLE default.tconst UPDATE STATISTICS 
SET('numRows'='3','rawDataSize'='25' );
-ALTER TABLE default.tconst CHANGE COLUMN i i int CONSTRAINT #### A masked 
pattern was here #### NOT NULL DISABLE;
+ALTER TABLE default.tconst CHANGE COLUMN i i int CONSTRAINT #### A masked 
pattern was here #### NOT NULL DISABLE NOVALIDATE RELY;
 ALTER TABLE default.tconst UPDATE STATISTICS FOR COLUMN d_year 
SET('avgColLen'='4.0','maxColLen'='4','numNulls'='0','numDVs'='3' );
 -- BIT VECTORS PRESENT FOR default.tconst FOR COLUMN d_year BUT THEY ARE NOT 
SUPPORTED YET. THE BASE64 VALUE FOR THE BITVECTOR IS 
SExMoAMDwrjb8gb/vrr2+f////8BgaCT+///////AQ== 
 ALTER TABLE default.tconst UPDATE STATISTICS FOR COLUMN i 
SET('lowValue'='1','highValue'='3','numNulls'='0','numDVs'='3' );
@@ -2767,7 +2767,7 @@ TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
 ALTER TABLE default.tconst UPDATE STATISTICS 
SET('numRows'='3','rawDataSize'='25' );
-ALTER TABLE default.tconst CHANGE COLUMN i i int CONSTRAINT #### A masked 
pattern was here #### NOT NULL DISABLE;
+ALTER TABLE default.tconst CHANGE COLUMN i i int CONSTRAINT #### A masked 
pattern was here #### NOT NULL DISABLE NOVALIDATE RELY;
 ALTER TABLE default.tconst UPDATE STATISTICS FOR COLUMN d_year 
SET('avgColLen'='4.0','maxColLen'='4','numNulls'='0','numDVs'='3' );
 -- BIT VECTORS PRESENT FOR default.tconst FOR COLUMN d_year BUT THEY ARE NOT 
SUPPORTED YET. THE BASE64 VALUE FOR THE BITVECTOR IS 
SExMoAMDwrjb8gb/vrr2+f////8BgaCT+///////AQ== 
 ALTER TABLE default.tconst UPDATE STATISTICS FOR COLUMN i 
SET('lowValue'='1','highValue'='3','numNulls'='0','numDVs'='3' );
@@ -2896,7 +2896,7 @@ TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
 ALTER TABLE default.tconst UPDATE STATISTICS 
SET('numRows'='3','rawDataSize'='25' );
-ALTER TABLE default.tconst CHANGE COLUMN i i int CONSTRAINT #### A masked 
pattern was here #### NOT NULL DISABLE;
+ALTER TABLE default.tconst CHANGE COLUMN i i int CONSTRAINT #### A masked 
pattern was here #### NOT NULL DISABLE NOVALIDATE RELY;
 ALTER TABLE default.tconst UPDATE STATISTICS FOR COLUMN d_year 
SET('avgColLen'='4.0','maxColLen'='4','numNulls'='0','numDVs'='3' );
 -- BIT VECTORS PRESENT FOR default.tconst FOR COLUMN d_year BUT THEY ARE NOT 
SUPPORTED YET. THE BASE64 VALUE FOR THE BITVECTOR IS 
SExMoAMDwrjb8gb/vrr2+f////8BgaCT+///////AQ== 
 ALTER TABLE default.tconst UPDATE STATISTICS FOR COLUMN i 
SET('lowValue'='1','highValue'='3','numNulls'='0','numDVs'='3' );
@@ -3085,7 +3085,7 @@ TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
 ALTER TABLE default.tconst UPDATE STATISTICS 
SET('numRows'='3','rawDataSize'='25' );
-ALTER TABLE default.tconst CHANGE COLUMN i i int CONSTRAINT #### A masked 
pattern was here #### NOT NULL DISABLE;
+ALTER TABLE default.tconst CHANGE COLUMN i i int CONSTRAINT #### A masked 
pattern was here #### NOT NULL DISABLE NOVALIDATE RELY;
 ALTER TABLE default.tconst UPDATE STATISTICS FOR COLUMN d_year 
SET('avgColLen'='4.0','maxColLen'='4','numNulls'='0','numDVs'='3' );
 -- BIT VECTORS PRESENT FOR default.tconst FOR COLUMN d_year BUT THEY ARE NOT 
SUPPORTED YET. THE BASE64 VALUE FOR THE BITVECTOR IS 
SExMoAMDwrjb8gb/vrr2+f////8BgaCT+///////AQ== 
 ALTER TABLE default.tconst UPDATE STATISTICS FOR COLUMN i 
SET('lowValue'='1','highValue'='3','numNulls'='0','numDVs'='3' );
@@ -3284,8 +3284,8 @@ TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
 ALTER TABLE default.dest_g21 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-ALTER TABLE default.dest_g21 ADD CONSTRAINT #### A masked pattern was here 
#### UNIQUE (key1) DISABLE NOVALIDATE;
-ALTER TABLE default.dest_g21 CHANGE COLUMN key1 key1 int CONSTRAINT #### A 
masked pattern was here #### NOT NULL DISABLE;
+ALTER TABLE default.dest_g21 ADD CONSTRAINT #### A masked pattern was here 
#### UNIQUE (key1) DISABLE NOVALIDATE RELY;
+ALTER TABLE default.dest_g21 CHANGE COLUMN key1 key1 int CONSTRAINT #### A 
masked pattern was here #### NOT NULL DISABLE NOVALIDATE RELY;
 
 
 EXPLAIN select key1 from dest_g21 group by key1, value1;
@@ -3347,7 +3347,7 @@ TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
 ALTER TABLE default.dest_g24 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-ALTER TABLE default.dest_g24 ADD CONSTRAINT #### A masked pattern was here 
#### UNIQUE (key1) DISABLE NOVALIDATE;
+ALTER TABLE default.dest_g24 ADD CONSTRAINT #### A masked pattern was here 
#### UNIQUE (key1) DISABLE NOVALIDATE RELY;
 
 
 EXPLAIN select key1 from dest_g24 group by key1, value1;
@@ -3691,13 +3691,13 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.store_sales ADD CONSTRAINT pk_ss PRIMARY KEY 
(ss_item_sk,ss_ticket_number) DISABLE NOVALIDATE;
+ALTER TABLE default.store_sales ADD CONSTRAINT pk_ss PRIMARY KEY 
(ss_item_sk,ss_ticket_number) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.store_sales UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-ALTER TABLE default.customer ADD CONSTRAINT pk_c PRIMARY KEY (c_customer_sk) 
DISABLE NOVALIDATE;
+ALTER TABLE default.customer ADD CONSTRAINT pk_c PRIMARY KEY (c_customer_sk) 
DISABLE NOVALIDATE RELY;
 ALTER TABLE default.customer UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 ALTER TABLE default.store_sales ADD CONSTRAINT ss_c FOREIGN KEY 
(ss_customer_sk) REFERENCES default.customer(c_customer_sk) DISABLE NOVALIDATE 
RELY;
-ALTER TABLE default.customer ADD CONSTRAINT uk1 UNIQUE (c_customer_id) DISABLE 
NOVALIDATE;
-ALTER TABLE default.customer CHANGE COLUMN c_customer_id c_customer_id string 
CONSTRAINT cid_nn NOT NULL DISABLE;
+ALTER TABLE default.customer ADD CONSTRAINT uk1 UNIQUE (c_customer_id) DISABLE 
NOVALIDATE RELY;
+ALTER TABLE default.customer CHANGE COLUMN c_customer_id c_customer_id string 
CONSTRAINT cid_nn NOT NULL DISABLE NOVALIDATE RELY;
 
 
 
@@ -3975,13 +3975,13 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.store_sales ADD CONSTRAINT pk_ss PRIMARY KEY 
(ss_item_sk,ss_ticket_number) DISABLE NOVALIDATE;
+ALTER TABLE default.store_sales ADD CONSTRAINT pk_ss PRIMARY KEY 
(ss_item_sk,ss_ticket_number) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.store_sales UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-ALTER TABLE default.customer ADD CONSTRAINT pk_c PRIMARY KEY (c_customer_sk) 
DISABLE NOVALIDATE;
+ALTER TABLE default.customer ADD CONSTRAINT pk_c PRIMARY KEY (c_customer_sk) 
DISABLE NOVALIDATE RELY;
 ALTER TABLE default.customer UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 ALTER TABLE default.store_sales ADD CONSTRAINT ss_c FOREIGN KEY 
(ss_customer_sk) REFERENCES default.customer(c_customer_sk) DISABLE NOVALIDATE 
RELY;
-ALTER TABLE default.customer ADD CONSTRAINT uk1 UNIQUE (c_customer_id) DISABLE 
NOVALIDATE;
-ALTER TABLE default.customer CHANGE COLUMN c_customer_id c_customer_id string 
CONSTRAINT cid_nn NOT NULL DISABLE;
+ALTER TABLE default.customer ADD CONSTRAINT uk1 UNIQUE (c_customer_id) DISABLE 
NOVALIDATE RELY;
+ALTER TABLE default.customer CHANGE COLUMN c_customer_id c_customer_id string 
CONSTRAINT cid_nn NOT NULL DISABLE NOVALIDATE RELY;
 
 
 
@@ -4289,13 +4289,13 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.store_sales ADD CONSTRAINT pk_ss PRIMARY KEY 
(ss_item_sk,ss_ticket_number) DISABLE NOVALIDATE;
+ALTER TABLE default.store_sales ADD CONSTRAINT pk_ss PRIMARY KEY 
(ss_item_sk,ss_ticket_number) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.store_sales UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-ALTER TABLE default.customer ADD CONSTRAINT pk_c PRIMARY KEY (c_customer_sk) 
DISABLE NOVALIDATE;
+ALTER TABLE default.customer ADD CONSTRAINT pk_c PRIMARY KEY (c_customer_sk) 
DISABLE NOVALIDATE RELY;
 ALTER TABLE default.customer UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 ALTER TABLE default.store_sales ADD CONSTRAINT ss_c FOREIGN KEY 
(ss_customer_sk) REFERENCES default.customer(c_customer_sk) DISABLE NOVALIDATE 
RELY;
-ALTER TABLE default.customer ADD CONSTRAINT uk1 UNIQUE (c_customer_id) DISABLE 
NOVALIDATE;
-ALTER TABLE default.customer CHANGE COLUMN c_customer_id c_customer_id string 
CONSTRAINT cid_nn NOT NULL DISABLE;
+ALTER TABLE default.customer ADD CONSTRAINT uk1 UNIQUE (c_customer_id) DISABLE 
NOVALIDATE RELY;
+ALTER TABLE default.customer CHANGE COLUMN c_customer_id c_customer_id string 
CONSTRAINT cid_nn NOT NULL DISABLE NOVALIDATE RELY;
 
 
 
@@ -4684,13 +4684,13 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.store_sales ADD CONSTRAINT pk_ss PRIMARY KEY 
(ss_item_sk,ss_ticket_number) DISABLE NOVALIDATE;
+ALTER TABLE default.store_sales ADD CONSTRAINT pk_ss PRIMARY KEY 
(ss_item_sk,ss_ticket_number) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.store_sales UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-ALTER TABLE default.customer ADD CONSTRAINT pk_c PRIMARY KEY (c_customer_sk) 
DISABLE NOVALIDATE;
+ALTER TABLE default.customer ADD CONSTRAINT pk_c PRIMARY KEY (c_customer_sk) 
DISABLE NOVALIDATE RELY;
 ALTER TABLE default.customer UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 ALTER TABLE default.store_sales ADD CONSTRAINT ss_c FOREIGN KEY 
(ss_customer_sk) REFERENCES default.customer(c_customer_sk) DISABLE NOVALIDATE 
RELY;
-ALTER TABLE default.customer ADD CONSTRAINT uk1 UNIQUE (c_customer_id) DISABLE 
NOVALIDATE;
-ALTER TABLE default.customer CHANGE COLUMN c_customer_id c_customer_id string 
CONSTRAINT cid_nn NOT NULL DISABLE;
+ALTER TABLE default.customer ADD CONSTRAINT uk1 UNIQUE (c_customer_id) DISABLE 
NOVALIDATE RELY;
+ALTER TABLE default.customer CHANGE COLUMN c_customer_id c_customer_id string 
CONSTRAINT cid_nn NOT NULL DISABLE NOVALIDATE RELY;
 
 
 
@@ -5206,14 +5206,14 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.store_sales ADD CONSTRAINT pk_ss PRIMARY KEY 
(ss_item_sk,ss_ticket_number) DISABLE NOVALIDATE;
+ALTER TABLE default.store_sales ADD CONSTRAINT pk_ss PRIMARY KEY 
(ss_item_sk,ss_ticket_number) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.store_sales UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 ALTER TABLE default.date_dim UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-ALTER TABLE default.customer ADD CONSTRAINT pk_c PRIMARY KEY (c_customer_sk) 
DISABLE NOVALIDATE;
+ALTER TABLE default.customer ADD CONSTRAINT pk_c PRIMARY KEY (c_customer_sk) 
DISABLE NOVALIDATE RELY;
 ALTER TABLE default.customer UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 ALTER TABLE default.store_sales ADD CONSTRAINT ss_c FOREIGN KEY 
(ss_customer_sk) REFERENCES default.customer(c_customer_sk) DISABLE NOVALIDATE 
RELY;
-ALTER TABLE default.customer ADD CONSTRAINT uk1 UNIQUE (c_customer_id) DISABLE 
NOVALIDATE;
-ALTER TABLE default.customer CHANGE COLUMN c_customer_id c_customer_id string 
CONSTRAINT cid_nn NOT NULL DISABLE;
+ALTER TABLE default.customer ADD CONSTRAINT uk1 UNIQUE (c_customer_id) DISABLE 
NOVALIDATE RELY;
+ALTER TABLE default.customer CHANGE COLUMN c_customer_id c_customer_id string 
CONSTRAINT cid_nn NOT NULL DISABLE NOVALIDATE RELY;
 
 
 
@@ -5818,13 +5818,13 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.store_sales ADD CONSTRAINT pk_ss PRIMARY KEY 
(ss_item_sk,ss_ticket_number) DISABLE NOVALIDATE;
+ALTER TABLE default.store_sales ADD CONSTRAINT pk_ss PRIMARY KEY 
(ss_item_sk,ss_ticket_number) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.store_sales UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-ALTER TABLE default.customer ADD CONSTRAINT pk_c PRIMARY KEY (c_customer_sk) 
DISABLE NOVALIDATE;
+ALTER TABLE default.customer ADD CONSTRAINT pk_c PRIMARY KEY (c_customer_sk) 
DISABLE NOVALIDATE RELY;
 ALTER TABLE default.customer UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 ALTER TABLE default.store_sales ADD CONSTRAINT ss_c FOREIGN KEY 
(ss_customer_sk) REFERENCES default.customer(c_customer_sk) DISABLE NOVALIDATE 
RELY;
-ALTER TABLE default.customer ADD CONSTRAINT uk1 UNIQUE (c_customer_id) DISABLE 
NOVALIDATE;
-ALTER TABLE default.customer CHANGE COLUMN c_customer_id c_customer_id string 
CONSTRAINT cid_nn NOT NULL DISABLE;
+ALTER TABLE default.customer ADD CONSTRAINT uk1 UNIQUE (c_customer_id) DISABLE 
NOVALIDATE RELY;
+ALTER TABLE default.customer CHANGE COLUMN c_customer_id c_customer_id string 
CONSTRAINT cid_nn NOT NULL DISABLE NOVALIDATE RELY;
 
 
 
@@ -6110,13 +6110,13 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.store_sales ADD CONSTRAINT pk_ss PRIMARY KEY 
(ss_item_sk,ss_ticket_number) DISABLE NOVALIDATE;
+ALTER TABLE default.store_sales ADD CONSTRAINT pk_ss PRIMARY KEY 
(ss_item_sk,ss_ticket_number) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.store_sales UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-ALTER TABLE default.customer ADD CONSTRAINT pk_c PRIMARY KEY (c_customer_sk) 
DISABLE NOVALIDATE;
+ALTER TABLE default.customer ADD CONSTRAINT pk_c PRIMARY KEY (c_customer_sk) 
DISABLE NOVALIDATE RELY;
 ALTER TABLE default.customer UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 ALTER TABLE default.store_sales ADD CONSTRAINT ss_c FOREIGN KEY 
(ss_customer_sk) REFERENCES default.customer(c_customer_sk) DISABLE NOVALIDATE 
RELY;
-ALTER TABLE default.customer ADD CONSTRAINT uk1 UNIQUE (c_customer_id) DISABLE 
NOVALIDATE;
-ALTER TABLE default.customer CHANGE COLUMN c_customer_id c_customer_id string 
CONSTRAINT cid_nn NOT NULL DISABLE;
+ALTER TABLE default.customer ADD CONSTRAINT uk1 UNIQUE (c_customer_id) DISABLE 
NOVALIDATE RELY;
+ALTER TABLE default.customer CHANGE COLUMN c_customer_id c_customer_id string 
CONSTRAINT cid_nn NOT NULL DISABLE NOVALIDATE RELY;
 
 
 
@@ -6437,13 +6437,13 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.store_sales ADD CONSTRAINT pk_ss PRIMARY KEY 
(ss_item_sk,ss_ticket_number) DISABLE NOVALIDATE;
+ALTER TABLE default.store_sales ADD CONSTRAINT pk_ss PRIMARY KEY 
(ss_item_sk,ss_ticket_number) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.store_sales UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-ALTER TABLE default.customer ADD CONSTRAINT pk_c PRIMARY KEY (c_customer_sk) 
DISABLE NOVALIDATE;
+ALTER TABLE default.customer ADD CONSTRAINT pk_c PRIMARY KEY (c_customer_sk) 
DISABLE NOVALIDATE RELY;
 ALTER TABLE default.customer UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 ALTER TABLE default.store_sales ADD CONSTRAINT ss_c FOREIGN KEY 
(ss_customer_sk) REFERENCES default.customer(c_customer_sk) DISABLE NOVALIDATE 
RELY;
-ALTER TABLE default.customer ADD CONSTRAINT uk1 UNIQUE (c_customer_id) DISABLE 
NOVALIDATE;
-ALTER TABLE default.customer CHANGE COLUMN c_customer_id c_customer_id string 
CONSTRAINT cid_nn NOT NULL DISABLE;
+ALTER TABLE default.customer ADD CONSTRAINT uk1 UNIQUE (c_customer_id) DISABLE 
NOVALIDATE RELY;
+ALTER TABLE default.customer CHANGE COLUMN c_customer_id c_customer_id string 
CONSTRAINT cid_nn NOT NULL DISABLE NOVALIDATE RELY;
 
 
 
@@ -6761,13 +6761,13 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.store_sales ADD CONSTRAINT pk_ss PRIMARY KEY 
(ss_item_sk,ss_ticket_number) DISABLE NOVALIDATE;
+ALTER TABLE default.store_sales ADD CONSTRAINT pk_ss PRIMARY KEY 
(ss_item_sk,ss_ticket_number) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.store_sales UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-ALTER TABLE default.customer ADD CONSTRAINT pk_c PRIMARY KEY (c_customer_sk) 
DISABLE NOVALIDATE;
+ALTER TABLE default.customer ADD CONSTRAINT pk_c PRIMARY KEY (c_customer_sk) 
DISABLE NOVALIDATE RELY;
 ALTER TABLE default.customer UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 ALTER TABLE default.store_sales ADD CONSTRAINT ss_c FOREIGN KEY 
(ss_customer_sk) REFERENCES default.customer(c_customer_sk) DISABLE NOVALIDATE 
RELY;
-ALTER TABLE default.customer ADD CONSTRAINT uk1 UNIQUE (c_customer_id) DISABLE 
NOVALIDATE;
-ALTER TABLE default.customer CHANGE COLUMN c_customer_id c_customer_id string 
CONSTRAINT cid_nn NOT NULL DISABLE;
+ALTER TABLE default.customer ADD CONSTRAINT uk1 UNIQUE (c_customer_id) DISABLE 
NOVALIDATE RELY;
+ALTER TABLE default.customer CHANGE COLUMN c_customer_id c_customer_id string 
CONSTRAINT cid_nn NOT NULL DISABLE NOVALIDATE RELY;
 
 
 
@@ -7021,7 +7021,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.web_sales ADD CONSTRAINT pk1 PRIMARY KEY 
(ws_order_number,ws_item_sk) DISABLE NOVALIDATE;
+ALTER TABLE default.web_sales ADD CONSTRAINT pk1 PRIMARY KEY 
(ws_order_number,ws_item_sk) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.web_sales UPDATE STATISTICS 
SET('numRows'='2','rawDataSize'='14' );
 ALTER TABLE default.web_sales UPDATE STATISTICS FOR COLUMN ws_item_sk 
SET('lowValue'='1','highValue'='1','numNulls'='0','numDVs'='1' );
 -- BIT VECTORS PRESENT FOR default.web_sales FOR COLUMN ws_item_sk BUT THEY 
ARE NOT SUPPORTED YET. THE BASE64 VALUE FOR THE BITVECTOR IS SExMoAEBwfO+SA== 
@@ -7686,7 +7686,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.table7_n3 ADD CONSTRAINT #### A masked pattern was here 
#### PRIMARY KEY (a) DISABLE NOVALIDATE;
+ALTER TABLE default.table7_n3 ADD CONSTRAINT #### A masked pattern was here 
#### PRIMARY KEY (a) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.table7_n3 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 
 
@@ -7795,7 +7795,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.table9 ADD CONSTRAINT #### A masked pattern was here #### 
PRIMARY KEY (a,b) DISABLE NOVALIDATE;
+ALTER TABLE default.table9 ADD CONSTRAINT #### A masked pattern was here #### 
PRIMARY KEY (a,b) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.table9 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 
 
@@ -7905,7 +7905,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.table11 ADD CONSTRAINT pk11 PRIMARY KEY (a) DISABLE 
NOVALIDATE;
+ALTER TABLE default.table11 ADD CONSTRAINT pk11 PRIMARY KEY (a) DISABLE 
NOVALIDATE RELY;
 ALTER TABLE default.table11 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 
 
@@ -8016,7 +8016,7 @@ TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
 ALTER TABLE default.table13 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-ALTER TABLE default.table13 CHANGE COLUMN a a string CONSTRAINT #### A masked 
pattern was here #### NOT NULL DISABLE;
+ALTER TABLE default.table13 CHANGE COLUMN a a string CONSTRAINT #### A masked 
pattern was here #### NOT NULL DISABLE NOVALIDATE RELY;
 
 
 EXPLAIN select * from table13;
@@ -8070,7 +8070,7 @@ TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
 ALTER TABLE default.table14 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-ALTER TABLE default.table14 CHANGE COLUMN a a string CONSTRAINT nn14_1 NOT 
NULL DISABLE;
+ALTER TABLE default.table14 CHANGE COLUMN a a string CONSTRAINT nn14_1 NOT 
NULL DISABLE NOVALIDATE RELY;
 
 
 EXPLAIN select * from table14;
@@ -8232,7 +8232,7 @@ TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
 ALTER TABLE default.table17 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-ALTER TABLE default.table17 ADD CONSTRAINT uk17_1 UNIQUE (a) DISABLE 
NOVALIDATE;
+ALTER TABLE default.table17 ADD CONSTRAINT uk17_1 UNIQUE (a) DISABLE 
NOVALIDATE RELY;
 
 
 EXPLAIN select * from table17;
@@ -8287,7 +8287,7 @@ TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
 ALTER TABLE default.table18 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-ALTER TABLE default.table18 ADD CONSTRAINT uk18_1 UNIQUE (b) DISABLE 
NOVALIDATE;
+ALTER TABLE default.table18 ADD CONSTRAINT uk18_1 UNIQUE (b) DISABLE 
NOVALIDATE RELY;
 
 
 EXPLAIN select * from table18;
@@ -8340,7 +8340,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.table19 ADD CONSTRAINT pk19_1 PRIMARY KEY (b) DISABLE 
NOVALIDATE;
+ALTER TABLE default.table19 ADD CONSTRAINT pk19_1 PRIMARY KEY (b) DISABLE 
NOVALIDATE RELY;
 ALTER TABLE default.table19 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 ALTER TABLE default.table19 ADD CONSTRAINT fk19_2 FOREIGN KEY (a) REFERENCES 
default.table19(b) DISABLE NOVALIDATE RELY;
 
@@ -8396,7 +8396,7 @@ TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
 ALTER TABLE default.table20 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-ALTER TABLE default.table20 ADD CONSTRAINT uk20_1 UNIQUE (b) DISABLE 
NOVALIDATE;
+ALTER TABLE default.table20 ADD CONSTRAINT uk20_1 UNIQUE (b) DISABLE 
NOVALIDATE RELY;
 
 
 EXPLAIN select * from table20;
@@ -8883,7 +8883,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.table7_n3 ADD CONSTRAINT #### A masked pattern was here 
#### PRIMARY KEY (a) DISABLE NOVALIDATE;
+ALTER TABLE default.table7_n3 ADD CONSTRAINT #### A masked pattern was here 
#### PRIMARY KEY (a) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.table7_n3 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 
 
@@ -8992,7 +8992,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.table9 ADD CONSTRAINT #### A masked pattern was here #### 
PRIMARY KEY (a,b) DISABLE NOVALIDATE;
+ALTER TABLE default.table9 ADD CONSTRAINT #### A masked pattern was here #### 
PRIMARY KEY (a,b) DISABLE NOVALIDATE RELY;
 ALTER TABLE default.table9 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 
 
@@ -9102,7 +9102,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.table11 ADD CONSTRAINT pk11 PRIMARY KEY (a) DISABLE 
NOVALIDATE;
+ALTER TABLE default.table11 ADD CONSTRAINT pk11 PRIMARY KEY (a) DISABLE 
NOVALIDATE RELY;
 ALTER TABLE default.table11 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 
 
@@ -9213,7 +9213,7 @@ TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
 ALTER TABLE default.table13 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-ALTER TABLE default.table13 CHANGE COLUMN a a string CONSTRAINT #### A masked 
pattern was here #### NOT NULL DISABLE;
+ALTER TABLE default.table13 CHANGE COLUMN a a string CONSTRAINT #### A masked 
pattern was here #### NOT NULL DISABLE NOVALIDATE RELY;
 
 
 EXPLAIN select * from table13;
@@ -9267,7 +9267,7 @@ TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
 ALTER TABLE default.table14 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-ALTER TABLE default.table14 CHANGE COLUMN a a string CONSTRAINT nn14_1 NOT 
NULL DISABLE;
+ALTER TABLE default.table14 CHANGE COLUMN a a string CONSTRAINT nn14_1 NOT 
NULL DISABLE NOVALIDATE RELY;
 
 
 EXPLAIN select * from table14;
@@ -9429,7 +9429,7 @@ TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
 ALTER TABLE default.table17 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-ALTER TABLE default.table17 ADD CONSTRAINT uk17_1 UNIQUE (a) DISABLE 
NOVALIDATE;
+ALTER TABLE default.table17 ADD CONSTRAINT uk17_1 UNIQUE (a) DISABLE 
NOVALIDATE RELY;
 
 
 EXPLAIN select * from table17;
@@ -9484,7 +9484,7 @@ TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
 ALTER TABLE default.table18 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-ALTER TABLE default.table18 ADD CONSTRAINT uk18_1 UNIQUE (b) DISABLE 
NOVALIDATE;
+ALTER TABLE default.table18 ADD CONSTRAINT uk18_1 UNIQUE (b) DISABLE 
NOVALIDATE RELY;
 
 
 EXPLAIN select * from table18;
@@ -9537,7 +9537,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.table19 ADD CONSTRAINT pk19_1 PRIMARY KEY (b) DISABLE 
NOVALIDATE;
+ALTER TABLE default.table19 ADD CONSTRAINT pk19_1 PRIMARY KEY (b) DISABLE 
NOVALIDATE RELY;
 ALTER TABLE default.table19 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 ALTER TABLE default.table19 ADD CONSTRAINT fk19_2 FOREIGN KEY (a) REFERENCES 
default.table19(b) DISABLE NOVALIDATE RELY;
 
@@ -9593,7 +9593,7 @@ TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
 ALTER TABLE default.table20 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-ALTER TABLE default.table20 ADD CONSTRAINT uk20_1 UNIQUE (b) DISABLE 
NOVALIDATE;
+ALTER TABLE default.table20 ADD CONSTRAINT uk20_1 UNIQUE (b) DISABLE 
NOVALIDATE RELY;
 
 
 EXPLAIN select * from table20;
@@ -10678,7 +10678,7 @@ TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
 ALTER TABLE default.table3_n1 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-
+ALTER TABLE default.table3_n1 ADD CONSTRAINT fk1 FOREIGN KEY (x) REFERENCES 
default.table2_n8(a) DISABLE NOVALIDATE RELY;
 
 
 EXPLAIN select * from table3_n1;
@@ -10785,7 +10785,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.table8 ADD CONSTRAINT pk8_2 PRIMARY KEY (a,b) DISABLE 
NOVALIDATE;
+ALTER TABLE default.table8 ADD CONSTRAINT pk8_2 PRIMARY KEY (a,b) DISABLE 
NOVALIDATE RELY;
 ALTER TABLE default.table8 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 
 
@@ -11024,7 +11024,7 @@ TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
 ALTER TABLE default.table13 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
-ALTER TABLE default.table13 CHANGE COLUMN a a string CONSTRAINT #### A masked 
pattern was here #### NOT NULL DISABLE;
+ALTER TABLE default.table13 CHANGE COLUMN a a string CONSTRAINT #### A masked 
pattern was here #### NOT NULL DISABLE NOVALIDATE RELY;
 
 
 EXPLAIN select * from table13;
@@ -11547,7 +11547,7 @@ LOCATION
 TBLPROPERTIES (
   'bucketing_version'='2', 
 #### A masked pattern was here ####
-ALTER TABLE default.table23 ADD CONSTRAINT pk23_1 PRIMARY KEY (b) DISABLE 
NOVALIDATE;
+ALTER TABLE default.table23 ADD CONSTRAINT pk23_1 PRIMARY KEY (b) DISABLE 
NOVALIDATE RELY;
 ALTER TABLE default.table23 UPDATE STATISTICS 
SET('numRows'='0','rawDataSize'='0' );
 
 
diff --git a/ql/src/test/results/clientpositive/llap/show_create_table.q.out 
b/ql/src/test/results/clientpositive/llap/show_create_table.q.out
new file mode 100644
index 0000000..ef16c3f
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/show_create_table.q.out
@@ -0,0 +1,163 @@
+PREHOOK: query: CREATE TABLE TEST(
+  col1 varchar(100) NOT NULL COMMENT "comment for column 1",
+  col2 timestamp DEFAULT CURRENT_TIMESTAMP() COMMENT "comment for column 2",
+  col3 decimal CHECK (col3 + col4 > 1) enable novalidate rely,
+  col4 decimal NOT NULL,
+  col5 varchar(100),
+  primary key(col1, col2) disable novalidate rely,
+  constraint c3_c4_check CHECK((col3 + col4)/(col3 - col4) > 3) enable 
novalidate norely,
+  constraint c4_unique UNIQUE(col4) disable novalidate rely)
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
+STORED AS INPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
+OUTPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@TEST
+POSTHOOK: query: CREATE TABLE TEST(
+  col1 varchar(100) NOT NULL COMMENT "comment for column 1",
+  col2 timestamp DEFAULT CURRENT_TIMESTAMP() COMMENT "comment for column 2",
+  col3 decimal CHECK (col3 + col4 > 1) enable novalidate rely,
+  col4 decimal NOT NULL,
+  col5 varchar(100),
+  primary key(col1, col2) disable novalidate rely,
+  constraint c3_c4_check CHECK((col3 + col4)/(col3 - col4) > 3) enable 
novalidate norely,
+  constraint c4_unique UNIQUE(col4) disable novalidate rely)
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
+STORED AS INPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
+OUTPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@TEST
+PREHOOK: query: CREATE TABLE TEST2(
+ col varchar(100),
+ primary key(col) disable novalidate rely)
+ROW FORMAT SERDE
+'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
+STORED AS INPUTFORMAT
+'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
+OUTPUTFORMAT
+'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@TEST2
+POSTHOOK: query: CREATE TABLE TEST2(
+ col varchar(100),
+ primary key(col) disable novalidate rely)
+ROW FORMAT SERDE
+'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
+STORED AS INPUTFORMAT
+'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
+OUTPUTFORMAT
+'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@TEST2
+PREHOOK: query: CREATE TABLE TEST3(
+  col1 varchar(100) COMMENT "comment",
+  col2 timestamp,
+  col3 varchar(100),
+  foreign key(col1, col2) references TEST(col1, col2) disable novalidate rely,
+  foreign key(col3) references TEST2(col) disable novalidate norely)
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
+STORED AS INPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
+OUTPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@TEST3
+POSTHOOK: query: CREATE TABLE TEST3(
+  col1 varchar(100) COMMENT "comment",
+  col2 timestamp,
+  col3 varchar(100),
+  foreign key(col1, col2) references TEST(col1, col2) disable novalidate rely,
+  foreign key(col3) references TEST2(col) disable novalidate norely)
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
+STORED AS INPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
+OUTPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@TEST3
+PREHOOK: query: SHOW CREATE TABLE TEST
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: default@test
+POSTHOOK: query: SHOW CREATE TABLE TEST
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: default@test
+CREATE TABLE `test`(
+  `col1` varchar(100) COMMENT 'comment for column 1', 
+  `col2` timestamp COMMENT 'comment for column 2', 
+  `col3` decimal(10,0), 
+  `col4` decimal(10,0), 
+  `col5` varchar(100))
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.orc.OrcSerde' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+  'bucketing_version'='2', 
+#### A masked pattern was here ####
+ALTER TABLE default.test ADD CONSTRAINT #### A masked pattern was here #### 
PRIMARY KEY (col1,col2) DISABLE NOVALIDATE RELY;
+ALTER TABLE default.test ADD CONSTRAINT c4_unique UNIQUE (col4) DISABLE 
NOVALIDATE RELY;
+ALTER TABLE test.test CHANGE COLUMN col2 col2 timestamp CONSTRAINT  DEFAULT 
CURRENT_TIMESTAMP() ENABLE NOVALIDATE RELY;
+ALTER TABLE default.test ADD CONSTRAINT c3_c4_check CHECK ((col3 + col4)/(col3 
- col4) > 3) ENABLE NOVALIDATE NORELY;
+ALTER TABLE default.test ADD CONSTRAINT #### A masked pattern was here #### 
CHECK (col3 + col4 > 1) ENABLE NOVALIDATE RELY;
+ALTER TABLE default.test CHANGE COLUMN col1 col1 varchar(100) CONSTRAINT #### 
A masked pattern was here #### NOT NULL ENABLE NOVALIDATE RELY;
+ALTER TABLE default.test CHANGE COLUMN col4 col4 decimal(10,0) CONSTRAINT #### 
A masked pattern was here #### NOT NULL ENABLE NOVALIDATE RELY;
+PREHOOK: query: SHOW CREATE TABLE TEST2
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: default@test2
+POSTHOOK: query: SHOW CREATE TABLE TEST2
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: default@test2
+CREATE TABLE `test2`(
+  `col` varchar(100))
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.orc.OrcSerde' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+  'bucketing_version'='2', 
+#### A masked pattern was here ####
+ALTER TABLE default.test2 ADD CONSTRAINT #### A masked pattern was here #### 
PRIMARY KEY (col) DISABLE NOVALIDATE RELY;
+PREHOOK: query: SHOW CREATE TABLE TEST3
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: default@test3
+POSTHOOK: query: SHOW CREATE TABLE TEST3
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: default@test3
+CREATE TABLE `test3`(
+  `col1` varchar(100) COMMENT 'comment', 
+  `col2` timestamp, 
+  `col3` varchar(100))
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.ql.io.orc.OrcSerde' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+  'bucketing_version'='2', 
+#### A masked pattern was here ####
+ALTER TABLE default.test3 ADD CONSTRAINT #### A masked pattern was here #### 
FOREIGN KEY (col1) REFERENCES default.test(col1) DISABLE NOVALIDATE RELY;
+ALTER TABLE default.test3 ADD CONSTRAINT #### A masked pattern was here #### 
FOREIGN KEY (col2) REFERENCES default.test(col2) DISABLE NOVALIDATE RELY;

Reply via email to