[ 
https://issues.apache.org/jira/browse/HIVE-25652?focusedWorklogId=676231&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-676231
 ]

ASF GitHub Bot logged work on HIVE-25652:
-----------------------------------------

                Author: ASF GitHub Bot
            Created on: 04/Nov/21 06:41
            Start Date: 04/Nov/21 06:41
    Worklog Time Spent: 10m 
      Work Description: soumyakanti3578 commented on a change in pull request #2752:
URL: https://github.com/apache/hive/pull/2752#discussion_r742571038



##########
File path: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLPlanUtils.java
##########
@@ -800,19 +809,166 @@ private String getExternal(Table table) {
     return table.getTableType() == TableType.EXTERNAL_TABLE ? "EXTERNAL " : "";
   }
 
-  private String getColumns(Table table) {
-    List<String> columnDescs = new ArrayList<String>();
+  private String getColumns(Table table) throws HiveException {
+    List<String> columnDescs = new ArrayList<>();
+    List<String> columns = table.getCols().stream().map(FieldSchema::getName).collect(Collectors.toList());
+    Set<String> notNullColumns = Collections.emptySet();
+    if (NotNullConstraint.isNotEmpty(table.getNotNullConstraint())) {
+      notNullColumns = new HashSet<>(table.getNotNullConstraint().getNotNullConstraints().values());
+    }
+
+    Map<String, String> columnDefaultValueMap = Collections.emptyMap();
+    if (DefaultConstraint.isNotEmpty(table.getDefaultConstraint())) {
+      columnDefaultValueMap = table.getDefaultConstraint().getColNameToDefaultValueMap();
+    }
+
+    List<SQLCheckConstraint> sqlCheckConstraints;
+    try {
+      sqlCheckConstraints = Hive.get().getCheckConstraintList(table.getDbName(), table.getTableName());
+    } catch (NoSuchObjectException e) {
+      throw new HiveException(e);
+    }
+    Map<String, SQLCheckConstraint> columnCheckConstraintsMap = sqlCheckConstraints.stream()
+      .filter(SQLCheckConstraint::isSetColumn_name)
+      .collect(Collectors.toMap(SQLCheckConstraint::getColumn_name, Function.identity()));
+    List<SQLCheckConstraint> tableCheckConstraints = sqlCheckConstraints.stream()
+      .filter(cc -> !cc.isSetColumn_name())
+      .collect(Collectors.toList());
+
     for (FieldSchema column : table.getCols()) {
       String columnType = formatType(TypeInfoUtils.getTypeInfoFromTypeString(column.getType()));
-      String columnDesc = "  `" + column.getName() + "` " + columnType;
+      String columnName = column.getName();
+      StringBuilder columnDesc = new StringBuilder();
+      columnDesc.append("  `").append(columnName).append("` ").append(columnType);
+      if (notNullColumns.contains(columnName)) {
+        columnDesc.append(" NOT NULL");
+      }
+      if (columnDefaultValueMap.containsKey(columnName)) {
+        columnDesc.append(" DEFAULT ").append(columnDefaultValueMap.get(columnName));
+      }
+      if (columnCheckConstraintsMap.containsKey(columnName)) {
+        columnDesc.append(getColumnCheckConstraintDesc(columnCheckConstraintsMap.get(columnName), columns));
+      }
       if (column.getComment() != null) {
-        columnDesc += " COMMENT '" + HiveStringUtils.escapeHiveCommand(column.getComment()) + "'";
+        columnDesc.append(" COMMENT '").append(HiveStringUtils.escapeHiveCommand(column.getComment())).append("'");
       }
-      columnDescs.add(columnDesc);
+      columnDescs.add(columnDesc.toString());
     }
+    String pkDesc = getPrimaryKeyDesc(table);
+    if (pkDesc != null) {
+      columnDescs.add(pkDesc);
+    }
+    columnDescs.addAll(getForeignKeyDesc(table));
+    columnDescs.addAll(getTableCheckConstraintDesc(tableCheckConstraints, columns));
     return StringUtils.join(columnDescs, ", \n");
   }
 
+  private List<String> getTableCheckConstraintDesc(List<SQLCheckConstraint> tableCheckConstraints,
+                                                   List<String> columns) {
+    List<String> ccDescs = new ArrayList<>();
+    for (SQLCheckConstraint constraint : tableCheckConstraints) {
+      String enable = constraint.isEnable_cstr() ? " enable" : " disable";
+      String validate = constraint.isValidate_cstr() ? " validate" : " novalidate";
+      String rely = constraint.isRely_cstr() ? " rely" : " norely";
+      String expression = getCheckExpressionWithBackticks(columns, constraint);
+      ccDescs.add("  constraint " + constraint.getDc_name() + " CHECK(" + expression +
+        ")" + enable + validate + rely);
+    }
+    return ccDescs;
+  }
+
+  private String getCheckExpressionWithBackticks(List<String> columns, SQLCheckConstraint constraint) {
+    TreeMap<Integer, String> indexToCols = new TreeMap<>();
+    String expression = constraint.getCheck_expression();
+    for (String col : columns) {
+      int idx = expression.indexOf(col);
+      if (idx == -1) {
+        continue;
+      }
+      indexToCols.put(idx, col);
+      while (idx + col.length() < expression.length()) {
+        idx = expression.indexOf(col, idx + col.length());
+        if (idx == -1) {
+          break;
+        }
+        indexToCols.put(idx, col);
+      }
+    }

Review comment:
       This is how I'm collecting the column names:
   https://github.com/apache/hive/pull/2752/files#diff-f73ee0723188f6bed5fcbc3950ee71781bfa2cfc9a0cf5f558c2d6469652392bR814
   So yes, they are String literals.
   
   I did the test you suggested above, and I'm getting a SemanticException:
   `org.apache.hadoop.hive.ql.parse.SemanticException: Invalid Constraint syntax Invalid CHECK constraint expression: col4 <> 'col4'. Line 1:0 Wrong arguments ''col4'': Unsafe compares between different types are disabled for safety reasons. If you know what you are doing, please set hive.strict.checks.type.safety to false and make sure that hive.mapred.mode is not set to 'strict' to proceed. Note that you may get errors or incorrect results if you make a mistake while using some of the unsafe features.`
   
   So this will fail by default during the table creation phase unless hive.strict.checks.type.safety is set to false.
   
   So I guess this is a bit hacky and we should do it properly. I tried (and am still trying) converting the expression to an ASTNode, which was fine, but steps 2 & 3 are a bit confusing.
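   To make the failure mode concrete, here is a minimal, self-contained sketch (not part of the patch; the class name and inputs are invented for illustration) of how the indexOf scan in getCheckExpressionWithBackticks also matches a column name that occurs inside a string literal:
   
   ```java
   import java.util.List;
   import java.util.TreeMap;
   
   public class BacktickPitfall {
     public static void main(String[] args) {
       // Same shape as the failing test above: column col4 compared to the literal 'col4'.
       String expression = "col4 <> 'col4'";
       List<String> columns = List.of("col4");
   
       // Mirrors the indexOf loop in the diff: record every occurrence of each column name.
       TreeMap<Integer, String> indexToCols = new TreeMap<>();
       for (String col : columns) {
         int idx = expression.indexOf(col);
         while (idx != -1) {
           indexToCols.put(idx, col);
           idx = expression.indexOf(col, idx + col.length());
         }
       }
   
       // Prints {0=col4, 9=col4}: the hit at index 9 is inside the string literal,
       // so wrapping both occurrences in backticks would corrupt the literal.
       System.out.println(indexToCols);
     }
   }
   ```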
   




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.



Issue Time Tracking
-------------------

    Worklog Id:     (was: 676231)
    Time Spent: 1h 50m  (was: 1h 40m)

> Add constraints in result of “SHOW CREATE TABLE”
> -------------------------------------------------
>
>                 Key: HIVE-25652
>                 URL: https://issues.apache.org/jira/browse/HIVE-25652
>             Project: Hive
>          Issue Type: Improvement
>            Reporter: Soumyakanti Das
>            Assignee: Soumyakanti Das
>            Priority: Major
>              Labels: pull-request-available
>          Time Spent: 1h 50m
>  Remaining Estimate: 0h
>
> Currently, SHOW CREATE TABLE doesn’t pull any constraint info such as NOT 
> NULL, DEFAULT, or PRIMARY KEY.
> Example:
> Create table
>  
> {code:java}
> CREATE TABLE TEST(
>   col1 varchar(100) NOT NULL COMMENT "comment for column 1",
>   col2 timestamp DEFAULT CURRENT_TIMESTAMP() COMMENT "comment for column 2",
>   col3 decimal,
>   col4 varchar(512) NOT NULL,
>   col5 varchar(100),
>   primary key(col1, col2) disable novalidate)
> ROW FORMAT SERDE
>   'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
> STORED AS INPUTFORMAT
>   'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
> OUTPUTFORMAT
>   'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat';
> {code}
> Currently {{SHOW CREATE TABLE TEST}} doesn't show the column constraints.
> {code:java}
> CREATE TABLE `test`(
>   `col1` varchar(100) COMMENT 'comment for column 1', 
>   `col2` timestamp COMMENT 'comment for column 2', 
>   `col3` decimal(10,0), 
>   `col4` varchar(512), 
>   `col5` varchar(100))
> ROW FORMAT SERDE 
>   'org.apache.hadoop.hive.ql.io.orc.OrcSerde' 
> STORED AS INPUTFORMAT 
>   'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat' 
> OUTPUTFORMAT 
>   'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
> {code}
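> With constraints included, the output should instead look roughly like the following (an illustrative sketch only; the exact rendering of the primary-key and check-constraint lines depends on getPrimaryKeyDesc and the other helpers in the patch):
> {code:java}
> CREATE TABLE `test`(
>   `col1` varchar(100) NOT NULL COMMENT 'comment for column 1', 
>   `col2` timestamp DEFAULT CURRENT_TIMESTAMP() COMMENT 'comment for column 2', 
>   `col3` decimal(10,0), 
>   `col4` varchar(512) NOT NULL, 
>   `col5` varchar(100), 
>   primary key(col1, col2) disable novalidate)
> ...
> {code}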



--
This message was sent by Atlassian Jira
(v8.3.4#803005)
