soumyakanti3578 commented on a change in pull request #2752:
URL: https://github.com/apache/hive/pull/2752#discussion_r742571038
##########
File path: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLPlanUtils.java
##########
@@ -800,19 +809,166 @@ private String getExternal(Table table) {
     return table.getTableType() == TableType.EXTERNAL_TABLE ? "EXTERNAL " : "";
   }
 
-  private String getColumns(Table table) {
-    List<String> columnDescs = new ArrayList<String>();
+  private String getColumns(Table table) throws HiveException {
+    List<String> columnDescs = new ArrayList<>();
+    List<String> columns = table.getCols().stream().map(FieldSchema::getName).collect(Collectors.toList());
+    Set<String> notNullColumns = Collections.emptySet();
+    if (NotNullConstraint.isNotEmpty(table.getNotNullConstraint())) {
+      notNullColumns = new HashSet<>(table.getNotNullConstraint().getNotNullConstraints().values());
+    }
+
+    Map<String, String> columnDefaultValueMap = Collections.emptyMap();
+    if (DefaultConstraint.isNotEmpty(table.getDefaultConstraint())) {
+      columnDefaultValueMap = table.getDefaultConstraint().getColNameToDefaultValueMap();
+    }
+
+    List<SQLCheckConstraint> sqlCheckConstraints;
+    try {
+      sqlCheckConstraints = Hive.get().getCheckConstraintList(table.getDbName(), table.getTableName());
+    } catch (NoSuchObjectException e) {
+      throw new HiveException(e);
+    }
+    Map<String, SQLCheckConstraint> columnCheckConstraintsMap = sqlCheckConstraints.stream()
+        .filter(SQLCheckConstraint::isSetColumn_name)
+        .collect(Collectors.toMap(SQLCheckConstraint::getColumn_name, Function.identity()));
+    List<SQLCheckConstraint> tableCheckConstraints = sqlCheckConstraints.stream()
+        .filter(cc -> !cc.isSetColumn_name())
+        .collect(Collectors.toList());
+
     for (FieldSchema column : table.getCols()) {
       String columnType = formatType(TypeInfoUtils.getTypeInfoFromTypeString(column.getType()));
-      String columnDesc = "  `" + column.getName() + "` " + columnType;
+      String columnName = column.getName();
+      StringBuilder columnDesc = new StringBuilder();
+      columnDesc.append("  `").append(columnName).append("` ").append(columnType);
+      if (notNullColumns.contains(columnName)) {
+        columnDesc.append(" NOT NULL");
+      }
+      if (columnDefaultValueMap.containsKey(columnName)) {
+        columnDesc.append(" DEFAULT ").append(columnDefaultValueMap.get(columnName));
+      }
+      if (columnCheckConstraintsMap.containsKey(columnName)) {
+        columnDesc.append(getColumnCheckConstraintDesc(columnCheckConstraintsMap.get(columnName), columns));
+      }
       if (column.getComment() != null) {
-        columnDesc += " COMMENT '" + HiveStringUtils.escapeHiveCommand(column.getComment()) + "'";
+        columnDesc.append(" COMMENT '").append(HiveStringUtils.escapeHiveCommand(column.getComment())).append("'");
       }
-      columnDescs.add(columnDesc);
+      columnDescs.add(columnDesc.toString());
     }
+    String pkDesc = getPrimaryKeyDesc(table);
+    if (pkDesc != null) {
+      columnDescs.add(pkDesc);
+    }
+    columnDescs.addAll(getForeignKeyDesc(table));
+    columnDescs.addAll(getTableCheckConstraintDesc(tableCheckConstraints, columns));
     return StringUtils.join(columnDescs, ", \n");
   }
 
+  private List<String> getTableCheckConstraintDesc(List<SQLCheckConstraint> tableCheckConstraints,
+      List<String> columns) {
+    List<String> ccDescs = new ArrayList<>();
+    for (SQLCheckConstraint constraint : tableCheckConstraints) {
+      String enable = constraint.isEnable_cstr() ? " enable" : " disable";
+      String validate = constraint.isValidate_cstr() ? " validate" : " novalidate";
+      String rely = constraint.isRely_cstr() ? " rely" : " norely";
+      String expression = getCheckExpressionWithBackticks(columns, constraint);
+      ccDescs.add("  constraint " + constraint.getDc_name() + " CHECK(" + expression +
+          ")" + enable + validate + rely);
+    }
+    return ccDescs;
+  }
+
+  private String getCheckExpressionWithBackticks(List<String> columns, SQLCheckConstraint constraint) {
+    TreeMap<Integer, String> indexToCols = new TreeMap<>();
+    String expression = constraint.getCheck_expression();
+    for (String col : columns) {
+      int idx = expression.indexOf(col);
+      if (idx == -1) {
+        continue;
+      }
+      indexToCols.put(idx, col);
+      while (idx + col.length() < expression.length()) {
+        idx = expression.indexOf(col, idx + col.length());
+        if (idx == -1) {
+          break;
+        }
+        indexToCols.put(idx, col);
+      }
+    }
Review comment:
This is how I'm collecting the column names:
https://github.com/apache/hive/pull/2752/files#diff-f73ee0723188f6bed5fcbc3950ee71781bfa2cfc9a0cf5f558c2d6469652392bR814
So yes, they are String literals.

I ran the test you suggested and I get a SemanticException:

`org.apache.hadoop.hive.ql.parse.SemanticException: Invalid Constraint syntax Invalid CHECK constraint expression: col4 <> 'col4'. Line 1:0 Wrong arguments ''col4'': Unsafe compares between different types are disabled for safety reasons. If you know what you are doing, please set hive.strict.checks.type.safety to false and make sure that hive.mapred.mode is not set to 'strict' to proceed. Note that you may get errors or incorrect results if you make a mistake while using some of the unsafe features.`

So by default this will fail during the table creation phase unless hive.strict.checks.type.safety is set to false.

So I agree this is a bit hacky and we should do it properly. I tried (and am still trying) converting the expression to an ASTNode; that part was fine, but steps 2 and 3 are a bit confusing.
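
To make the string-literal concern concrete, here is a minimal, self-contained sketch (not code from the PR; the class name, column list, and sample expression are made up for illustration) of how the indexOf-based scan in getCheckExpressionWithBackticks also matches a column name that occurs only inside a string literal:

```java
import java.util.Arrays;
import java.util.List;
import java.util.TreeMap;

// Illustrative stand-alone demo of the indexOf-based scan shown in the diff above.
public class CheckExpressionScanDemo {
  public static void main(String[] args) {
    List<String> columns = Arrays.asList("col4");   // column names of the table (sample)
    String expression = "col4 <> 'col4'";           // check expression from the test above

    // Record the start index of every occurrence of every column name.
    TreeMap<Integer, String> indexToCols = new TreeMap<>();
    for (String col : columns) {
      int idx = expression.indexOf(col);
      while (idx != -1) {
        indexToCols.put(idx, col);
        idx = expression.indexOf(col, idx + col.length());
      }
    }

    // Prints {0=col4, 9=col4}: the hit at index 9 is inside the string literal,
    // so wrapping both occurrences in backticks would corrupt the literal.
    System.out.println(indexToCols);
  }
}
```

A parse-based rewrite (turning the expression into an ASTNode and only backticking identifier tokens) would not have this problem, since 'col4' inside quotes is a literal rather than a column reference.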