kasakrisz commented on code in PR #2855:
URL: https://github.com/apache/hive/pull/2855#discussion_r890857032
##########
ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java:
##########
@@ -264,9 +257,104 @@ private void reparseAndSuperAnalyze(ASTNode tree) throws
SemanticException {
}
}
- private void validateTxnManager(Table mTable) throws SemanticException {
- if (!AcidUtils.acidTableWithoutTransactions(mTable) &&
!getTxnMgr().supportsAcid()) {
- throw new SemanticException(ErrorMsg.ACID_OP_ON_NONACID_TXNMGR.getMsg());
+ private void analyzeSplitUpdate(ASTNode tree, Table mTable, ASTNode
tabNameNode) throws SemanticException {
+ operation = Context.Operation.UPDATE;
+
+ List<? extends Node> children = tree.getChildren();
+
+ ASTNode where = null;
+ int whereIndex = 2;
+ if (children.size() > whereIndex) {
+ where = (ASTNode) children.get(whereIndex);
+ assert where.getToken().getType() == HiveParser.TOK_WHERE :
+ "Expected where clause, but found " + where.getName();
+ }
+
+ Set<String> setRCols = new LinkedHashSet<>();
+// TOK_UPDATE_TABLE
+// TOK_TABNAME
+// ...
+// TOK_SET_COLUMNS_CLAUSE <- The set list from update should be the
second child (index 1)
+ assert children.size() >= 2 : "Expected update token to have at least two
children";
+ ASTNode setClause = (ASTNode) children.get(1);
+ Map<String, ASTNode> setCols = collectSetColumnsAndExpressions(setClause,
setRCols, mTable);
+ Map<Integer, ASTNode> setColExprs = new
HashMap<>(setClause.getChildCount());
+
+ List<FieldSchema> nonPartCols = mTable.getCols();
+ Map<String, String> colNameToDefaultConstraint =
getColNameToDefaultValueMap(mTable);
+ List<String> values = new ArrayList<>(mTable.getCols().size());
+ StringBuilder rewrittenQueryStr = createRewrittenQueryStrBuilder();
+ rewrittenQueryStr.append("(SELECT ROW__ID");
+ for (int i = 0; i < nonPartCols.size(); i++) {
+ rewrittenQueryStr.append(',');
+ String name = nonPartCols.get(i).getName();
+ ASTNode setCol = setCols.get(name);
+ String identifier = HiveUtils.unparseIdentifier(name, this.conf);
+
+ if (setCol != null) {
+ if (setCol.getType() == HiveParser.TOK_TABLE_OR_COL &&
+ setCol.getChildCount() == 1 && setCol.getChild(0).getType() ==
HiveParser.TOK_DEFAULT_VALUE) {
+ rewrittenQueryStr.append(colNameToDefaultConstraint.get(name));
Review Comment:
Here are some related tests:
https://github.com/apache/hive/blob/cdb1052e24ca493c6486fef3dd8956dde61be834/ql/src/test/queries/clientpositive/insert_into_default_keyword.q#L97
The `DEFAULT` keyword should be handled here. For example, the statement
```
UPDATE insert_into1_n0 set key = DEFAULT where value=1;
```
is rewritten to
```
FROM
(SELECT ROW__ID,1 AS `key`,`value` AS `value`,`i` AS `i` FROM
`default`.`insert_into1_n0`) s
INSERT INTO `default`.`insert_into1_n0`
SELECT s.`key`,s.`value`,s.`i`
INSERT INTO `default`.`insert_into1_n0`
SELECT s.ROW__ID
SORT BY s.ROW__ID
```
The `DEFAULT` keyword is replaced with the constant `1` in the select clause of
the multi-insert.
If it is not replaced here, we would have to replace it in the first insert branch
instead, so we cannot get rid of `DEFAULT` keyword handling while rewriting the statement.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]