lirui-apache commented on a change in pull request #16416:
URL: https://github.com/apache/flink/pull/16416#discussion_r672275662
##########
File path:
flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/planner/delegation/hive/parse/HiveParserDDLSemanticAnalyzer.java
##########
@@ -1494,26 +1848,95 @@ private Serializable analyzeAlterTableAddParts(
}
// add the last one
- if (currentPart != null) {
- addPartitionDesc.addPartition(currentPart, currentLocation);
+ if (currentPartSpec != null) {
+ specs.add(new CatalogPartitionSpec(currentPartSpec));
+ Map<String, String> props = new HashMap<>();
+ if (currentLocation != null) {
+ props.put(TABLE_LOCATION_URI, currentLocation);
+ }
+ partitions.add(new CatalogPartitionImpl(props, null));
}
- return new DDLWork(getInputs(), getOutputs(), addPartitionDesc);
+ ObjectIdentifier tableIdentifier =
+ tab.getDbName() == null
+ ? parseObjectIdentifier(tab.getTableName())
+ : catalogManager.qualifyIdentifier(
+ UnresolvedIdentifier.of(tab.getDbName(),
tab.getTableName()));
+ CatalogBaseTable catalogBaseTable =
getCatalogBaseTable(tableIdentifier);
+ if (catalogBaseTable instanceof CatalogView) {
+ throw new ValidationException("ADD PARTITION for a view is not
supported");
+ }
+ return new AddPartitionsOperation(tableIdentifier, ifNotExists, specs,
partitions);
}
- // Get the partition specs from the tree
- private List<Map<String, String>> getPartitionSpecs(CommonTree ast) {
- List<Map<String, String>> partSpecs = new ArrayList<>();
- // get partition metadata if partition specified
- for (int childIndex = 0; childIndex < ast.getChildCount();
childIndex++) {
- HiveParserASTNode partSpecNode = (HiveParserASTNode)
ast.getChild(childIndex);
- // sanity check
- if (partSpecNode.getType() == HiveASTParser.TOK_PARTSPEC) {
- Map<String, String> partSpec = getPartSpec(partSpecNode);
- partSpecs.add(partSpec);
+ private Operation convertAlterViewProps(String tableName, Map<String,
String> newProps) {
+ ObjectIdentifier viewIdentifier = parseObjectIdentifier(tableName);
+ CatalogBaseTable baseTable = getCatalogBaseTable(viewIdentifier);
+ CatalogView oldView = (CatalogView) baseTable;
+ Map<String, String> props = new HashMap<>(oldView.getOptions());
+ props.putAll(newProps);
+ CatalogView newView =
+ new CatalogViewImpl(
+ oldView.getOriginalQuery(),
+ oldView.getExpandedQuery(),
+ oldView.getSchema(),
+ props,
+ oldView.getComment());
+ return new AlterViewPropertiesOperation(viewIdentifier, newView);
+ }
+
+ private void checkAlterTableLegal(String tableName, boolean expectView) {
Review comment:
       We can let this method return the old table/view and reuse the result in
subsequent methods. Getting the table involves a round trip to the HMS, so we
should reduce the number of such calls.
##########
File path:
flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/planner/delegation/hive/parse/HiveParserDDLSemanticAnalyzer.java
##########
@@ -939,37 +1218,80 @@ private Serializable analyzeAlterTableProps(
}
}
}
- HiveParserAlterTableDesc alterTblDesc = null;
if (isUnset) {
handleUnsupportedOperation("Unset properties not supported");
+ }
+
+ if (expectView) {
+ return convertAlterViewProps(tableName, mapProp);
} else {
- alterTblDesc =
- HiveParserAlterTableDesc.alterTableProps(
- tableName, partSpec, mapProp, expectView);
+ Map<String, String> newProps = new HashMap<>();
+ newProps.put(ALTER_TABLE_OP, CHANGE_TBL_PROPS.name());
+ newProps.putAll(mapProp);
+ return convertAlterTableProps(tableName, partSpec, newProps);
}
+ }
- return alterTblDesc;
+ private Operation convertAlterTableProps(
+ String tableName, Map<String, String> partSpec, Map<String,
String> newProps) {
+ ObjectIdentifier tableIdentifier = parseObjectIdentifier(tableName);
+ CatalogBaseTable catalogBaseTable =
getCatalogBaseTable(tableIdentifier);
+ if (catalogBaseTable instanceof CatalogView) {
Review comment:
       Do we need the check here, since we have already done it in the outer method?
##########
File path:
flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/planner/delegation/hive/parse/HiveParserDDLSemanticAnalyzer.java
##########
@@ -544,6 +512,39 @@ private Operation convertCreateFunction(HiveParserASTNode
ast) {
}
}
+ private Operation convertAlterView(HiveParserASTNode ast) throws
SemanticException {
+ Operation operation = null;
+ String[] qualified =
+ HiveParserBaseSemanticAnalyzer.getQualifiedTableName(
+ (HiveParserASTNode) ast.getChild(0));
+ String tableName =
HiveParserBaseSemanticAnalyzer.getDotName(qualified);
+ checkAlterTableLegal(tableName, true);
+ if (ast.getChild(1).getType() == HiveASTParser.TOK_QUERY) {
+ // alter view as
+ operation = convertCreateView(ast);
+ } else {
+ ast = (HiveParserASTNode) ast.getChild(1);
+ switch (ast.getType()) {
+ case HiveASTParser.TOK_ALTERVIEW_PROPERTIES:
+ operation = convertAlterTableProps(tableName, null, ast,
true, false);
+ break;
+ case HiveASTParser.TOK_ALTERVIEW_DROPPROPERTIES:
+ operation = convertAlterTableProps(tableName, null, ast,
true, true);
+ break;
+ case HiveASTParser.TOK_ALTERVIEW_RENAME:
+ operation = convertAlterTableRename(tableName, ast, true);
+ break;
+ case HiveASTParser.TOK_ALTERVIEW_ADDPARTS:
+ case HiveASTParser.TOK_ALTERVIEW_DROPPARTS:
+ handleUnsupportedOperation("DROP PARTITION for view is not
supported");
Review comment:
```suggestion
handleUnsupportedOperation("ADD/DROP PARTITION for view
is not supported");
```
##########
File path:
flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/planner/delegation/hive/parse/HiveParserDDLSemanticAnalyzer.java
##########
@@ -1173,46 +1170,20 @@ private Operation convertDropTable(HiveParserASTNode
ast, TableType expectedType
}
}
- private void validateAlterTableType(
- Table tbl, AlterTableDesc.AlterTableTypes op, boolean expectView) {
- if (tbl.isView()) {
- if (!expectView) {
- throw new
ValidationException(ErrorMsg.ALTER_COMMAND_FOR_VIEWS.getMsg());
- }
-
- switch (op) {
- case ADDPARTITION:
- case DROPPARTITION:
- case RENAMEPARTITION:
- case ADDPROPS:
- case DROPPROPS:
- case RENAME:
- // allow this form
- break;
- default:
- throw new ValidationException(
-
ErrorMsg.ALTER_VIEW_DISALLOWED_OP.getMsg(op.toString()));
- }
- } else {
- if (expectView) {
- throw new
ValidationException(ErrorMsg.ALTER_COMMAND_FOR_TABLES.getMsg());
- }
- }
+ private void validateAlterTableType(Table tbl,
AlterTableDesc.AlterTableTypes op) {
Review comment:
       `op` is not used — we can remove this parameter.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]