luoyuxia commented on code in PR #19561:
URL: https://github.com/apache/flink/pull/19561#discussion_r892101497


##########
flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/planner/delegation/hive/parse/HiveParserDDLSemanticAnalyzer.java:
##########
@@ -506,6 +532,106 @@ private Operation convertCreateFunction(HiveParserASTNode 
ast) {
         }
     }
 
+    private Operation convertCreateMacro(HiveParserASTNode ast) throws 
SemanticException {
+        String macroName = ast.getChild(0).getText();
+        if (FunctionUtils.isQualifiedFunctionName(macroName)) {
+            throw new SemanticException("Temporary macro cannot be created 
with a qualified name.");
+        }
+
+        List<FieldSchema> arguments = getColumns((HiveParserASTNode) 
ast.getChild(1), true);
+        Set<String> actualColumnNames = getActualColumnNames(ast, arguments);
+
+        HiveParserRowResolver rowResolver = new HiveParserRowResolver();
+        List<String> macroColumnNames = new ArrayList<>();
+        List<TypeInfo> macroColumnTypes = new ArrayList<>();
+        getMacroColumnData(
+                arguments, actualColumnNames, rowResolver, macroColumnNames, 
macroColumnTypes);
+        ExprNodeDesc body = getBody(ast, arguments, rowResolver);
+
+        GenericUDFMacro macro =
+                new GenericUDFMacro(macroName, body, macroColumnNames, 
macroColumnTypes);
+
+        FunctionDefinition macroDefinition =
+                new HiveGenericUDF(
+                        new 
HiveFunctionWrapper<>(GenericUDFMacro.class.getName(), macro),
+                        hiveShim);
+        // hive's macro is more like flink's temp system function
+        return new CreateTempSystemFunctionOperation(macroName, false, 
macroDefinition);
+    }
+
+    private Set<String> getActualColumnNames(HiveParserASTNode ast, 
List<FieldSchema> arguments)
+            throws SemanticException {
+        final Set<String> actualColumnNames = new HashSet<>();
+
+        if (!arguments.isEmpty()) {
+            // Walk down expression to see which arguments are actually used.
+            Node expression = (Node) ast.getChild(2);
+
+            PreOrderWalker walker =
+                    new PreOrderWalker(
+                            (nd, stack, nodeOutputs) -> {
+                                if (nd instanceof HiveParserASTNode) {
+                                    HiveParserASTNode node = 
(HiveParserASTNode) nd;
+                                    if (node.getType() == 
HiveASTParser.TOK_TABLE_OR_COL) {
+                                        
actualColumnNames.add(node.getChild(0).getText());
+                                    }
+                                }
+                                return null;
+                            });
+            walker.startWalking(Collections.singleton(expression), null);
+        }
+        return actualColumnNames;
+    }
+
+    private void getMacroColumnData(

Review Comment:
   Returning a Tuple2 is fine with me, but I would like to separate 
`getMacroColumnData` and `getBody`.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to