zhiics commented on a change in pull request #6162:
URL: https://github.com/apache/incubator-tvm/pull/6162#discussion_r464201530



##########
File path: src/parser/meta_ref.cc
##########
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*!
+ * \file src/parser/meta_ref.cc
+ * \brief An operator which allows forward referencing a yet-to-be parsed meta table reference.
+ */
+
+#include "./meta_ref.h"
+
+#include <topi/elemwise.h>
+#include <tvm/relay/expr_functor.h>
+#include <tvm/relay/op.h>
+#include <tvm/relay/op_attr_types.h>
+#include <tvm/relay/transform.h>
+
+namespace tvm {
+namespace parser {
+
+using tvm::relay::transform::CreateFunctionPass;
+using tvm::transform::PassContext;
+
+TVM_REGISTER_NODE_TYPE(MetaRefAttrs);
+
+bool MetaRefRel(const Array<Type>& types, int num_inputs, const Attrs& attrs,
+                const TypeReporter& reporter) {
+  LOG(FATAL) << "need to expand before type checking";
+  return true;
+}
+
+RELAY_REGISTER_OP("parser.MetaRef")
+    .describe(R"code(A reference into the meta table.)code" TVM_ADD_FILELINE)
+    .set_attrs_type<MetaRefAttrs>()
+    .set_num_inputs(0)
+    .set_support_level(10)
+    .add_type_rel("MetaRef", MetaRefRel)
+    .set_attr<TOpIsStateful>("TOpIsStateful", false)
+    .set_attr<TNonComputational>("TNonComputational", true);
+
+Expr MetaRef(std::string type_key, uint64_t node_index) {
+  static const Op& op = Op::Get("parser.MetaRef");
+  auto attrs = make_object<MetaRefAttrs>();
+  attrs->node_type_key = tvm::String(type_key);
+  attrs->node_index = node_index;
+  return Call(op, {}, Attrs(attrs), {});
+}
+
+// class MetaRefAttrExpander : AttrFunctor<ObjectRef(const ObjectRef& n)> {
+//     ObjectRef VisitAttrDefault_(const Object* node) final {
+
+//     }
+// }
+
+struct MetaRefExpander : public ExprMutator {
+  MetaTable table;
+
+  explicit MetaRefExpander(const MetaTable& table) : table(table) {}
+
+  Expr VisitExpr_(const CallNode* call) final {
+    if (auto op_node = call->op.as<OpNode>()) {
+      if (op_node->name == "parser.MetaRef") {
+        auto meta_attrs = call->attrs.as<MetaRefAttrs>();
+        CHECK(meta_attrs) << "an internal error has occurred";
+        auto nodes = table.at(meta_attrs->node_type_key);
+        CHECK_LT(meta_attrs->node_index, nodes.size());
+        return Downcast<Expr>(nodes[meta_attrs->node_index]);
+      }
+    }
+
+    return ExprMutator::VisitExpr_(call);
+  }
+};
+
+Function ExpandMetaRefs(const MetaTable& meta_table, const relay::Function& func) {
+  MetaRefExpander expander(meta_table);
+  return Downcast<Function>(expander.VisitExpr(func));
+}
+
+IRModule ExpandMetaRefs(const MetaTable& meta_table, const IRModule& mod) {
+  auto pass = CreateFunctionPass([&](Function func, IRModule module,
+                                     PassContext ctx) { return ExpandMetaRefs(meta_table, func); },
+                                 1337, "ExpandMetaRefs", {});

Review comment:
       Why is the opt_level 1337? Do we just want to set it to a large number?
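
       For reference, a hedged sketch (my illustration, not the PR's code) of the same registration with an explicit, conventional optimization level in place of the magic number; the `0` below is only an assumption about the intent that this expansion should never be skipped:

       ```cpp
       // Sketch only: identical CreateFunctionPass call to the one above, but with a
       // documented opt_level instead of the magic number 1337.
       auto pass = CreateFunctionPass(
           [&](Function func, IRModule module, PassContext ctx) {
             return ExpandMetaRefs(meta_table, func);
           },
           /*opt_level=*/0, "ExpandMetaRefs", {});
       ```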

##########
File path: src/printer/text_printer.h
##########
@@ -355,14 +355,19 @@ namespace tvm {
 class TextPrinter {
  public:
   explicit TextPrinter(bool show_meta_data,
-                       const runtime::TypedPackedFunc<std::string(ObjectRef)>& annotate)
+                       const runtime::TypedPackedFunc<std::string(ObjectRef)>& annotate,
+                       bool show_warning = true)
       : show_meta_data_(show_meta_data),
+        show_warning_(show_warning),
         annotate_(annotate),
         relay_text_printer_(show_meta_data, &meta_, annotate),
         tir_text_printer_(show_meta_data, &meta_) {}
 
   /*! \brief whether show meta data */
   bool show_meta_data_;
+  /*! \brief whether show meta data */

Review comment:
       ```suggestion
         /*! \brief whether show warning */
       ```

##########
File path: src/parser/parser.cc
##########
@@ -542,39 +501,60 @@ class Parser {
    */
   template <typename T>
   Array<T> ParseSequence(TokenType start, TokenType sep, TokenType stop, std::function<T()> parse,
-                         std::function<void()> before_stop = nullptr) {
+                         std::function<bool()> before_stop = nullptr) {
+    DLOG(INFO) << "Parser::ParseSequence: start=" << start << "sep=" << sep << "stop=" << stop;
     Match(start);
+
+    // This is for the empty arguments list case, if we have <start> <leftovers> <stop> token stream
+    // we must parse leftovers, then match a stop token.
+    if (before_stop) {
+      auto did_parse = before_stop();
+      if (did_parse) {
+        Match(stop);
+        return {};
+      }
+    }
+
+    // This is the case in which we find an empty arguments lists and no leftovers.
     if (WhenMatch(stop)) {
       return Array<T>();
     } else {
       auto data = parse();
       Array<T> elements = {data};
 
-      // parse '(' expr ')'
+      // parse '(' expr ','? ')'
       // if we are at the end invoke leftover parser
-      if (Peek()->token_type == stop && before_stop) {
-        before_stop();
-      }
+      // if (Peek()->token_type == sep && before_stop) {
+      //  before_stop();
+      // }

Review comment:
       Please clean up the commented-out code here.
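
       For context, a minimal standalone sketch of the sequence-parsing pattern this hunk implements (the token type, parser class, and error handling are simplified stand-ins of my own, not the PR's code); once `before_stop` consumes any leftovers right after the start token, the commented-out call inside the loop is indeed dead:

       ```cpp
       #include <cassert>
       #include <functional>
       #include <vector>

       // Simplified token stream, for illustration only.
       enum class Tok { Open, Comma, Close, Item, End };

       struct ToyParser {
         std::vector<Tok> toks;
         size_t pos = 0;

         Tok Peek() const { return toks[pos]; }
         bool WhenMatch(Tok t) { return Peek() == t ? (++pos, true) : false; }
         void Match(Tok t) {
           bool ok = WhenMatch(t);
           assert(ok && "unexpected token");
           (void)ok;
         }

         // Rough analogue of the ParseSequence flow shown above:
         // <start> [leftovers] (item (sep item)* sep?)? <stop>
         template <typename T>
         std::vector<T> ParseSequence(Tok start, Tok sep, Tok stop, std::function<T()> parse,
                                      std::function<bool()> before_stop = nullptr) {
           Match(start);
           // Leftovers (e.g. an attribute block) sit between start and stop; if they
           // were parsed, the element list itself must be empty.
           if (before_stop && before_stop()) {
             Match(stop);
             return {};
           }
           if (WhenMatch(stop)) return {};  // empty list, no leftovers
           std::vector<T> elems = {parse()};
           // parse '(' expr (',' expr)* ','? ')'
           while (!WhenMatch(stop)) {
             Match(sep);
             if (WhenMatch(stop)) break;  // trailing separator
             elems.push_back(parse());
           }
           return elems;
         }
       };
       ```

       Calling it as `p.ParseSequence<int>(Tok::Open, Tok::Comma, Tok::Close, [&]{ p.Match(Tok::Item); return 0; })` on the stream `Open Item Comma Item Close` yields two elements, and it also accepts a trailing comma or an empty list, matching the `'(' expr ','? ')'` comment in the hunk above.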




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

