areusch commented on a change in pull request #8280:
URL: https://github.com/apache/tvm/pull/8280#discussion_r665615332



##########
File path: include/tvm/runtime/module.h
##########
@@ -232,6 +232,8 @@ constexpr const char* tvm_param_prefix = "__tvm_param__";
 constexpr const char* tvm_lookup_linked_param = "_lookup_linked_param";
 /*! \brief The main AOT executor function */
 constexpr const char* tvm_run_func_suffix = "run_model";

Review comment:
       Do we need both this constant and the one added below, or can one serve both purposes?

##########
File path: include/tvm/runtime/module.h
##########
@@ -232,6 +232,8 @@ constexpr const char* tvm_param_prefix = "__tvm_param__";
 constexpr const char* tvm_lookup_linked_param = "_lookup_linked_param";
 /*! \brief The main AOT executor function */
 constexpr const char* tvm_run_func_suffix = "run_model";
+/*! \brief The models entrypoint function which calls the executor */

Review comment:
       Since Executor is an interface and run_func doesn't conform to it,
let's find another name here.

##########
File path: src/target/source/source_module.cc
##########
@@ -220,37 +221,85 @@ class CSourceCrtMetadataModuleNode : public 
runtime::ModuleNode {
     code_ << "}\n";
   }
 
-  void GenerateEntrypointForPackedAPI(const std::string& run_func) {
+  void GenerateEntrypointForPackedAPI(const std::string& entrypoint_name,
+                                      const std::string& run_func) {
     code_ << "TVM_DLL int32_t " << run_func;
     code_ << "(void* args, void* type_code, int num_args, void* out_value, 
void* "
              "out_type_code, void* resource_handle);\n";
-    code_ << "static int32_t " << ::tvm::runtime::symbol::tvm_module_main;
+    code_ << "int32_t " << entrypoint_name;
     code_ << "(void* args, void* type_code, int num_args, void* out_value, 
void* "
              "out_type_code, void* resource_handle) {\n";
     code_ << "return " << run_func;
     code_ << "(args, type_code, num_args, out_value, out_type_code, 
resource_handle);\n";
     code_ << "}\n";
   }
 
+  void GenerateCInterfaceEntrypoint(const std::string& entrypoint_name, const 
std::string& run_func,
+                                    const std::string& mod_name) {
+    code_ << "#include <" << mod_name << ".h>\n";

Review comment:
       Will this generated header name also be prefixed with `tvmgen_`?

##########
File path: src/target/source/source_module.cc
##########
@@ -220,37 +221,85 @@ class CSourceCrtMetadataModuleNode : public 
runtime::ModuleNode {
     code_ << "}\n";
   }
 
-  void GenerateEntrypointForPackedAPI(const std::string& run_func) {
+  void GenerateEntrypointForPackedAPI(const std::string& entrypoint_name,
+                                      const std::string& run_func) {
     code_ << "TVM_DLL int32_t " << run_func;
     code_ << "(void* args, void* type_code, int num_args, void* out_value, 
void* "
              "out_type_code, void* resource_handle);\n";
-    code_ << "static int32_t " << ::tvm::runtime::symbol::tvm_module_main;
+    code_ << "int32_t " << entrypoint_name;
     code_ << "(void* args, void* type_code, int num_args, void* out_value, 
void* "
              "out_type_code, void* resource_handle) {\n";
     code_ << "return " << run_func;
     code_ << "(args, type_code, num_args, out_value, out_type_code, 
resource_handle);\n";
     code_ << "}\n";
   }
 
+  void GenerateCInterfaceEntrypoint(const std::string& entrypoint_name, const 
std::string& run_func,
+                                    const std::string& mod_name) {
+    code_ << "#include <" << mod_name << ".h>\n";
+    code_ << "TVM_DLL int32_t " << run_func << "(";
+    unsigned int total_args = (metadata_->inputs.size() + 
metadata_->num_outputs);
+    for (unsigned int i = 0; i < total_args; ++i) {
+      code_ << "void* arg" << i;
+      if (i + 1 != total_args) {
+        code_ << ",";
+      }
+    }
+    code_ << ");\n";
+    code_ << "int32_t " << entrypoint_name << "(";
+    code_ << "struct " << runtime::get_name_mangled(mod_name, "inputs") << "* 
inputs,"
+          << "struct " << runtime::get_name_mangled(mod_name, "outputs") << "* 
outputs,"
+          << "struct " << runtime::get_name_mangled(mod_name, "memory") << "* 
memory,"
+          << "struct " << runtime::get_name_mangled(mod_name, "devices") << "* 
devices"
+          << ") {";
+    code_ << "return " << run_func << "(";
+    for (const auto& input : metadata_->inputs) {
+      code_ << "inputs->" << input->name_hint() << ",";
+    }
+    if (metadata_->num_outputs == 1) {
+      code_ << "outputs->output";
+    } else {
+      for (int i = 0; i < metadata_->num_outputs; ++i) {
+        code_ << "outputs->output" << i;
+        if (i + 1 != metadata_->num_outputs) {
+          code_ << ",";
+        }
+      }
+    }
+    code_ << ");\n";
+    code_ << "}\n";
+  }
+
   void GenerateAOTDescriptor() {
-    const std::string run_func = ::tvm::runtime::symbol::tvm_run_func_suffix;
-    const std::string run_func_mangled = 
runtime::get_name_mangled(metadata_->mod_name, run_func);
+    const std::string run_func_suffix = 
::tvm::runtime::symbol::tvm_run_func_suffix;
+    const std::string tvm_entrypoint_suffix = 
::tvm::runtime::symbol::tvm_entrypoint_suffix;
+    const std::string run_func_mangled =
+        runtime::get_name_mangled(metadata_->mod_name, run_func_suffix);
+    const std::string entrypoint_mangled =
+        runtime::get_name_mangled(metadata_->mod_name, tvm_entrypoint_suffix);
     const std::string network_mangled = 
runtime::get_name_mangled(metadata_->mod_name, "network");
-    code_ << "#include 
\"tvm/runtime/crt/internal/aot_executor/aot_executor.h\"\n";
+    auto unpacked_api = 
target_->GetAttr<Bool>("unpacked-api").value_or(Bool(false));
+    auto interface_api = 
target_->GetAttr<String>("interface-api").value_or(String("packed"));
+
     code_ << "#include \"tvm/runtime/c_runtime_api.h\"\n";
     code_ << "#ifdef __cplusplus\n";
-    code_ << "extern \"C\"\n";
+    code_ << "extern \"C\" {\n";
     code_ << "#endif\n";
-    if (target_->GetAttr<Bool>("unpacked-api").value_or(Bool(false))) {
-      GenerateEntrypointForUnpackedAPI(run_func_mangled);
+
+    if (unpacked_api) {
+      if (interface_api == "c") {
+        GenerateCInterfaceEntrypoint(entrypoint_mangled, run_func_mangled, 
metadata_->mod_name);
+      } else {
+        GenerateEntrypointForUnpackedAPI(entrypoint_mangled, run_func_mangled);
+      }
     } else {
-      GenerateEntrypointForPackedAPI(run_func_mangled);
+      ICHECK_EQ(interface_api, "packed") << "Packed interface required for 
packed operators";

Review comment:
       Technically we could support the packed API with the C interface too, correct? We'd
just change the input/output tensor types to be TVMValue.

##########
File path: python/tvm/micro/model_library_format.py
##########
@@ -207,6 +208,42 @@ def _build_function_memory_map(function_metadata):
     return ret
 
 
+def _get_main_relay_func(mod: executor_factory.ExecutorFactoryModule):
+    main_func = mod.function_metadata[MAIN_FUNC_NAME_STR]
+    target = list(main_func.relay_primfuncs.keys())[0]
+    return main_func.relay_primfuncs[target]
+
+
+def _convert_tuple_to_outputs(ret_type, offset=0):
+    outputs = []
+    added_fields = len(ret_type.fields)
+    for output_index in range(added_fields):
+        next_output = offset + len(outputs)
+        if isinstance(ret_type.fields[output_index], TupleType):
+            
outputs.extend(_convert_tuple_to_outputs(ret_type.fields[output_index], 
next_output))
+        else:
+            outputs.append(f"output{next_output}")
+    return outputs
+
+
+def _get_inputs_and_outputs_from_module(mod):
+    main_func = _get_main_relay_func(mod)
+    inputs = [argument.name_hint for argument in main_func.params]
+
+    outputs = ["output"]
+    if isinstance(main_func.ret_type, TupleType):
+        outputs = _convert_tuple_to_outputs(main_func.ret_type)
+
+    return inputs, outputs
+
+
+def _should_generate_interface_header(mod):
+    for _, target in mod.target.items():

Review comment:
       I think you can simplify this to:
   `return any(t.attrs.get("interface-api") == "c" for t in mod.target.values())`




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


Reply via email to