comaniac commented on a change in pull request #9108:
URL: https://github.com/apache/tvm/pull/9108#discussion_r724385800



##########
File path: src/runtime/pipeline/pipeline_executor.cc
##########
@@ -21,31 +21,51 @@
  * \file pipeline_executor.cc
  */
 #include "pipeline_executor.h"
-
 namespace tvm {
 namespace runtime {
-
-void PipelineRuntime::Init(const Array<tvm::runtime::Module>& modules,
-                           const std::string& pipeline_json) {
-  return;
-}
-
-/* GetFunction can not be pure abstract function, implement an empty function 
for now.
+/*!
+ * \brief Give frontends an access to packed functions.
+ * \param name The name of the function.
+ * \param sptr_to_self The pointer to the module node.
+ * \return The corresponding packed function.
  */
-PackedFunc PipelineRuntime::GetFunction(const std::string& name,
-                                        const ObjectPtr<Object>& sptr_to_self) 
{
+PackedFunc PipelineExecutor::GetFunction(const std::string& name,
+                                         const ObjectPtr<Object>& 
sptr_to_self) {
+  if (name == "get_num_outputs") {
+    return PackedFunc(
+        [sptr_to_self, this](TVMArgs args, TVMRetValue* rv) { *rv = 
this->NumOutputs(); });
+  } else {
+    LOG(FATAL) << "Unknown packed function: " << name;
+    return PackedFunc();
+  }
   return nullptr;
 }
+/*!
+ * \brief Initialize the pipeline executor with a list of modules to be 
pipelined
+ *  and config in JSON format.
+ * \param modules The module list used for building the pipeline.
+ * \param pipeline_json The configuration of modules dependencies.
+ */
+void PipelineExecutor::Init(const Array<Module>& modules, const std::string& 
pipeline_json) {
+  // Use JSONReader to load pipeline configuration.
+  std::istringstream is(pipeline_json);
+  dmlc::JSONReader reader(&is);
+  // When 'modules' is empty, the modules need to be loaded from the JSON 
configuration.
+  this->Load(&reader, modules.empty());

Review comment:
       `load_module=false` doesn't make sense to me. It's better to just have
   ```
   if (modules.empty()) {
     CHECK(pipeline_json) << "Pipeline configuration cannot be empty when no 
modules are provided";
     std::istringstream is(pipeline_json);
     dmlc::JSONReader reader(&is);
     this->Load(&reader);
   } else if (pipeline_json) {
     LOG(WARNING) << "Pipeline configuration is ignored because pipeline 
modules are already given";
   }
   ```

##########
File path: src/runtime/pipeline/pipeline_executor.h
##########
@@ -36,25 +43,86 @@ namespace runtime {
  *
  *  This executor can be accessed by various language via TVM runtime 
PackedFunc API.
  */
-class TVM_DLL PipelineRuntime : public ModuleNode {
+class TVM_DLL PipelineExecutor : public ModuleNode {
  public:
   /*!
    * \Return the type key of the executor.
    */
-  const char* type_key() const final { return "PipelineRuntime"; }
+  const char* type_key() const final { return "PipelineExecutor"; }
   /*!
-   * \brief Initialize the pipeline executor with module array and json text.
+   * \brief Initialize the pipeline executor with module array and JSON text.
    * \param modules The module list used for building pipeline.
    * \param pipeline_json The configuration of modules dependencies.
    */
-  void Init(const Array<tvm::runtime::Module>& modules, const std::string& 
pipeline_json);
+  void Init(const Array<Module>& modules, const std::string& pipeline_json);
   /*!
    * \brief Give frontends an access to packed functions.
    * \param name The name of the function.
    * \param sptr_to_self The pointer to the module node.
    * \return The corresponding packed function.
    */
   virtual PackedFunc GetFunction(const std::string& name, const 
ObjectPtr<Object>& sptr_to_self);
+
+  /*!
+   * \brief Get the number of outputs.
+   *
+   * \return The number of outputs.
+   */
+  int NumOutputs() const { return num_outputs_; }
+
+ private:
+  /*!\brief The class used to execute pipeline logic.*/
+  PipelineScheduler pipeline_function_;
+  /*!\brief The Dependency information of each graph runtime module of the 
pipeline.*/
+  PipelineConfig pipeline_config_;
+  /*!\brief The Module information used to create graph runtime.*/
+  ModuleConfig mod_config_;
+  /*!\brief How many outputs are in this pipeline executor.*/
+  size_t num_outputs_ = 0;
+  /*!\brief Json loader.*/
+  void Load(dmlc::JSONReader* reader, bool load_module = false) {
+    reader->BeginArray();
+    while (reader->NextArrayItem()) {
+      std::string key;
+      reader->BeginObject();
+      int mod_idx = -1;
+      std::string lib_name;
+      std::string json_name;
+      std::string params_name;
+      std::string dev;
+      OutputMap output;
+      while (reader->NextObjectItem(&key)) {
+        if (key == "mod_idx") {
+          reader->Read(&mod_idx);
+        } else if (key == "lib_name") {
+          reader->Read(&lib_name);
+        } else if (key == "json_name") {
+          reader->Read(&json_name);
+        } else if (key == "params_name") {
+          reader->Read(&params_name);
+        } else if (key == "dev") {
+          reader->Read(&dev);
+        } else if (key == "output") {
+          reader->Read(&output);
+        } else {
+          LOG(FATAL) << "do not support key " << key;
+        }
+      }
+      // Check that mod_idx was read successfully; at this level of parsing no 
module
+      // is a PipelineExecutor, hence mod_idx should start from 0.
+      ICHECK(mod_idx >= 0) << "Invalid mod_idx value " << mod_idx;
+      // Check if the output is read successfully.
+      ICHECK(!output.Empty()) << "Invalid output binding result.";
+      pipeline_config_.Insert(mod_idx, output);
+      // Load the lib, json, and params information.
+      if (load_module) {

Review comment:
       Echoing my previous comment: remove this if-condition.




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


Reply via email to