masahi commented on a change in pull request #9108:
URL: https://github.com/apache/tvm/pull/9108#discussion_r725965693
##########
File path: python/tvm/contrib/pipeline_executor.py
##########
@@ -139,12 +187,12 @@ def get_owner_idx(self):
if isinstance(self.io_owner, PipelineConfig.ModuleWrapper):
return self.io_owner.idx
- return 0
+ return -1
- def is_global_interface(self):
- """The global interface is the interface visible to the caller
which use a pipeline
- executor, the global input interface is responsible for passing
parameters to the
- internal module interface, and the global output interface is
responsible for
+ def is_pipeline_executor_interface(self):
+ """The pipeline interface is the interface visible to the caller
uses a pipeline
Review comment:
A broken sentence
`The pipeline interface is the interface visible to the caller uses a
pipeline executor`
##########
File path: python/tvm/contrib/pipeline_executor.py
##########
@@ -70,24 +71,71 @@ def build(pipe_configs):
)
mconf["dev"] = "{},{}".format(dev.device_type, dev.device_id)
- # Create a pipeline configuration.
+ # Create a pipeline configuration, 'mod_idx' start from 0.
Review comment:
Remove `'mod_idx' start from 0.` as it should be obvious
##########
File path: src/runtime/pipeline/pipeline_executor.h
##########
@@ -36,25 +43,115 @@ namespace runtime {
*
* This executor can be accessed by various language via TVM runtime
PackedFunc API.
*/
-class TVM_DLL PipelineRuntime : public ModuleNode {
+class TVM_DLL PipelineExecutor : public ModuleNode {
public:
/*!
* \Return the type key of the executor.
*/
- const char* type_key() const final { return "PipelineRuntime"; }
+ const char* type_key() const final { return "PipelineExecutor"; }
/*!
- * \brief Initialize the pipeline executor with module array and json text.
+ * \brief Initialize the pipeline executor with module array and JSON text.
* \param modules The module list used for building pipeline.
* \param pipeline_json The configuration of modules dependencies.
*/
- void Init(const Array<tvm::runtime::Module>& modules, const std::string&
pipeline_json);
+ void Init(const std::vector<Module>& modules, const std::string&
pipeline_json);
+ /*!
+ * \brief Use the information of mod_config to create a graph executor list.
+ * \param mod_config The configuration information generated by the library
export library
+ * function call.
Review comment:
`library export library function call` sounds weird, remove the second
`library`?
##########
File path: src/runtime/pipeline/pipeline_executor.h
##########
@@ -36,25 +43,115 @@ namespace runtime {
*
* This executor can be accessed by various language via TVM runtime
PackedFunc API.
*/
-class TVM_DLL PipelineRuntime : public ModuleNode {
+class TVM_DLL PipelineExecutor : public ModuleNode {
public:
/*!
* \Return the type key of the executor.
*/
- const char* type_key() const final { return "PipelineRuntime"; }
+ const char* type_key() const final { return "PipelineExecutor"; }
/*!
- * \brief Initialize the pipeline executor with module array and json text.
+ * \brief Initialize the pipeline executor with module array and JSON text.
* \param modules The module list used for building pipeline.
* \param pipeline_json The configuration of modules dependencies.
*/
- void Init(const Array<tvm::runtime::Module>& modules, const std::string&
pipeline_json);
+ void Init(const std::vector<Module>& modules, const std::string&
pipeline_json);
+ /*!
+ * \brief Use the information of mod_config to create a graph executor list.
+ * \param mod_config The configuration information generated by the library
export library
+ * function call.
+ */
+ std::vector<Module> CreateGraphModules(const ModuleConfig& mod_config);
/*!
* \brief Give frontends an access to packed functions.
* \param name The name of the function.
* \param sptr_to_self The pointer to the module node.
* \return The corresponding packed function.
*/
virtual PackedFunc GetFunction(const std::string& name, const
ObjectPtr<Object>& sptr_to_self);
+
+ /*!
+ * \brief Get the number of outputs.
+ *
+ * \return The number of outputs.
+ */
+ int NumOutputs() const { return num_outputs_; }
+
+ /*!\brief Load the module files information.*/
+ ModuleConfig& LoadModuleConfig(dmlc::JSONReader* reader) {
+ reader->BeginArray();
+ while (reader->NextArrayItem()) {
+ std::string key;
+ reader->BeginObject();
+ int mod_idx = -1;
+ std::string lib_name;
+ std::string json_name;
+ std::string params_name;
+ std::string dev;
+ while (reader->NextObjectItem(&key)) {
+ if (key == "mod_idx") {
+ reader->Read(&mod_idx);
+ } else if (key == "lib_name") {
+ reader->Read(&lib_name);
+ } else if (key == "json_name") {
+ reader->Read(&json_name);
+ } else if (key == "params_name") {
+ reader->Read(&params_name);
+ } else if (key == "dev") {
+ reader->Read(&dev);
+ } else {
+ LOG(FATAL) << "do not support key " << key;
+ }
+ }
+ // Check if mod_idx is read successfully, in this level reading there
all the moudles
+ // are graph executor modules, hence the mod_idx should start from 0.
+ ICHECK(mod_idx >= 0) << "Invalid mod_idx value " << mod_idx;
Review comment:
Broken sentence and typo `in this level reading there all the moudles
are graph executor module`
##########
File path: src/runtime/pipeline/pipeline_struct.h
##########
@@ -0,0 +1,182 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+#ifndef TVM_RUNTIME_PIPELINE_PIPELINE_STRUCT_H_
+#define TVM_RUNTIME_PIPELINE_PIPELINE_STRUCT_H_
+#include <assert.h>
+#include <dlpack/dlpack.h>
+#include <dmlc/json.h>
+
+#include <limits>
+#include <string>
+#include <unordered_map>
+#include <vector>
+#define PIPELINE_EXECUTOR_INDEX -1
+/*!
+ * \brief All binding information of a output interface.
+ */
+struct OutputBindings {
+ /*!\brief Output interface binding information, 'int' is the index of the
module that
+ * uses this output data as the input interface data, 'string' is the input
interface name
+ * of the module.
+ */
+ std::unordered_map<int, std::string> bindings;
+ /*!
+ * \brief If there is one PipelineExecutor binding in bindings, then the
current output is
+ * PipelineExecutor interface.
+ * \return Whether this output interface is PipelineExecutor output
interface.
+ */
+ bool IsGlobalOutput() const {
+ int num_output = 0;
+ for (auto binding : bindings) {
+ /* The output is a PipelineExecutor output when the index value
+ * equal PIPELINE_EXECUTOR_INDEX.
+ */
+ num_output += (binding.first == PIPELINE_EXECUTOR_INDEX);
+ }
+ /* If this output is a global output then there is only one such output in
map.*/
+ ICHECK(num_output <= 1);
+ return num_output == 1;
+ }
+ /*!
+ * \brief Create a module interface map from JSONReader.
+ * \param reader JSON reader.
+ */
+ void Load(dmlc::JSONReader* reader) {
+ reader->BeginArray();
+ while (reader->NextArrayItem()) {
+ std::string key;
+ reader->BeginObject();
+ std::string input_name;
+ int mod_idx = std::numeric_limits<int>::min();
+ while (reader->NextObjectItem(&key)) {
+ if (key == "mod_idx") {
+ reader->Read(&mod_idx);
+ }
+ if (key == "input_name") {
+ reader->Read(&input_name);
+ }
+ }
+ // In this level 'Load' that reading the output binding , the module can
be
+ // a 'PipelineExecutor', hence the value of 'mod_idx' should start from
+ // PIPELINE_EXECUTOR_INDEX.
+ ICHECK(mod_idx >= PIPELINE_EXECUTOR_INDEX);
+ bindings[mod_idx] = input_name;
Review comment:
I think it is better to introduce a dedicated `string` variable for
`bindings[PIPELINE_EXECUTOR_INDEX]`. And remove `PIPELINE_EXECUTOR_INDEX` which
is super weird. So how about something like
```
std::string global_input_name; (or other sensible name)
std::unordered_map<int, std::string> bindings;
```
##########
File path: src/runtime/pipeline/pipeline_executor.h
##########
@@ -36,25 +43,115 @@ namespace runtime {
*
* This executor can be accessed by various language via TVM runtime
PackedFunc API.
*/
-class TVM_DLL PipelineRuntime : public ModuleNode {
+class TVM_DLL PipelineExecutor : public ModuleNode {
public:
/*!
* \Return the type key of the executor.
*/
- const char* type_key() const final { return "PipelineRuntime"; }
+ const char* type_key() const final { return "PipelineExecutor"; }
/*!
- * \brief Initialize the pipeline executor with module array and json text.
+ * \brief Initialize the pipeline executor with module array and JSON text.
* \param modules The module list used for building pipeline.
* \param pipeline_json The configuration of modules dependencies.
*/
- void Init(const Array<tvm::runtime::Module>& modules, const std::string&
pipeline_json);
+ void Init(const std::vector<Module>& modules, const std::string&
pipeline_json);
+ /*!
+ * \brief Use the information of mod_config to create a graph executor list.
+ * \param mod_config The configuration information generated by the library
export library
+ * function call.
+ */
+ std::vector<Module> CreateGraphModules(const ModuleConfig& mod_config);
/*!
* \brief Give frontends an access to packed functions.
* \param name The name of the function.
* \param sptr_to_self The pointer to the module node.
* \return The corresponding packed function.
*/
virtual PackedFunc GetFunction(const std::string& name, const
ObjectPtr<Object>& sptr_to_self);
+
+ /*!
+ * \brief Get the number of outputs.
+ *
+ * \return The number of outputs.
+ */
+ int NumOutputs() const { return num_outputs_; }
+
+ /*!\brief Load the module files information.*/
+ ModuleConfig& LoadModuleConfig(dmlc::JSONReader* reader) {
+ reader->BeginArray();
+ while (reader->NextArrayItem()) {
+ std::string key;
+ reader->BeginObject();
+ int mod_idx = -1;
+ std::string lib_name;
+ std::string json_name;
+ std::string params_name;
+ std::string dev;
+ while (reader->NextObjectItem(&key)) {
+ if (key == "mod_idx") {
+ reader->Read(&mod_idx);
+ } else if (key == "lib_name") {
+ reader->Read(&lib_name);
+ } else if (key == "json_name") {
+ reader->Read(&json_name);
+ } else if (key == "params_name") {
+ reader->Read(&params_name);
+ } else if (key == "dev") {
+ reader->Read(&dev);
+ } else {
+ LOG(FATAL) << "do not support key " << key;
+ }
+ }
+ // Check if mod_idx is read successfully, in this level reading there
all the moudles
+ // are graph executor modules, hence the mod_idx should start from 0.
+ ICHECK(mod_idx >= 0) << "Invalid mod_idx value " << mod_idx;
+ // Load the lib, json, and params information.
+ ICHECK(!lib_name.empty()) << "lib_name is empty.";
+ ICHECK(!json_name.empty()) << "json_name is empty.";
+ ICHECK(!params_name.empty()) << "params_name is empty.";
+ mod_config_[mod_idx] = GraphModuleLoadInfo(lib_name, json_name,
params_name, dev);
+ }
+ return mod_config_;
+ }
+
+ private:
+ /*!\brief The class used to execute and schedule the pipeline logic.*/
+ PipelineScheduler pipeline_scheduler_;
+ /*!\brief The Dependency information of each graph runtime module of the
pipeline.*/
+ PipelineConfig pipeline_config_;
+ /*!\brief The Module information used to create the graph runtimes.*/
+ ModuleConfig mod_config_;
+ /*!\brief How many outputs are in this pipeline executor.*/
+ size_t num_outputs_ = 0;
+ /*!\brief Json loader.*/
+ PipelineConfig& LoadPipelineConfig(dmlc::JSONReader* reader) {
+ reader->BeginArray();
+ while (reader->NextArrayItem()) {
+ std::string key;
+ reader->BeginObject();
+ int mod_idx = -1;
+ OutputMap output;
+ std::string dev;
+ while (reader->NextObjectItem(&key)) {
+ if (key == "mod_idx") {
+ reader->Read(&mod_idx);
+ } else if (key == "dev") {
+ reader->Read(&dev);
+ } else if (key == "output") {
+ reader->Read(&output);
+ } else {
+ LOG(FATAL) << "do not support key " << key;
+ }
+ }
+ // Check if mod_idx is read successfully, in this level reading there is
no any moudle
+ // is PipelineExecutor, hence the mod_idx should start from 0.
Review comment:
Same comment as above, this is not ok
##########
File path: python/tvm/contrib/pipeline_executor.py
##########
@@ -532,12 +577,73 @@ def graph_executor_create(self, pipeline_mods,
mod_config):
mod_config : str
The Modudle configuration.
"""
+ # Should store modules in the list named 'mods' in index order.
+ mods = [None for _ in range(len(pipeline_mods))]
+ for lib_index in pipeline_mods:
+ pipeline_lib = pipeline_mods[lib_index]["lib"]
+ dev = pipeline_mods[lib_index]["dev"]
+ lib = graph_executor.GraphModule(pipeline_lib["default"](dev))
+ # Return a module list sorted by lib_index.
+ mods[lib_index] = lib.module
+
+ return mods, json.dumps(mod_config)
+
+ def export_library(self, directory_path):
+ """Export the pipeline executor into disk files.
- mods = []
- for pipeline_mod in pipeline_mods:
- mod = graph_executor.GraphModule(
- pipeline_mod["default"](pipeline_mods[pipeline_mod]["dev"])
+ Parameters
+ ----------
+ directory_path : str
+ Export the files to this directory.
+ """
+ if not self.pipeline_mods:
+ raise RuntimeError(f"The pipeline executor has not been
initialized.")
+
+ # Check if the directory_path exists.
+ if not os.path.exists(directory_path):
+ raise RuntimeError(f"The directory {directory_path} does not
exist.")
+ # Create an load configuration.
+ load_config_file_name = "{}/load_config".format(directory_path)
+ pipeline_config_file_name = "{}/pipeline_config".format(directory_path)
+ config = {}
+ config["load_config"] = load_config_file_name
+ config["pipeline_config"] = pipeline_config_file_name
+ load_config = []
+ # Export the library, JSON, and parameter into files, then export
these files path
+ # into a configuration file.
+ for lib_index in self.pipeline_mods:
+ mconfig = {}
+ mconfig["mod_idx"] = lib_index
+ mconfig["lib_name"] = "{}/lib{}.so".format(directory_path,
lib_index)
+ mconfig["json_name"] = "{}/json{}".format(directory_path,
lib_index)
+ mconfig["params_name"] = "{}/params{}".format(directory_path,
lib_index)
+ mconfig["dev"] = "{},{}".format(
+ self.pipeline_mods[lib_index]["dev"].device_type,
+ self.pipeline_mods[lib_index]["dev"].device_id,
)
- mods.append(mod.module)
- return mods, json.dumps(mod_config)
+ # Get the graph, lib, and parameters from
GraphExecutorFactoryModule.
+ graph, lib, params = self.pipeline_mods[lib_index]["lib"]
+ # Export the lib, graph, and parameters to disk.
+ lib.export_library(mconfig["lib_name"])
+ with open(mconfig["json_name"], "w") as file_handle:
+ file_handle.write(graph)
+ with open(mconfig["params_name"], "wb") as file_handle:
+ file_handle.write(relay.save_param_dict(params))
+
+ load_config.append(mconfig)
+
+ # Export the configuration file to disk.
+ with open(load_config_file_name, "w") as file_handle:
+ json.dump(load_config, file_handle)
+
+ # Export the pipeline configuration file to disk.
+ with open(pipeline_config_file_name, "w") as file_handle:
+ json.dump(self.mods_config, file_handle)
+
+ # Export the configuration file to disk.
Review comment:
Remove the `Export ... to disk` comments at L636, L640 and L644, as the code is
self-explanatory.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]