kongroo commented on a change in pull request #8777:
URL: https://github.com/apache/tvm/pull/8777#discussion_r740012664



##########
File path: python/tvm/contrib/torch/pytorch_tvm.py
##########
@@ -0,0 +1,226 @@
+#!/usr/bin/env python
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""`compile` api that convert torch module to torch tvm module"""
+import os
+import tvm
+import tvm.testing
+from tvm import relay, autotvm
+from tvm.runtime import load_module
+from tvm.autotvm.tuner import XGBTuner, GATuner, RandomTuner, GridSearchTuner
+from tvm.contrib import graph_executor
+from tvm.contrib.debugger import debug_executor
+from . import GraphModule
+
+
def tune_tasks(
    tasks,
    measure_option,
    tuner="xgb",
    n_trial=1000,
    early_stopping=None,
    log_filename="tuning.log",
    use_transfer_learning=True,
):
    """Tune autotvm tasks and write the best records to ``log_filename``.

    Parameters
    ----------
    tasks : list
        autotvm tasks extracted from a relay module.
    measure_option : dict
        Result of ``autotvm.measure_option`` controlling how candidates
        are built and measured.
    tuner : str
        Tuner name: "xgb", "xgb-rank", "ga", "random" or "gridsearch".
    n_trial : int
        Trial budget per task; capped by the size of the task's config space.
    early_stopping : int, optional
        Stop a task after this many trials without improvement.
    log_filename : str
        Destination file for the best tuning records.
    use_transfer_learning : bool
        If True, seed each tuner with the history accumulated so far.

    Raises
    ------
    ValueError
        If ``tuner`` is not one of the recognized tuner names.
    """
    # Tune into a temporary log; only the best record per workload is kept
    # in the final file (see pick_best below).
    tmp_log_file = log_filename + ".tmp"
    if os.path.exists(tmp_log_file):
        os.remove(tmp_log_file)

    for i, tsk in enumerate(reversed(tasks)):
        prefix = f"[Task {i + 1:2d}/{len(tasks):2d}] "

        # Create the tuner. NOTE(fix): "sgb-rank" was a typo for the
        # conventional "xgb-rank" spelling; accept the correct name while
        # keeping the old string so existing callers are unaffected.
        if tuner in ("xgb", "xgb-rank", "sgb-rank"):
            tuner_obj = XGBTuner(tsk, loss_type="rank")
        elif tuner == "ga":
            tuner_obj = GATuner(tsk, pop_size=100)
        elif tuner == "random":
            tuner_obj = RandomTuner(tsk)
        elif tuner == "gridsearch":
            tuner_obj = GridSearchTuner(tsk)
        else:
            raise ValueError("Invalid tuner: " + tuner)

        # Transfer learning: warm-start from records of previously tuned
        # tasks in this run, if any exist yet.
        if use_transfer_learning and os.path.isfile(tmp_log_file):
            tuner_obj.load_history(autotvm.record.load_from_file(tmp_log_file))

        # Do tuning; never request more trials than the config space holds.
        tsk_trial = min(n_trial, len(tsk.config_space))
        tuner_obj.tune(
            n_trial=tsk_trial,
            early_stopping=early_stopping,
            measure_option=measure_option,
            callbacks=[
                autotvm.callback.progress_bar(tsk_trial, prefix=prefix),
                autotvm.callback.log_to_file(tmp_log_file),
            ],
        )

    # Pick the best records into the final log and drop the temporary file.
    autotvm.record.pick_best(tmp_log_file, log_filename)
    os.remove(tmp_log_file)
+
+
def get_tuning_opt(log_file="tuning.log", n_trial=200):
    """Build the default option dict consumed by ``tune_tasks``.

    Parameters
    ----------
    log_file : str
        File that receives the best tuning records.
    n_trial : int
        Maximum number of trials per task.

    Returns
    -------
    dict
        Keyword arguments for ``tune_tasks``: log file, tuner choice,
        trial budget, early-stopping threshold, and a local measure option.
    """
    builder = autotvm.LocalBuilder(timeout=10)
    runner = autotvm.LocalRunner(number=20, repeat=3, timeout=4, min_repeat_ms=150)
    return {
        "log_filename": log_file,
        "tuner": "random",
        "n_trial": n_trial,
        "early_stopping": 60,
        "measure_option": autotvm.measure_option(builder=builder, runner=runner),
    }
+
+
+TVM_ASSETS = ["mod.so", "graph.json", "params"]
+
+
class PyTorchTVMModule:
    """Helper class for compiling pytorch module to tvm module"""

    def __init__(self) -> None:
        # All compilation state starts empty; the fields below are
        # presumably populated by later compile/load steps of this class
        # (not visible in this diff hunk) — verify against the full file.
        self.script_module = None  # scripted/traced torch module to compile
        self.input_infos = None  # input name/shape info — TODO confirm format
        self.default_dtype = "float32"  # dtype assumed when none is specified
        self.mod = None  # relay module produced from the torch module
        self.params = None  # parameters accompanying self.mod
        self.tasks = None  # autotvm tuning tasks extracted for self.mod
        # NOTE(review): target/device are hard-coded to CUDA here; the review
        # thread asks for them to become constructor arguments.
        self.target = "cuda"
        self.dev = tvm.cuda(0)

Review comment:
       Added constructor arguments for the target and device so they are no longer hard-coded to CUDA.




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


Reply via email to