tqchen commented on a change in pull request #7987:
URL: https://github.com/apache/tvm/pull/7987#discussion_r627758326



##########
File path: python/tvm/te/operation.py
##########
@@ -426,3 +427,51 @@ def reduce_axis(dom, name="rv", thread_tag="", span=None):
         An iteration variable representing the value.
     """
     return tvm.tir.IterVar(dom, name, 2, thread_tag, span)
+
+
+def create_prim_func(ops: List[_tensor.Tensor]) -> tvm.tir.PrimFunc:
+    """Create a TensorIR PrimFunc from tensor expression
+    Parameters
+    ----------
+    ops : List[Tensor]
+        The source expression.
+
+    Example
+    -------
+    We define a matmul kernel using following code:
+
+    .. code-block:: python
+
+        import tvm
+        from tvm import te
+
+        A = te.placeholder((128, 128), name="A")
+        B = te.placeholder((128, 128), name="B")
+        k = te.reduce_axis((0, 128), "k")
+        C = te.compute((128, 128), lambda x, y: te.sum(A[x, k] * B[y, k], axis=k), name="C")
+        func = create_prim_func([A, B, C])
+        print(tvm.script.asscript(func))
+
+    If we want to use the TensorIR schedule to do transformations on such a kernel,
+    we need to use `create_prim_func([A, B, C])` to create a schedulable PrimFunc.
+    The generated function looks like:
+
+    .. code-block:: python
+
+        def tir_matmul(a: ty.handle, b: ty.handle, c: ty.handle) -> None:

Review comment:
      Please add a `@tvm.script.tir` decorator to the generated-function example so that it is a valid, runnable TVM script snippet.




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]


Reply via email to