This is an automated email from the ASF dual-hosted git repository.
sslyu pushed a commit to branch unity
in repository https://gitbox.apache.org/repos/asf/tvm.git
The following commit(s) were added to refs/heads/unity by this push:
new 0dba0e3ed2 [Unity][UX][Tweak] Make it an error to mark a function private and specify a global symbol (#15170)
0dba0e3ed2 is described below
commit 0dba0e3ed297a5f80dcd02031bb8c2664a3b5c4b
Author: Steven S. Lyubomirsky <[email protected]>
AuthorDate: Tue Jun 27 23:12:07 2023 -0400
[Unity][UX][Tweak] Make it an error to mark a function private and specify
a global symbol (#15170)
* Make it an error to mark a function private and specify a global symbol
* Fix improper use of global symbol
* Update privacy annotations in other tests
---
src/script/ir_builder/relax/ir.cc | 4 +++
tests/python/relax/test_transform_fuse_ops.py | 2 +-
.../relax/test_transform_fuse_ops_by_pattern.py | 4 +--
.../relax/test_transform_rewrite_cuda_graph.py | 4 +--
tests/python/relax/test_tvmscript_parser.py | 30 ++++++++++++++++++++++
5 files changed, 39 insertions(+), 5 deletions(-)
diff --git a/src/script/ir_builder/relax/ir.cc
b/src/script/ir_builder/relax/ir.cc
index d66e8d0598..116cd02eb5 100644
--- a/src/script/ir_builder/relax/ir.cc
+++ b/src/script/ir_builder/relax/ir.cc
@@ -86,6 +86,10 @@ void FuncAttrs(Map<String, ObjectRef> attrs) {
if (!frame->attrs.empty()) {
LOG(FATAL) << "ValueError: Duplicate function attrs, previous one is:\n"
<< frame->attrs;
}
+  if (attrs.count(tvm::attr::kGlobalSymbol) && frame->is_private.value_or(Bool(false))->value) {
+    LOG(FATAL) << "ValueError: Specifying a global symbol attribute even though the function is "
+                  "annotated as private";
+  }
frame->attrs = attrs;
}
diff --git a/tests/python/relax/test_transform_fuse_ops.py
b/tests/python/relax/test_transform_fuse_ops.py
index b51f651025..c98bdd2c8c 100644
--- a/tests/python/relax/test_transform_fuse_ops.py
+++ b/tests/python/relax/test_transform_fuse_ops.py
@@ -1446,7 +1446,7 @@ def test_partially_used_tuple_param():
@I.ir_module
class Expected:
- @R.function
+ @R.function(private=True)
def fused_add_divide(
x_0: R.Tensor((2,), dtype="float32"),
param_0: R.Tensor((), dtype="float32"),
diff --git a/tests/python/relax/test_transform_fuse_ops_by_pattern.py
b/tests/python/relax/test_transform_fuse_ops_by_pattern.py
index 592132516b..e8bcbebe7d 100644
--- a/tests/python/relax/test_transform_fuse_ops_by_pattern.py
+++ b/tests/python/relax/test_transform_fuse_ops_by_pattern.py
@@ -60,7 +60,7 @@ class Conv2dReLU_composite_annotated:
R.output(gv)
return gv
- @R.function(private=True)
+ @R.function
def fused_relax_nn_conv2d_relax_nn_relu_dnnl(
data1: R.Tensor((1, 64, 56, 56), dtype="float32"),
weight11: R.Tensor((64, 64, 3, 3), dtype="float32"),
@@ -377,7 +377,7 @@ class Conv2dx2:
@tvm.script.ir_module
class Conv2dx2_partitioned:
- @R.function(private=True)
+ @R.function
def fused_relax_nn_conv2d_cutlass(
data: R.Tensor((16, 32, 32, 16), dtype="float16"),
weight1: R.Tensor((16, 3, 3, 16), dtype="float16"),
diff --git a/tests/python/relax/test_transform_rewrite_cuda_graph.py
b/tests/python/relax/test_transform_rewrite_cuda_graph.py
index 4f25feb032..52362eae01 100644
--- a/tests/python/relax/test_transform_rewrite_cuda_graph.py
+++ b/tests/python/relax/test_transform_rewrite_cuda_graph.py
@@ -294,7 +294,7 @@ def test_vm_builtin():
T.writes(compute[i0, i1])
compute[i0, i1] = T.exp(rxplaceholder[i0, i1])
- @R.function
+ @R.function(private=True)
def cuda_graph_alloc() -> R.Tuple(R.Object, R.Object):
R.func_attr({"relax.force_pure": True})
storage: R.Object = R.memory.alloc_storage(R.shape([32]), R.prim_value(0), R.str("global"), R.dtype("float32"))
@@ -302,7 +302,7 @@ def test_vm_builtin():
gv: R.Tuple(R.Object, R.Object) = (storage, storage1)
return gv
- @R.function
+ @R.function(private=True)
def cuda_graph_capture(alloc: R.Tensor((2, 4), dtype="float32"), alloc1: R.Tensor((2, 4), dtype="float32"), storage: R.Object) -> R.Tuple(R.Tensor((2, 4), dtype="float32"), R.Tensor((2, 4), dtype="float32")):
R.func_attr({"relax.force_pure": True})
cls = Expected
diff --git a/tests/python/relax/test_tvmscript_parser.py
b/tests/python/relax/test_tvmscript_parser.py
index 9305cdbcb1..564fe04692 100644
--- a/tests/python/relax/test_tvmscript_parser.py
+++ b/tests/python/relax/test_tvmscript_parser.py
@@ -1510,5 +1510,35 @@ def test_private_function():
_check(Addition, bb.get())
+def test_private_function_with_global_symbol_fail():
+ with pytest.raises(tvm.error.DiagnosticError):
+
+ @I.ir_module
+ class Addition:
+ @R.function(private=True)
+ def main(x: R.Tensor((), "int32")) -> R.Tensor((), "int32"):
+ # it is an error to simultaneously mark a function private
+ # and give it a global symbol manually
+ R.func_attr({"global_symbol": "main"})
+ y = R.add(x, x)
+ return y
+
+ # should not execute
+ _check(Addition)
+
+
+def test_private_function_with_global_symbol_no_module_fail():
+ with pytest.raises(tvm.error.DiagnosticError):
+
+ @R.function(private=True)
+ def func(x: R.Tensor((), "int32")) -> R.Tensor((), "int32"):
+ R.func_attr({"global_symbol": "main"})
+ y = R.add(x, x)
+ return y
+
+ # should not execute
+ _check(func)
+
+
if __name__ == "__main__":
tvm.testing.main()