This is an automated email from the ASF dual-hosted git repository.

lukhut pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tvm.git


The following commit(s) were added to refs/heads/main by this push:
     new f4520c4f15 [TVMC] Improve --desired-layouts functionality (#14272)
f4520c4f15 is described below

commit f4520c4f1550bf76e4e547e75ae3ed65d6f55a7b
Author: Philipp van Kempen <[email protected]>
AuthorDate: Thu Mar 16 11:30:59 2023 +0100

    [TVMC] Improve --desired-layouts functionality (#14272)
    
    This aims to make the `--desired-layout` argument more powerful, building on the previously merged changes from #14010, by introducing two new features:
    
    1. Allow passing multiple arguments to `--desired-layout` instead of only one, to specify one layout per transformed operator listed in `--desired-layout-ops`. (The number of arguments has to be either 1 or match the number of transformed operators.)
    2. Optionally, you can now specify a non-default kernel layout as follows: `NHWC:HWIO`
    
    Example usage: `tvmc compile … --desired-layout-ops nn.max_pool2d qnn.conv2d --desired-layout NCHW NHWC:HWIO`
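    
    For reference, this is the mapping such arguments produce; a minimal sketch that mirrors the `layout_helper` introduced in the diff below (the standalone `parse_layout` name and the assert are illustrative, not part of the patch):
    
        # Each "DATA[:KERNEL]" string becomes [data_layout, kernel_layout];
        # a missing kernel layout falls back to "default".
        def parse_layout(layout):
            if ":" in layout:
                data_layout, kernel_layout = layout.split(":", 1)
                return [data_layout, kernel_layout]
            return [layout, "default"]
    
        ops = ["nn.max_pool2d", "qnn.conv2d"]
        layouts = ["NCHW", "NHWC:HWIO"]
        desired_layouts = {op: parse_layout(layouts[i]) for i, op in enumerate(ops)}
        assert desired_layouts == {
            "nn.max_pool2d": ["NCHW", "default"],
            "qnn.conv2d": ["NHWC", "HWIO"],
        }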
    
    I also added unit tests for the new use-cases.
    
    ### Known Limitations:
    * It would make sense to specify individual kernel layouts for regular convolutions and depthwise ones. However, since both are usually implemented as a generalized `nn.conv2d`, we cannot transform them individually. Are there any good workarounds for this?
    * The arguments of `--desired-layouts` were previously checked for validity during command-line parsing (e.g. only NCHW and NHWC were allowed), which is no longer possible. Should I add a regular expression for that? (A possible sketch is shown below.)
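    
    As one possibility, a simple pattern check per value could restore some validation; the following is purely an illustrative sketch (the `is_valid_layout` helper and the regex are assumptions, not part of this patch):
    
        import re
    
        # Hypothetical validation: accept "DATA" or "DATA:KERNEL", where each part
        # is an alphanumeric layout string such as NCHW, NHWC, HWIO or NCHW4c.
        LAYOUT_RE = re.compile(r"^[A-Za-z0-9]+(:[A-Za-z0-9]+)?$")
    
        def is_valid_layout(arg):
            return LAYOUT_RE.match(arg) is not None
    
        assert is_valid_layout("NHWC")
        assert is_valid_layout("NHWC:HWIO")
        assert not is_valid_layout("NHWC:HWIO:extra")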
---
 python/tvm/driver/tvmc/transform.py        | 42 +++++++++++----
 tests/python/driver/tvmc/test_transform.py | 86 +++++++++++++++++++++++++++++-
 2 files changed, 117 insertions(+), 11 deletions(-)

diff --git a/python/tvm/driver/tvmc/transform.py b/python/tvm/driver/tvmc/transform.py
index 2b34ba11b4..30d9bfa639 100644
--- a/python/tvm/driver/tvmc/transform.py
+++ b/python/tvm/driver/tvmc/transform.py
@@ -103,15 +103,17 @@ def convert_to_mixed_precision(mod, ops=None, calculation_type="float16", acc_ty
                 raise TVMCException("Error converting mixed precision : {0}".format(str(err)))
 
 
-def convert_graph_layout(mod, desired_layout, ops=None):
+def convert_graph_layout(mod, desired_layouts, ops=None):
     """Alter the layout of the input graph.
 
     Parameters
     ----------
     mod : tvm.IRModule
         The relay module to convert.
-    desired_layout : str
-        The layout to convert to.
+    desired_layouts : list[str]
+        The layouts to convert to.
+        Expects either a single element or one str per operator.
+        Each entry can be a data layout only, or a data:kernel combination, e.g. NHWC:HWIO
     ops : list
         List of operators to be layout converted.
 
@@ -123,7 +125,27 @@ def convert_graph_layout(mod, desired_layout, ops=None):
     if ops is None:
         ops = ["nn.conv2d", "nn.conv2d_transpose", "qnn.conv2d"]
 
-    desired_layouts = {op: [desired_layout, "default"] for op in ops}
+    if not isinstance(desired_layouts, list):
+        # For backwards compatibility
+        assert isinstance(desired_layouts, str)
+        desired_layouts = [desired_layouts]
+
+    if len(desired_layouts) != len(ops):
+        if len(desired_layouts) != 1:
+            raise TVMCException(
+                "Expected 1 or {} layouts but got {}".format(len(ops), len(desired_layouts))
+            )
+        desired_layouts = desired_layouts * len(ops)
+
+    def layout_helper(layout):
+        if ":" in layout:
+            data_layout, kernel_layout = layout.split(":", 1)
+        else:
+            data_layout = layout
+            kernel_layout = "default"
+        return [data_layout, kernel_layout]
+
+    desired_layouts = {op: layout_helper(desired_layouts[i]) for i, op in enumerate(ops)}
 
     # Convert the layout of the graph where possible.
     seq = transform.Sequential(
@@ -137,7 +159,7 @@ def convert_graph_layout(mod, desired_layout, ops=None):
     try:
         return seq(mod)
     except Exception as err:
-        raise TVMCException("Error converting layout to {0}: {1}".format(desired_layout, str(err)))
+        raise TVMCException("Error converting layouts: {}".format(str(err)))
 
 
 def apply_graph_transforms(mod, args):
@@ -159,7 +181,7 @@ def apply_graph_transforms(mod, args):
         return mod
 
     # AlterLayout
-    if args.get("desired_layout", False):
+    if args.get("desired_layout", None):
         mod = convert_graph_layout(
             mod, args["desired_layout"], args.get("desired_layout_ops", None)
         )
@@ -210,9 +232,11 @@ def generate_transform_args(parser):
     # AlterLayout
     parser.add_argument(
         "--desired-layout",
-        choices=["NCHW", "NHWC"],
-        default=None,
-        help="Change the data layout of the whole graph.",
+        nargs="+",
+        help="Change the data/kernel layout of the graph (e.g. NCHW or NHWC:HWIO). "
+        "Multiple values can be provided to specify per-operator layouts, "
+        "e.g. '--desired-layout NHWC:HWIO' (apply the same layout to every operator) "
+        "or '--desired-layout-ops nn.conv2d nn.avg_pool2d --desired-layout NCHW NHWC'.",
     )
     parser.add_argument(
         "--desired-layout-ops",
diff --git a/tests/python/driver/tvmc/test_transform.py b/tests/python/driver/tvmc/test_transform.py
index 72c7cda6ff..06af3cb156 100644
--- a/tests/python/driver/tvmc/test_transform.py
+++ b/tests/python/driver/tvmc/test_transform.py
@@ -15,6 +15,7 @@
 # specific language governing permissions and limitations
 # under the License.
 
+import pytest
 from unittest.mock import MagicMock
 
 import tvm
@@ -23,6 +24,7 @@ from tvm.relay import testing
 from tvm.relay.expr_functor import ExprMutator
 from tvm.ir.instrument import pass_instrument
 from tvm.driver.tvmc.transform import apply_graph_transforms
+from tvm.driver.tvmc.model import TVMCException
 
 
 def test_layout_transform_fold_constant(relay_conv2d):
@@ -41,7 +43,7 @@ def test_layout_transform_fold_constant(relay_conv2d):
 
     pass_names = CollectPassNames()
     with tvm.transform.PassContext(opt_level=3, instruments=[pass_names]):
-        apply_graph_transforms(relay_conv2d, {"desired_layout": desired_layout})
+        apply_graph_transforms(relay_conv2d, {"desired_layout": [desired_layout]})
 
     names = pass_names.names
     assert "ConvertLayout" in names
@@ -61,7 +63,7 @@ def test_layout_transform_convert_layout_pass_args(relay_conv2d, monkeypatch):
     monkeypatch.setattr(relay.transform, "ConvertLayout", mock_convert_layout)
 
     with tvm.transform.PassContext(opt_level=3):
-        apply_graph_transforms(relay_conv2d, {"desired_layout": desired_layout})
+        apply_graph_transforms(relay_conv2d, {"desired_layout": [desired_layout]})
 
     mock_convert_layout.assert_called_once_with(
         {
@@ -72,6 +74,86 @@ def test_layout_transform_convert_layout_pass_args(relay_conv2d, monkeypatch):
     )
 
 
+def test_layout_transform_convert_kernel_layout_pass_args(relay_conv2d, monkeypatch):
+    """
+    Check the convert layout desired layouts argument is what is expected when
+    a non-default kernel layout is provided.
+    """
+    desired_layout = "NHWC:HWIO"
+    desired_layout_ops = ["nn.conv2d"]
+
+    mock_convert_layout = MagicMock()
+    mock_convert_layout.return_value = relay.transform.ConvertLayout({})
+    monkeypatch.setattr(relay.transform, "ConvertLayout", mock_convert_layout)
+
+    with tvm.transform.PassContext(opt_level=3):
+        apply_graph_transforms(
+            relay_conv2d,
+            {"desired_layout": [desired_layout], "desired_layout_ops": desired_layout_ops},
+        )
+
+    mock_convert_layout.assert_called_once_with(
+        {
+            "nn.conv2d": ["NHWC", "HWIO"],
+        }
+    )
+
+
+def test_layout_transform_convert_layout_pass_args_multiple(relay_conv2d, monkeypatch):
+    """
+    Check the convert layout desired layouts argument is what is expected when
+    multiple desired layouts are provided.
+    """
+    desired_layout = ["NHWC", "NCHW"]
+    desired_layout_ops = ["nn.max_pool2d", "qnn.conv2d"]
+
+    mock_convert_layout = MagicMock()
+    mock_convert_layout.return_value = relay.transform.ConvertLayout({})
+    monkeypatch.setattr(relay.transform, "ConvertLayout", mock_convert_layout)
+
+    with tvm.transform.PassContext(opt_level=3):
+        apply_graph_transforms(
+            relay_conv2d,
+            {"desired_layout": desired_layout, "desired_layout_ops": desired_layout_ops},
+        )
+
+    mock_convert_layout.assert_called_once_with(
+        {
+            "nn.max_pool2d": ["NHWC", "default"],
+            "qnn.conv2d": ["NCHW", "default"],
+        }
+    )
+
+
[email protected](
+    "desired",
+    [
+        (["NHWC", "NCHW"], ["nn.max_pool2d"]),
+        (["NHWC", "NCHW"], None),
+    ],
+)
+def test_layout_transform_convert_layout_pass_args_multiple_invalid(
+    relay_conv2d,
+    monkeypatch,
+    desired,
+):
+    """
+    Check invalid cases when passing multiple values to the desired layouts argument.
+    """
+    desired_layout, desired_layout_ops = desired
+
+    mock_convert_layout = MagicMock()
+    mock_convert_layout.return_value = relay.transform.ConvertLayout({})
+    monkeypatch.setattr(relay.transform, "ConvertLayout", mock_convert_layout)
+
+    with pytest.raises(TVMCException):
+        with tvm.transform.PassContext(opt_level=3):
+            apply_graph_transforms(
+                relay_conv2d,
+                {"desired_layout": desired_layout, "desired_layout_ops": desired_layout_ops},
+            )
+
+
 def test_layout_transform_to_mixed_precision_pass_args_mock(relay_conv2d, monkeypatch):
     """
    Check the mixed precision arguments which are expected when
