arina-grovety commented on code in PR #14765:
URL: https://github.com/apache/tvm/pull/14765#discussion_r1185816248
##########
python/tvm/relay/op/contrib/ethosu.py:
##########
@@ -2000,6 +2000,79 @@ def is_valid(self):
return True
+class ChannelPadParams:
+ """
+ This class will parse a call to a ethosu.pad2d composite function
+ and extract the parameter information.
+ """
+
+ composite_name = "ethos-u.channel-pad"
+ # The ethos-u.channel-pad composite function will be transformed
+ # to the Relay concatenate operation.
+
+ def __init__(self, func_body: Call):
+ from tvm.relay.backend.contrib.ethosu.util import QPadArgs
+
+ # there is no 'layout' attribute in nn.pad
+ layout = "NHWC"
+ self.ifm = TensorParams(
+ tensor=func_body.args[QPadArgs.IFM.value],
+ layout=layout,
+ scale=tvm.relay.Constant(tvm.nd.array(np.array(1.0,
dtype="float32"))),
+ zero_point=func_body.args[QPadArgs.IFM_ZERO_POINT.value],
+ )
+
+ self.ch_padding = self.extract_ch_padding(func_body)
+ self.ofm = TensorParams(
+ tensor=func_body,
+ layout=layout,
+ scale=tvm.relay.Constant(tvm.nd.array(np.array(1.0,
dtype="float32"))),
+ zero_point=func_body.args[QPadArgs.IFM_ZERO_POINT.value],
+ )
+
+ @staticmethod
+ def extract_ch_padding(
+ padding: relay.Call,
+ ) -> Optional[Tuple[int, int]]:
+ """
+ Here we check whether a separate channel-dimension padding operation
can be
+ rewritten as Relay concatenate operation. If the padding specified by
the
+ separate nn.pad operation is not supported by NPU, None will be
returned.
+ This will cause the nn.pad not to be offloaded to NPU.
+ """
+ pad_width = padding.attrs["pad_width"]
+ if len(pad_width) != 4:
+ return None
+ if (
+ list(pad_width[0]) != [0, 0]
+ or list(pad_width[1]) != [0, 0]
Review Comment:
Yes, you are right — spatial and channel padding can, of course, occur together in
neural networks. This is a separate task; we discussed it and agreed it will be
useful in the future. We plan to address it when we have time, or when a network
with such padding appears.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]