This is an automated email from the ASF dual-hosted git repository.
echuraev pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tvm.git
The following commit(s) were added to refs/heads/main by this push:
new fd2a510ccf [Relay][BugFix] fix a bug about ReLU in the threshold
attribute which causes different results with keras (#14824)
fd2a510ccf is described below
commit fd2a510ccf76efd7b678c4746c1fa72ce496b58d
Author: Qingchao Shen <[email protected]>
AuthorDate: Thu May 11 16:48:41 2023 +0800
[Relay][BugFix] fix a bug about ReLU in the threshold attribute which
causes different results with keras (#14824)
* fix relu threshold attribute
* Update test_forward.py
---
python/tvm/relay/frontend/keras.py | 2 ++
tests/python/frontend/keras/test_forward.py | 1 +
2 files changed, 3 insertions(+)
diff --git a/python/tvm/relay/frontend/keras.py
b/python/tvm/relay/frontend/keras.py
index ef94c74e03..b820ad586d 100644
--- a/python/tvm/relay/frontend/keras.py
+++ b/python/tvm/relay/frontend/keras.py
@@ -145,6 +145,8 @@ def _convert_advanced_activation(inexpr, keras_layer, etab,
data_layout, input_s
axis = axis + 1 if axis < dims - 1 else 1
return _op.nn.softmax(inexpr, axis=axis)
if act_type == "ReLU":
+ if np.isnan(keras_layer.threshold).any():
+ raise tvm.error.OpAttributeInvalid("The threshold value of a ReLU
cannot be None.")
threshold = _expr.const(keras_layer.threshold, dtype="float32")
if keras_layer.max_value and float(keras_layer.threshold) == 0:
# f(x) = max_value, for x >= max_value
diff --git a/tests/python/frontend/keras/test_forward.py
b/tests/python/frontend/keras/test_forward.py
index 92b8ce0a64..a5305430c2 100644
--- a/tests/python/frontend/keras/test_forward.py
+++ b/tests/python/frontend/keras/test_forward.py
@@ -227,6 +227,7 @@ class TestKeras:
act_funcs = [
keras_mod.layers.LeakyReLU(alpha=None),
keras_mod.layers.LeakyReLU(2, 3, 4),
+ keras_mod.layers.ReLU(threshold=None),
]
data = keras_mod.layers.Input(shape=(2, 3, 4))
for act_func in act_funcs: