This is an automated email from the ASF dual-hosted git repository.
syfeng pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tvm.git
The following commit(s) were added to refs/heads/main by this push:
new 8e33401937 [Bugfix][Relay][Frontend][Keras] Add an assertion to reject
an invalid value for attribute units in RNN layers (#15334)
8e33401937 is described below
commit 8e3340193752b9dbbc66a3d53bc0a69e77eb6660
Author: Qingchao Shen <[email protected]>
AuthorDate: Thu Jul 20 22:41:37 2023 +0800
[Bugfix][Relay][Frontend][Keras] Add an assertion to reject an invalid value
for attribute units in RNN layers (#15334)
---
python/tvm/relay/frontend/keras.py | 5 +++++
tests/python/frontend/keras/test_forward.py | 2 +-
2 files changed, 6 insertions(+), 1 deletion(-)
diff --git a/python/tvm/relay/frontend/keras.py
b/python/tvm/relay/frontend/keras.py
index aba4160695..63938c9e42 100644
--- a/python/tvm/relay/frontend/keras.py
+++ b/python/tvm/relay/frontend/keras.py
@@ -256,6 +256,8 @@ def _convert_dense(
weightList = keras_layer.get_weights()
weight = etab.new_const(weightList[0].transpose([1, 0]))
params = {"weight": weight, "units": weightList[0].shape[1]}
+ units = list(weightList[0].shape)[1]
+ assert units > 0, "The value of units must be a positive integer"
if input_shape is None:
input_shape = keras_layer.input_shape
input_dim = len(input_shape)
@@ -1010,6 +1012,7 @@ def _convert_lstm(
if keras_layer.go_backwards:
in_data = _op.reverse(in_data, axis=1)
units = list(weightList[0].shape)[1]
+ assert units > 0, "The value of units must be a positive integer"
time_steps = in_shape[1]
in_data = _op.squeeze(in_data, axis=[0])
in_data = _op.split(in_data, indices_or_sections=time_steps, axis=0)
@@ -1053,6 +1056,7 @@ def _convert_simple_rnn(
if keras_layer.use_bias:
in_bias = etab.new_const(weightList[2])
units = list(weightList[0].shape)[1]
+ assert units > 0, "The value of units must be a positive integer"
in_data = _op.nn.batch_flatten(in_data)
ixh = _op.nn.dense(in_data, kernel_weight, units=units)
if keras_layer.use_bias:
@@ -1082,6 +1086,7 @@ def _convert_gru(
if keras_layer.use_bias:
in_bias = etab.new_const(weightList[2])
units = list(weightList[0].shape)[1]
+ assert units > 0, "The value of units must be a positive integer"
in_data = _op.nn.batch_flatten(in_data)
matrix_x = _op.nn.dense(in_data, kernel_weight, units=units)
if keras_layer.use_bias:
diff --git a/tests/python/frontend/keras/test_forward.py
b/tests/python/frontend/keras/test_forward.py
index 53e2ca8dbe..44767712d0 100644
--- a/tests/python/frontend/keras/test_forward.py
+++ b/tests/python/frontend/keras/test_forward.py
@@ -251,7 +251,7 @@ class TestKeras:
):
act_funcs = [
keras_mod.layers.LeakyReLU(alpha=None),
- keras_mod.layers.LEU(2, 3, 4),
+ keras_mod.layers.ELU(2, 3, 4),
keras_mod.layers.ReLU(threshold=None),
]
data = keras_mod.layers.Input(shape=(2, 3, 4))