From ee1511a3243ba5f588f135bec5408f50039695a5 Mon Sep 17 00:00:00 2001 From: Qingchao Shen Date: Mon, 17 Jul 2023 13:16:31 +0800 Subject: [PATCH 1/3] capture the invalid value about RNN Capture the invalid value of units. --- python/tvm/relay/frontend/keras.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/python/tvm/relay/frontend/keras.py b/python/tvm/relay/frontend/keras.py index 1913d4a2681a..f99f1806616d 100644 --- a/python/tvm/relay/frontend/keras.py +++ b/python/tvm/relay/frontend/keras.py @@ -1008,6 +1008,7 @@ def _convert_lstm( if keras_layer.go_backwards: in_data = _op.reverse(in_data, axis=1) units = list(weightList[0].shape)[1] + assert units > 0, "The value of units must be a positive integer" time_steps = in_shape[1] in_data = _op.squeeze(in_data, axis=[0]) in_data = _op.split(in_data, indices_or_sections=time_steps, axis=0) @@ -1051,6 +1052,7 @@ def _convert_simple_rnn( if keras_layer.use_bias: in_bias = etab.new_const(weightList[2]) units = list(weightList[0].shape)[1] + assert units > 0, "The value of units must be a positive integer" in_data = _op.nn.batch_flatten(in_data) ixh = _op.nn.dense(in_data, kernel_weight, units=units) if keras_layer.use_bias: @@ -1080,6 +1082,7 @@ def _convert_gru( if keras_layer.use_bias: in_bias = etab.new_const(weightList[2]) units = list(weightList[0].shape)[1] + assert units > 0, "The value of units must be a positive integer" in_data = _op.nn.batch_flatten(in_data) matrix_x = _op.nn.dense(in_data, kernel_weight, units=units) if keras_layer.use_bias: From 77c14b998c1b4c62ed35b8f7596d6de21ad9416c Mon Sep 17 00:00:00 2001 From: Qingchao Shen Date: Mon, 17 Jul 2023 13:21:29 +0800 Subject: [PATCH 2/3] fix a typo in test case --- tests/python/frontend/keras/test_forward.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/python/frontend/keras/test_forward.py b/tests/python/frontend/keras/test_forward.py index 50a0e9850559..2f6e1098df78 100644 --- a/tests/python/frontend/keras/test_forward.py 
+++ b/tests/python/frontend/keras/test_forward.py @@ -244,7 +244,7 @@ def test_forward_activations_except(self, keras_mod): ): act_funcs = [ keras_mod.layers.LeakyReLU(alpha=None), - keras_mod.layers.LEU(2, 3, 4), + keras_mod.layers.ELU(2, 3, 4), keras_mod.layers.ReLU(threshold=None), ] data = keras_mod.layers.Input(shape=(2, 3, 4)) From b570401a86e06ae17cafdae8b77809559f8d33e4 Mon Sep 17 00:00:00 2001 From: Qingchao Shen Date: Tue, 18 Jul 2023 02:04:04 +0800 Subject: [PATCH 3/3] fix units in dense --- python/tvm/relay/frontend/keras.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/python/tvm/relay/frontend/keras.py b/python/tvm/relay/frontend/keras.py index f99f1806616d..ec960b9f0b12 100644 --- a/python/tvm/relay/frontend/keras.py +++ b/python/tvm/relay/frontend/keras.py @@ -254,6 +254,8 @@ def _convert_dense( weightList = keras_layer.get_weights() weight = etab.new_const(weightList[0].transpose([1, 0])) params = {"weight": weight, "units": weightList[0].shape[1]} + units = list(weightList[0].shape)[1] + assert units > 0, "The value of units must be a positive integer" if input_shape is None: input_shape = keras_layer.input_shape input_dim = len(input_shape)