This is an automated email from the ASF dual-hosted git repository.

syfeng pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tvm.git


The following commit(s) were added to refs/heads/main by this push:
     new 196b694d68 [FRONTEND][ONNX] Make bias input optional in LayerNormalization (#17980)
196b694d68 is described below

commit 196b694d688621fd56e396c4c1ba16ad8f0810e1
Author: Youngsik Yang <[email protected]>
AuthorDate: Thu May 15 15:45:51 2025 +0900

    [FRONTEND][ONNX] Make bias input optional in LayerNormalization (#17980)
    
    This change updates the LayerNormalization converter to support ONNX
    models where the optional bias input is not provided.
    When bias is missing, a zero-filled bias tensor is generated.
    
    This behavior aligns with the ONNX spec for LayerNormalization
    (opset 17+), where the bias input is officially optional.
---
 python/tvm/relax/frontend/onnx/onnx_frontend.py |  4 ++++
 tests/python/relax/test_frontend_onnx.py        | 18 ++++++++++++++++++
 2 files changed, 22 insertions(+)

diff --git a/python/tvm/relax/frontend/onnx/onnx_frontend.py 
b/python/tvm/relax/frontend/onnx/onnx_frontend.py
index 24217184b5..32b2c508f0 100644
--- a/python/tvm/relax/frontend/onnx/onnx_frontend.py
+++ b/python/tvm/relax/frontend/onnx/onnx_frontend.py
@@ -2489,6 +2489,10 @@ class LayerNormalization(OnnxOpConverter):
         axis = attr.get("axis", -1)
         epsilon = attr.get("epsilon", 1e-05)
 
+        if bias is None:
+            seq_len = data.struct_info.shape[1].value
+            bias = relax.const([0.0] * seq_len, dtype="float32")
+
         output = relax.op.nn.layer_norm(data, scale, bias, axis, epsilon)
         # Onnx layernorm has 3 outputs but only the first is used.
         # We construct two empty constants for this.
diff --git a/tests/python/relax/test_frontend_onnx.py 
b/tests/python/relax/test_frontend_onnx.py
index f533c79455..b7305d4810 100644
--- a/tests/python/relax/test_frontend_onnx.py
+++ b/tests/python/relax/test_frontend_onnx.py
@@ -1303,6 +1303,24 @@ def test_layer_norm():
     model = helper.make_model(graph, producer_name="layer_norm_test")
     check_correctness(model)
 
+    # Test case with no bias that is an optional input
+    layer_norm_node = helper.make_node("LayerNormalization", ["a", "b"], 
["d"], epsilon=1e-12)
+
+    graph = helper.make_graph(
+        [layer_norm_node],
+        "layer_norm_test",
+        inputs=[
+            helper.make_tensor_value_info("a", TensorProto.FLOAT, [32, 32]),
+            helper.make_tensor_value_info("b", TensorProto.FLOAT, [32]),
+        ],
+        outputs=[
+            helper.make_tensor_value_info("d", TensorProto.FLOAT, [32, 32]),
+        ],
+    )
+
+    model = helper.make_model(graph, producer_name="layer_norm_test")
+    check_correctness(model)
+
 
 # TODO Enable dynamism
 @pytest.mark.parametrize("dynamic", [False])

Reply via email to