From 36567c66112cacf1d8115ef8fce7d78c2e814f21 Mon Sep 17 00:00:00 2001
From: Eric Lunderberg
Date: Wed, 3 Jan 2024 14:39:35 +0000
Subject: [PATCH] [Unity][UnitTest] Increase atol to resolve flaky CI failure

The `tests/python/relax/test_frontend_onnx.py::test_attention` unit test
currently has sporadic failures (5/200 executions), which can cause failures
for unrelated changes (e.g.
[PR#16304](https://github.com/apache/tvm/pull/16304),
[CI link](https://ci.tlcpack.ai/blue/organizations/jenkins/tvm-unity/detail/PR-16304/3/pipeline)).

This commit specifies a tolerance to use for comparisons, to avoid these
spurious CI failures.

---
 tests/python/relax/test_frontend_onnx.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/tests/python/relax/test_frontend_onnx.py b/tests/python/relax/test_frontend_onnx.py
index 19be3e0b0358..748119f6f990 100644
--- a/tests/python/relax/test_frontend_onnx.py
+++ b/tests/python/relax/test_frontend_onnx.py
@@ -1226,6 +1226,8 @@ def verify_attention(
             "mask_index": mask_index,
             "relative_position_bias": relative_position_bias,
         },
+        # Maximum observed delta from 500 iterations was 2e-4.
+        atol=1e-3,
     )
     # "present" output should be nullptr when the "past" input isn't included,
     # but ort requires an output shape to be specified?
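
For context, a minimal sketch of how an absolute tolerance like `atol=1e-3` behaves. The comparison helper being patched is not shown in the hunk; `np.testing.assert_allclose` is used here only as an assumed stand-in to illustrate the semantics, and the array values are made up. With the largest observed nondeterministic delta at 2e-4, an `atol` of 1e-3 leaves comfortable headroom.

```python
import numpy as np

# Hypothetical reference output; the real test compares ONNX attention outputs.
expected = np.array([0.12345, -0.67890], dtype="float32")

# Simulate the largest delta observed across 500 iterations (2e-4).
actual = expected + 2e-4

# Passes: |actual - expected| <= atol + rtol * |expected| holds elementwise
# once atol is raised to 1e-3.
np.testing.assert_allclose(actual, expected, rtol=1e-7, atol=1e-3)
```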