diff --git a/python/tvm/relay/frontend/onnx.py b/python/tvm/relay/frontend/onnx.py
index 53f104ce48cf..d91ee4b8c5d7 100644
--- a/python/tvm/relay/frontend/onnx.py
+++ b/python/tvm/relay/frontend/onnx.py
@@ -944,7 +944,10 @@ def from_onnx(self, graph, opset):
                                               dtype=self._params[i_name].dtype)
             else:
                 self._num_input += 1
-                tshape = self._shape[i_name] if i_name in self._shape else ()
+                if i_name in self._shape:
+                    tshape = self._shape[i_name]
+                else:
+                    raise ValueError("Must provide an input shape for `{0}`.".format(i_name))
                 if isinstance(self._dtype, dict):
                     dtype = self._dtype[i_name] if i_name in self._dtype else d_type
                 else:
diff --git a/tests/python/frontend/onnx/test_forward.py b/tests/python/frontend/onnx/test_forward.py
index 1e89b9ddaa8c..58092a610846 100644
--- a/tests/python/frontend/onnx/test_forward.py
+++ b/tests/python/frontend/onnx/test_forward.py
@@ -694,10 +694,15 @@ def verify_constantfill(is_shape, input_dim, out_dim, value, dtype, **kwargs):
     else:
         fill_node = helper.make_node("ConstantFill", ["input_a"], ["out"], value=value, dtype=dtype,
                                      **kwargs)
 
+    if is_shape == True:
+        inputs = []
+    else:
+        inputs = [helper.make_tensor_value_info("input_a",
+                                                TensorProto.FLOAT, list(input_dim))]
+
     graph = helper.make_graph([fill_node],
                               "fill_test",
-                              inputs = [helper.make_tensor_value_info("input_a",
-                                        TensorProto.FLOAT, list(input_dim))],
+                              inputs,
                               outputs = [helper.make_tensor_value_info("out",
                                          TensorProto.FLOAT, list(out.shape))])
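
For context, a minimal usage sketch (not part of the patch) of how this new check surfaces to users of the Relay ONNX importer. The model path, input name, and shape below are illustrative assumptions, and the exact return values of `relay.frontend.from_onnx` may differ between TVM versions:

```python
import onnx
from tvm import relay

# Hypothetical model file and input name, for illustration only.
model = onnx.load("model.onnx")

# After this patch, every graph input that is not an initializer must have an
# entry in the shape dict; omitting one now raises
#   ValueError: Must provide an input shape for `input_a`.
# instead of silently falling back to an empty (scalar) shape.
shape_dict = {"input_a": (1, 3, 224, 224)}

mod, params = relay.frontend.from_onnx(model, shape=shape_dict)
```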