Description
https://ci.tlcpack.ai/blue/organizations/jenkins/tvm/detail/main/1868/pipeline/
Expected behavior
Tutorials should build.
Actual behavior
WARNING: /workspace/tutorials/micro/micro_tflite.py failed to execute correctly: Traceback (most recent call last):
File "/usr/local/lib/python3.6/dist-packages/sphinx_gallery/gen_rst.py", line 480, in _memory_usage
out = func()
File "/usr/local/lib/python3.6/dist-packages/sphinx_gallery/gen_rst.py", line 465, in __call__
exec(self.code, self.globals)
File "/workspace/tutorials/micro/micro_tflite.py", line 211, in <module>
module = relay.build(mod, target=TARGET, params=params)
File "../../python/tvm/relay/build_module.py", line 358, in build
mod=ir_mod, target=target, params=params, executor=executor, mod_name=mod_name
File "../../python/tvm/relay/build_module.py", line 172, in build
self._build(mod, target, target_host, executor, mod_name)
File "tvm/_ffi/_cython/./packed_func.pxi", line 323, in tvm._ffi._cy3.core.PackedFuncBase.__call__
File "tvm/_ffi/_cython/./packed_func.pxi", line 267, in tvm._ffi._cy3.core.FuncCall
File "tvm/_ffi/_cython/./base.pxi", line 163, in tvm._ffi._cy3.core.CALL
tvm._ffi.base.TVMError: Traceback (most recent call last):
19: TVMFuncCall
at /workspace/src/runtime/c_runtime_api.cc:474
18: tvm::runtime::PackedFunc::CallPacked(tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*) const
at /workspace/include/tvm/runtime/packed_func.h:1151
17: std::function<void (tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*)>::operator()(tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*) const
at /usr/include/c++/7/bits/std_function.h:706
16: std::_Function_handler<void (tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*), tvm::relay::backend::RelayBuildModule::GetFunction(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, tvm::runtime::ObjectPtr<tvm::runtime::Object> const&)::{lambda(tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*)#3}>::_M_invoke(std::_Any_data const&, tvm::runtime::TVMArgs&&, tvm::runtime::TVMRetValue*&&)
at /usr/include/c++/7/bits/std_function.h:316
15: tvm::relay::backend::RelayBuildModule::GetFunction(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, tvm::runtime::ObjectPtr<tvm::runtime::Object> const&)::{lambda(tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*)#3}::operator()(tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*) const
at /workspace/src/relay/backend/build_module.cc:181
14: tvm::relay::backend::RelayBuildModule::Build(tvm::IRModule, tvm::runtime::Map<tvm::Integer, tvm::Target, void, void> const&, tvm::Target const&, tvm::runtime::String, tvm::runtime::String)
at /workspace/src/relay/backend/build_module.cc:297
13: tvm::relay::backend::RelayBuildModule::BuildRelay(tvm::IRModule, std::unordered_map<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, tvm::runtime::NDArray, std::hash<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::equal_to<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::allocator<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, tvm::runtime::NDArray> > > const&, tvm::runtime::String)
at /workspace/src/relay/backend/build_module.cc:461
12: tvm::relay::backend::ExecutorCodegen::Codegen(tvm::relay::Function const&, tvm::runtime::String)
at /workspace/src/relay/backend/build_module.cc:61
11: void tvm::relay::backend::ExecutorCodegen::CallFunc<tvm::relay::Function, tvm::runtime::String>(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, tvm::relay::Function, tvm::runtime::String)
at /workspace/src/relay/backend/build_module.cc:112
10: tvm::runtime::TVMRetValue tvm::runtime::PackedFunc::operator()<tvm::relay::Function, tvm::runtime::String>(tvm::relay::Function&&, tvm::runtime::String&&) const
at /workspace/include/tvm/runtime/packed_func.h:1369
9: std::function<void (tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*)>::operator()(tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*) const
at /usr/include/c++/7/bits/std_function.h:706
8: std::_Function_handler<void (tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*), tvm::relay::backend::GraphExecutorCodegenModule::GetFunction(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, tvm::runtime::ObjectPtr<tvm::runtime::Object> const&)::{lambda(tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*)#2}>::_M_invoke(std::_Any_data const&, tvm::runtime::TVMArgs&&, tvm::runtime::TVMRetValue*&&)
at /usr/include/c++/7/bits/std_function.h:316
7: tvm::relay::backend::GraphExecutorCodegenModule::GetFunction(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, tvm::runtime::ObjectPtr<tvm::runtime::Object> const&)::{lambda(tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*)#2}::operator()(tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*) const
at /workspace/src/relay/backend/graph_executor_codegen.cc:636
6: tvm::relay::backend::GraphExecutorCodegen::Codegen(tvm::relay::Function, tvm::runtime::String)
at /workspace/src/relay/backend/graph_executor_codegen.cc:261
5: tvm::relay::backend::MemoizedExprTranslator<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > >::VisitExpr(tvm::RelayExpr const&)
at /workspace/src/relay/backend/././utils.h:244
4: tvm::relay::ExprFunctor<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > (tvm::RelayExpr const&)>::VisitExpr(tvm::RelayExpr const&)
at /workspace/include/tvm/relay/expr_functor.h:95
3: tvm::NodeFunctor<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > (tvm::runtime::ObjectRef const&, tvm::relay::ExprFunctor<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > (tvm::RelayExpr const&)>*)>::operator()(tvm::runtime::ObjectRef const&, tvm::relay::ExprFunctor<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > (tvm::RelayExpr const&)>*) const
at /workspace/include/tvm/node/functor.h:97
2: tvm::relay::ExprFunctor<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > (tvm::RelayExpr const&)>::InitVTable()::{lambda(tvm::runtime::ObjectRef const&, tvm::relay::ExprFunctor<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > (tvm::RelayExpr const&)>*)#6}::_FUN(tvm::runtime::ObjectRef const&, tvm::relay::ExprFunctor<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > (tvm::RelayExpr const&)>*)
at /workspace/include/tvm/relay/expr_functor.h:128
1: tvm::relay::ExprFunctor<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > (tvm::RelayExpr const&)>::InitVTable()::{lambda(tvm::runtime::ObjectRef const&, tvm::relay::ExprFunctor<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > (tvm::RelayExpr const&)>*)#6}::operator()(tvm::runtime::ObjectRef const&, tvm::relay::ExprFunctor<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > (tvm::RelayExpr const&)>*) const
at /workspace/include/tvm/relay/expr_functor.h:128
0: tvm::relay::backend::GraphExecutorCodegen::VisitExpr_(tvm::relay::CallNode const*)
at /workspace/src/relay/backend/graph_executor_codegen.cc:456
File "/workspace/src/relay/backend/graph_executor_codegen.cc", line 456
TVMError:
---------------------------------------------------------------
An error occurred during the execution of TVM.
For more information, please see: https://tvm.apache.org/docs/errors.html
---------------------------------------------------------------
Check failed: (global_node) is false: Non-primitive-call nodes should have been transformed away.
The graph executor code generator expects all calls to have their callee normalized to a GlobalVar, but found:
free_var %dense_4_input: Tensor[(1), float32];
%0 = reshape(%dense_4_input, newshape=[-1, 1]) /* ty=Tensor[(1, 1), float32] */;
%1 = nn.dense(%0, meta[relay.Constant][0] /* ty=Tensor[(16, 1), float32] */, units=16) /* ty=Tensor[(1, 16), float32] */;
%2 = add(%1, meta[relay.Constant][1] /* ty=Tensor[(16), float32] */) /* ty=Tensor[(1, 16), float32] */;
%3 = nn.relu(%2) /* ty=Tensor[(1, 16), float32] */;
%4 = reshape(%3, newshape=[-1, 16]) /* ty=Tensor[(1, 16), float32] */;
%5 = nn.dense(%4, meta[relay.Constant][2] /* ty=Tensor[(16, 16), float32] */, units=16) /* ty=Tensor[(1, 16), float32] */;
%6 = add(%5, meta[relay.Constant][3] /* ty=Tensor[(16), float32] */) /* ty=Tensor[(1, 16), float32] */;
%7 = nn.relu(%6) /* ty=Tensor[(1, 16), float32] */;
%8 = reshape(%7, newshape=[-1, 16]) /* ty=Tensor[(1, 16), float32] */;
%9 = nn.dense(%8, meta[relay.Constant][4] /* ty=Tensor[(1, 16), float32] */, units=1) /* ty=Tensor[(1, 1), float32] */;
add(%9, meta[relay.Constant][5] /* ty=Tensor[(1), float32] */) /* ty=Tensor[(1, 1), float32] */
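For context, the failing statement is the `relay.build` call at line 211 of micro_tflite.py in the traceback above. Below is a minimal sketch of roughly what that step does; the model file name and exact options are assumptions for illustration rather than the tutorial's exact values (the input name `dense_4_input` and shape `(1,)` are taken from the Relay dump above).

```python
# Rough sketch of the failing step in tutorials/micro/micro_tflite.py (~line 211).
# Model path and options are assumptions; relay.build(...) is the call that raises
# "Non-primitive-call nodes should have been transformed away" in CI.
import tflite
import tvm
from tvm import relay

TARGET = tvm.target.target.micro("host")  # assumed micro target used by the tutorial

with open("sine_model.tflite", "rb") as f:  # hypothetical TFLite model file
    tflite_model = tflite.Model.GetRootAsModel(f.read(), 0)

mod, params = relay.frontend.from_tflite(
    tflite_model,
    shape_dict={"dense_4_input": (1,)},      # matches the free_var in the dump above
    dtype_dict={"dense_4_input": "float32"},
)

with tvm.transform.PassContext(opt_level=3, config={"tir.disable_vectorize": True}):
    module = relay.build(mod, target=TARGET, params=params)  # fails here in CI
```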
Environment
Routine CI build from main.
Steps to reproduce
I think this is an interaction between two tutorials, similar to the one that happened with micro_autotune a few weeks ago. To reproduce, I think you need to build the tutorials from scratch. Will follow up here.
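Since sphinx-gallery executes all tutorial scripts inside a single Python process, global state left behind by one tutorial can affect the next. A hypothetical way to approximate that locally, without a full docs build, is to run the two micro tutorials back to back in one interpreter; the paths below are assumptions based on the repo layout.

```python
# Hypothetical repro sketch: run both micro tutorials in one Python process,
# mimicking how sphinx-gallery executes gallery scripts during the docs build.
import runpy

for script in (
    "tutorials/micro/micro_autotune.py",  # suspected of leaving global state behind
    "tutorials/micro/micro_tflite.py",    # the tutorial that then fails in CI
):
    print(f"--- running {script} ---")
    runpy.run_path(script, run_name="__main__")
```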