I want to test the usage of relay `while_loop` and wrote the following simple example:

```
import numpy as np

import tvm
from tvm import relay
from tvm.relay.loops import while_loop

# Loop state: a (10, 20) float32 tensor and an int32 counter.
x = relay.var("x", shape=(10, 20))
i = relay.var("i", shape=tuple(), dtype="int32")

# Loop body: add 1 to the tensor and increment the counter.
def myfun(x, i):
    z = relay.add(x, relay.const(1, "float32"))
    j = relay.add(i, relay.const(1, "int32"))
    return z, j

# Loop condition: keep iterating while i < 10.
def cond(x, i):
    return relay.less(i, relay.const(10, "int32"))
myloop = while_loop(cond, [x, i], myfun)
z = myloop(x, relay.const(0, "int32"))

# The function returns only the tensor part of the loop result.
result = relay.Function([x], relay.TupleGetItem(z, 0))
print(result.astext())

module = tvm.IRModule.from_expr(result)
print(module["main"])

with relay.build_config(opt_level=1):
    # Running with the "debug" interpreter works.
    ex = relay.create_executor("debug", mod=module, ctx=tvm.cpu(), target="llvm")
    re = ex.evaluate()(np.zeros((10, 20), dtype="float32"))
    print(re)

# Building for the graph runtime fails here.
graph, mod, param = relay.build(module, target="llvm")
```

The Relay IR seems to be correct and the "debug" interpreter run passes. This is the Relay IR:

```
fn (%x: Tensor[(10, 20), float32]) {
  %3 = (
    let %while_loop = fn (%x1: Tensor[(10, 20), float32], %i: int32) {
      %0 = less(%i, 10);
      if (%0) {
        %1 = add(%x1, 1f);
        %2 = add(%i, 1);
        %while_loop(%1, %2)
      } else {
        (%x1, %i)
      }
    };
    %while_loop
  );
  %4 = %3(%x, 0);
  %4.0
}
```

However, when I try to build the module, I get an error message like this:

```
Traceback (most recent call last):
  File "jacobi.py", line 53, in <module>
    test_fuse_simple()
  File "jacobi.py", line 44, in test_fuse_simple
    graph, mod, param = relay.build(module,target="llvm")
  File "tvm-latest/lib/python3.7/site-packages/tvm-0.8.dev0-py3.7-linux-x86_64.egg/tvm/relay/build_module.py", line 260, in build
    graph_json, mod, params = bld_mod.build(mod, target, target_host, params)
  File "tvm-latest/lib/python3.7/site-packages/tvm-0.8.dev0-py3.7-linux-x86_64.egg/tvm/relay/build_module.py", line 127, in build
    self._build(mod, target, target_host)
  File "tvm-latest/lib/python3.7/site-packages/tvm-0.8.dev0-py3.7-linux-x86_64.egg/tvm/_ffi/_ctypes/packed_func.py", line 237, in __call__
    raise get_last_ffi_error()
tvm._ffi.base.TVMError: Traceback (most recent call last):
  [bt] (8) .local/tvm/lib/libtvm.so(std::_Function_handler<void (tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*), tvm::relay::backend::GraphRuntimeCodegenModule::GetFunction(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, tvm::runtime::ObjectPtr<tvm::runtime::Object> const&)::{lambda(tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*)#2}>::_M_invoke(std::_Any_data const&, tvm::runtime::TVMArgs&&, tvm::runtime::TVMRetValue*&&)+0x6e) [0x7faa7a736a4e]
  [bt] (7) .local/tvm/lib/libtvm.so(tvm::relay::backend::GraphRuntimeCodegen::Codegen(tvm::relay::Function)+0x49d) [0x7faa7a734e5d]
  [bt] (6) .local/tvm/lib/libtvm.so(tvm::relay::backend::MemoizedExprTranslator<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > >::VisitExpr(tvm::RelayExpr const&)+0x16a) [0x7faa7a7316da]
  [bt] (5) .local/tvm/lib/libtvm.so(tvm::relay::ExprFunctor<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > (tvm::RelayExpr const&)>::InitVTable()::{lambda(tvm::runtime::ObjectRef const&, tvm::relay::ExprFunctor<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > (tvm::RelayExpr const&)>*)#10}::_FUN(tvm::runtime::ObjectRef const&, tvm::relay::ExprFunctor<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > (tvm::RelayExpr const&)>*)+0x14) [0x7faa7a71ba14]
  [bt] (4) .local/tvm/lib/libtvm.so(tvm::relay::backend::GraphRuntimeCodegen::VisitExpr_(tvm::relay::TupleGetItemNode const*)+0x1d) [0x7faa7a71c19d]
  [bt] (3) .local/tvm/lib/libtvm.so(tvm::relay::backend::MemoizedExprTranslator<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > >::VisitExpr(tvm::RelayExpr const&)+0x16a) [0x7faa7a7316da]
  [bt] (2) .local/tvm/lib/libtvm.so(tvm::relay::ExprFunctor<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > (tvm::RelayExpr const&)>::InitVTable()::{lambda(tvm::runtime::ObjectRef const&, tvm::relay::ExprFunctor<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > (tvm::RelayExpr const&)>*)#6}::_FUN(tvm::runtime::ObjectRef const&, tvm::relay::ExprFunctor<std::vector<tvm::relay::backend::GraphNodeRef, std::allocator<tvm::relay::backend::GraphNodeRef> > (tvm::RelayExpr const&)>*)+0x14) [0x7faa7a71b9d4]
  [bt] (1) .local/tvm/lib/libtvm.so(tvm::relay::backend::GraphRuntimeCodegen::VisitExpr_(tvm::relay::CallNode const*)+0x1fb) [0x7faa7a72d24b]
  [bt] (0) .local/tvm/lib/libtvm.so(+0x17c64d4) [0x7faa7a71b4d4]
  File "download-file/tvm/src/relay/backend/graph_runtime_codegen.cc", line 355
TVMError: TVM runtime does not support calls to relay.Let
```

Does TVM currently support building a `while_loop` like this with `relay.build`?
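My understanding (please correct me if I am wrong) is that the graph runtime cannot execute control flow such as the `relay.Let`/recursion that `while_loop` produces, and that the Relay VM is meant to handle it. Would switching the executor kind from "debug" to "vm" be the recommended path? Below is a minimal sketch of what I have in mind, reusing the `module` from above; I have not confirmed this is the intended API for this case.

```
# Sketch only: run the same module with the Relay VM instead of
# relay.build + graph runtime, assuming the VM supports the
# Let/recursion generated by while_loop.
with relay.build_config(opt_level=1):
    vm_ex = relay.create_executor("vm", mod=module, ctx=tvm.cpu(), target="llvm")
    out = vm_ex.evaluate()(np.zeros((10, 20), dtype="float32"))
    print(out)
```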