The GitHub Actions job "Teams" on tvm.git/main has succeeded. Run started by GitHub user dutZ1855 (triggered by dutZ1855).
Head commit for run: ec0fed0f0d76592aa7c48130d1eef5efa21940e3 / Nguyen Duy Loc <[email protected]>
[Runtime][MatchShape] Type error: Cannot convert from type ' DLTensor* ' to ' ffi.Shape ' (#18546)

# Summary

- This commit fixes the runtime error `Type error: Cannot convert from type 'DLTensor*' to 'ffi.Shape'`.
- The error occurs at runtime when the first argument of the `MatchShape` function (in `runtime/vm/builtin.cc`) is a `DLTensor*`. When `if (auto opt_nd = args[0].as<Tensor>())` is false, `args[0]` (a `DLTensor*`) falls through to a direct conversion to `Shape`, which fails.

# Reproduce

## RPC

- After going through `RPCWrappedFunc`, the `Tensor` argument is converted to a `DLTensor*`.

<img width="887" height="975" alt="RPCWrappedFunc Editpng" src="https://github.com/user-attachments/assets/e80d1798-49ab-4d33-b9c5-2bb85bd53b87" />

```
Cell In[3], line 189
    186 remote_input = tvm.runtime.tensor(input_data, dev)
    188 # Run inference on remote device
--> 189 output = vm["main"](remote_input)
    191 # Extract result (handle both tuple and single tensor outputs)
    192 if isinstance(output, tvm.ir.Array) and len(output) > 0:

File python/tvm_ffi/cython/function.pxi:904, in tvm_ffi.core.Function.__call__()

File ~/Programming/tvm/src/runtime/rpc/rpc_module.cc:141, in tvm::runtime::RPCWrappedFunc::operator()(tvm::ffi::PackedArgs, tvm::ffi::Any*) const()
    139 }
    140 auto set_return = [this, rv](ffi::PackedArgs args) { this->WrapRemoteReturnToValue(args, rv); };
--> 141 sess_->CallFunc(handle_, ffi::PackedArgs(packed_args.data(), packed_args.size()), set_return);
    142 }
    143

File ~/Programming/tvm/src/runtime/rpc/rpc_endpoint.cc:1116, in tvm::runtime::RPCClientSession::CallFunc(void*, tvm::ffi::PackedArgs, std::function<void (tvm::ffi::PackedArgs)> const&)()
   1114 void CallFunc(PackedFuncHandle func, ffi::PackedArgs args,
   1115               const FEncodeReturn& fencode_return) final {
-> 1116   endpoint_->CallFunc(func, args, fencode_return);
   1117 }
   1118

File ~/Programming/tvm/src/runtime/rpc/rpc_endpoint.cc:906, in tvm::runtime::RPCEndpoint::CallFunc(void*, tvm::ffi::PackedArgs, std::function<void (tvm::ffi::PackedArgs)>)()
    904 handler_->SendPackedSeq(args.data(), args.size(), true);
    905
--> 906 code = HandleUntilReturnEvent(true, encode_return);
    907 ICHECK(code == RPCCode::kReturn) << "code=" << RPCCodeToString(code);
    908 }

File ~/Programming/tvm/src/runtime/rpc/rpc_endpoint.cc:746, in tvm::runtime::RPCEndpoint::HandleUntilReturnEvent(bool, std::function<void (tvm::ffi::PackedArgs)>)()
    744   }
    745 }
--> 746 code = handler_->HandleNextEvent(client_mode, false, setreturn);
    747 }
    748 return code;

File ~/Programming/tvm/src/runtime/rpc/rpc_endpoint.cc:134, in tvm::runtime::RPCEndpoint::EventHandler::HandleNextEvent(bool, bool, std::function<void (tvm::ffi::PackedArgs)>)()
    132 }
    133 case kProcessPacket: {
--> 134   this->HandleProcessPacket(setreturn);
    135   break;
    136 }

File ~/Programming/tvm/src/runtime/rpc/rpc_endpoint.cc:409, in tvm::runtime::RPCEndpoint::EventHandler::HandleProcessPacket(std::function<void (tvm::ffi::PackedArgs)>)()
    407 case RPCCode::kException:
    408 case RPCCode::kReturn: {
--> 409   this->HandleReturn(code, setreturn);
    410   break;
    411 }

File ~/Programming/tvm/src/runtime/rpc/rpc_endpoint.cc:473, in tvm::runtime::RPCEndpoint::EventHandler::HandleReturn(tvm::runtime::RPCCode, std::function<void (tvm::ffi::PackedArgs)>)()
    471   msg = "RPCError: Error caught from RPC call:\n" + msg;
    472 }
--> 473 LOG(FATAL) << msg;
    474 }
    475

File ~/Programming/tvm/include/tvm/runtime/logging.h:321, in tvm::runtime::detail::LogFatal::~LogFatal()()
    319 #endif
    320 [[noreturn]] ~LogFatal() TVM_THROW_EXCEPTION {
--> 321   GetEntry().Finalize();
    322   throw;
    323 }

File ~/Programming/tvm/include/tvm/runtime/logging.h:337, in tvm::runtime::detail::LogFatal::Entry::Finalize()()
    335 }
    336 [[noreturn]] TVM_NO_INLINE dmlc::Error Finalize() TVM_THROW_EXCEPTION {
--> 337   InternalError error(file_, lineno_, stream_.str());
    338 #if DMLC_LOG_BEFORE_THROW
    339   std::cerr << error.what() << std::endl;

RPCError: Error caught from RPC call:
Cannot convert from type `DLTensor*` to `ffi.Shape`
```

## C++

- The input is a `DLTensor*`.

```
std::cout << "--- TVM C++ TorchMLP Example ---" << std::endl;
int64_t shapes[] = {1, 1, 28, 28};
...
// Load binary data
std::vector<float> input_img = load_bin(argv[1], shapes[0] * shapes[1] * shapes[2] * shapes[3]);

// Create DLTensor for input
DLTensor* dl_tensor = new DLTensor();
dl_tensor->data = malloc(sizeof(float) * input_img.size());
dl_tensor->device = device;
dl_tensor->ndim = sizeof(shapes) / sizeof(shapes[0]);
dl_tensor->dtype = DLDataType{kDLFloat, 32, 1};
dl_tensor->shape = shapes;
dl_tensor->strides = nullptr;
dl_tensor->byte_offset = 0;
memcpy(dl_tensor->data, input_img.data(), sizeof(float) * input_img.size());

// Run the main function
tvm::ffi::Array<tvm::ffi::Tensor> output_temp =
    (*main)(dl_tensor).cast<tvm::ffi::Array<tvm::ffi::Tensor>>();
```

<img width="615" height="147" alt="cpp" src="https://github.com/user-attachments/assets/abbcd829-9aec-48f0-ac5d-804f7e9cbff0" />

# Resolved

- Instead of converting the argument to `Tensor` only, `try_cast` it to `DLTensor*` and create the `Shape` from its shape fields (see the sketch below).
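For reference, the fix described above follows roughly this pattern. This is a minimal sketch, not the actual code in `runtime/vm/builtin.cc`; the helper name, the `ffi::AnyView` accessors, and the `ffi::Shape(begin, end)` constructor are assumptions based on the description in this commit message.

```
// Hypothetical helper sketching the described fix; names and exact
// tvm::ffi APIs are assumptions, not the real builtin.cc code.
ffi::Shape ShapeOfFirstArg(ffi::AnyView arg) {
  // New path: accept a raw DLTensor* (e.g. an argument unpacked by
  // RPCWrappedFunc, or a DLTensor* passed directly from C++) and build
  // the Shape from its shape/ndim fields instead of trying to convert
  // the DLTensor* itself to ffi.Shape.
  if (auto opt_dl = arg.try_cast<DLTensor*>()) {
    DLTensor* dl = opt_dl.value();
    return ffi::Shape(dl->shape, dl->shape + dl->ndim);
  }
  // Existing path: the argument already is (or converts to) a Shape.
  return arg.cast<ffi::Shape>();
}
```

With this pattern, a call that reaches `MatchShape` with a `DLTensor*` first argument (as in both reproduce cases above) takes the `DLTensor*` branch instead of falling through to the failing `Shape` conversion.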
Report URL: https://github.com/apache/tvm/actions/runs/20482328280

With regards,
GitHub Actions via GitBox

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
