DaTouJun commented on issue #18573:
URL: https://github.com/apache/tvm/issues/18573#issuecomment-3632835415
> I have checked the following minimal reproduction example, and it runs
without errors in my environment.
>
> import torch
> import torch.nn as nn
> from tvm.relax.frontend.torch import from_exported_program
>
> input_size = 28 * 28
> num_classes = 10
>
>
> class SimpleNet(nn.Module):
>     def __init__(self):
>         super(SimpleNet, self).__init__()
>         self.fc1 = nn.Linear(input_size, 128)
>         self.fc2 = nn.Linear(128, num_classes)
>
>     def forward(self, x: torch.Tensor):
>         x = x.view(x.size(0), -1)
>         x = torch.relu(self.fc1(x))
>         x = self.fc2(x)
>         return x
>
>
> model = SimpleNet()
> example_args = (torch.randn(1, 1, 28, 28).to(torch.device("cpu")),)
> exported_program = torch.export.export(model, example_args)
> output_path = "model.pt2"
> torch.export.save(exported_program, output_path)
>
>
> ##############################
>
> exported_program = torch.export.load("model.pt2")
> mod = from_exported_program(exported_program)
> mod.show()
This example also does not work in my environment. Here is the output:
/home/guan/miniconda3/envs/tvm/bin/python /home/guan/dev/pycharm/TVM/debuggit/demo.py
/home/guan/miniconda3/envs/tvm/lib/python3.11/site-packages/torch/export/pt2_archive/_package.py:682: UserWarning: The given buffer is not writable, and PyTorch does not support non-writable tensors. This means you can write to the underlying (supposedly non-writable) buffer using the tensor. You may want to copy the buffer to protect its data or make it writable before converting it to a tensor. This type of warning will be suppressed for the rest of this program. (Triggered internally at /pytorch/torch/csrc/utils/tensor_new.cpp:1581.)
  tensor = torch.frombuffer(
Traceback (most recent call last):
  File "/home/guan/dev/pycharm/TVM/debuggit/demo.py", line 34, in <module>
    mod = from_exported_program(exported_program)
          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/guan/miniconda3/envs/tvm/lib/python3.11/site-packages/tvm/relax/frontend/torch/exported_program_translator.py", line 1261, in from_exported_program
    return ExportedProgramImporter().from_exported_program(
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/guan/miniconda3/envs/tvm/lib/python3.11/site-packages/tvm/relax/frontend/torch/exported_program_translator.py", line 1156, in from_exported_program
    self.env[node] = self.convert_map[func_name](node)
                     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/guan/miniconda3/envs/tvm/lib/python3.11/site-packages/tvm/relax/frontend/torch/base_fx_graph_translator.py", line 1109, in _linear
    return self.block_builder.emit(relax.op.linear(x, weight, bias, "float32"))
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/guan/miniconda3/envs/tvm/lib/python3.11/site-packages/tvm/relax/block_builder.py", line 328, in emit
    return _ffi_api.BlockBuilderEmit(self, expr, name_hint)  # type: ignore
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "python/tvm_ffi/cython/function.pxi", line 904, in tvm_ffi.core.Function.__call__
  File "/home/guan/dev/tvm/src/relax/ir/block_builder.cc", line 1068, in operator()
    return builder->Emit(expr, name_hint);
  File "/home/guan/dev/tvm/src/relax/ir/block_builder.cc", line 243, in tvm::relax::BlockBuilderImpl::Emit(tvm::RelaxExpr, tvm::ffi::String)
    return this->Emit(expr, CurrentBlockFrame()->is_dataflow, name_hint);
  File "/home/guan/dev/tvm/src/relax/ir/block_builder.cc", line 395, in tvm::relax::BlockBuilderImpl::Emit(tvm::RelaxExpr, bool, tvm::ffi::String)
    expr = this->Normalize(expr);
  File "/home/guan/dev/tvm/src/relax/ir/block_builder.cc", line 532, in tvm::relax::Normalizer::Normalize(tvm::RelaxExpr const&)
    Expr normalized = this->VisitExpr(expr);
  File "/home/guan/dev/tvm/src/relax/ir/block_builder.cc", line 615, in tvm::relax::Normalizer::VisitExpr(tvm::RelaxExpr const&)
    return ExprFunctor::VisitExpr(expr);
  File "/home/guan/dev/tvm/include/tvm/relax/expr_functor.h", line 132, in tvm::relax::ExprFunctor<tvm::RelaxExpr (tvm::RelaxExpr const&)>::VisitExpr(tvm::RelaxExpr const&)
    return vtable(n, this, std::forward<Args>(args)...);
  File "/home/guan/dev/tvm/include/tvm/node/functor.h", line 102, in tvm::NodeFunctor<tvm::RelaxExpr (tvm::ffi::ObjectRef const&, tvm::relax::ExprFunctor<tvm::RelaxExpr (tvm::RelaxExpr const&)>*)>::operator()(tvm::ffi::ObjectRef const&, tvm::relax::ExprFunctor<tvm::RelaxExpr (tvm::RelaxExpr const&)>*) const
    return (*func_[n->type_index() - begin_type_index_])(n, std::forward<Args>(args)...);
  File "/home/guan/dev/tvm/include/tvm/relax/expr_functor.h", line 171, in tvm::relax::ExprFunctor<tvm::RelaxExpr (tvm::RelaxExpr const&)>::InitVTable()::{lambda(tvm::ffi::ObjectRef const&, tvm::relax::ExprFunctor<tvm::RelaxExpr (tvm::RelaxExpr const&)>*)#9}::_FUN(tvm::ffi::ObjectRef const&, tvm::relax::ExprFunctor<tvm::RelaxExpr (tvm::RelaxExpr const&)>*)
    RELAX_EXPR_FUNCTOR_DISPATCH(CallNode);
  File "/home/guan/dev/tvm/include/tvm/relax/expr_functor.h", line 171, in tvm::relax::ExprFunctor<tvm::RelaxExpr (tvm::RelaxExpr const&)>::InitVTable()::{lambda(tvm::ffi::ObjectRef const&, tvm::relax::ExprFunctor<tvm::RelaxExpr (tvm::RelaxExpr const&)>*)#9}::operator()(tvm::ffi::ObjectRef const&, tvm::relax::ExprFunctor<tvm::RelaxExpr (tvm::RelaxExpr const&)>*) const
    RELAX_EXPR_FUNCTOR_DISPATCH(CallNode);
  File "/home/guan/dev/tvm/src/relax/ir/block_builder.cc", line 654, in tvm::relax::Normalizer::VisitExpr_(tvm::relax::CallNode const*)
    op->args.Map([this](const Expr& arg) { return NormalizeArgument(arg); });
  File "/home/guan/dev/tvm/3rdparty/tvm-ffi/include/tvm/ffi/container/array.h", line 799, in tvm::ffi::Array<tvm::RelaxExpr, std::enable_if<storage_enabled_v<tvm::RelaxExpr>, void>::type> tvm::ffi::Array<tvm::RelaxExpr, void>::Map<tvm::relax::Normalizer::VisitExpr_(tvm::relax::CallNode const*)::{lambda(tvm::RelaxExpr const&)#1}, tvm::RelaxExpr>(tvm::relax::Normalizer::VisitExpr_(tvm::relax::CallNode const*)::{lambda(tvm::RelaxExpr const&)#1}) const
    return Array<U>(MapHelper(data_, fmap));
  File "/home/guan/dev/tvm/3rdparty/tvm-ffi/include/tvm/ffi/container/array.h", line 975, in tvm::ffi::ObjectPtr<tvm::ffi::Object> tvm::ffi::Array<tvm::RelaxExpr, void>::MapHelper<tvm::relax::Normalizer::VisitExpr_(tvm::relax::CallNode const*)::{lambda(tvm::RelaxExpr const&)#1}, tvm::RelaxExpr>(tvm::ffi::ObjectPtr<tvm::ffi::Object>, tvm::relax::Normalizer::VisitExpr_(tvm::relax::CallNode const*)::{lambda(tvm::RelaxExpr const&)#1})
    U mapped = fmap(details::AnyUnsafe::CopyFromAnyViewAfterCheck<T>(*it));
  File "/home/guan/dev/tvm/src/relax/ir/block_builder.cc", line 654, in tvm::relax::Normalizer::VisitExpr_(tvm::relax::CallNode const*)::{lambda(tvm::RelaxExpr const&)#1}::operator()(tvm::RelaxExpr const&) const
    op->args.Map([this](const Expr& arg) { return NormalizeArgument(arg); });
  File "/home/guan/dev/tvm/src/relax/ir/block_builder.cc", line 563, in tvm::relax::Normalizer::NormalizeArgument(tvm::RelaxExpr const&)
    Expr post = ExprFunctor::VisitExpr(arg);
  File "/home/guan/dev/tvm/include/tvm/relax/expr_functor.h", line 132, in tvm::relax::ExprFunctor<tvm::RelaxExpr (tvm::RelaxExpr const&)>::VisitExpr(tvm::RelaxExpr const&)
    return vtable(n, this, std::forward<Args>(args)...);
  File "/home/guan/dev/tvm/include/tvm/node/functor.h", line 102, in tvm::NodeFunctor<tvm::RelaxExpr (tvm::ffi::ObjectRef const&, tvm::relax::ExprFunctor<tvm::RelaxExpr (tvm::RelaxExpr const&)>*)>::operator()(tvm::ffi::ObjectRef const&, tvm::relax::ExprFunctor<tvm::RelaxExpr (tvm::RelaxExpr const&)>*) const
    return (*func_[n->type_index() - begin_type_index_])(n, std::forward<Args>(args)...);
  File "/home/guan/dev/tvm/include/tvm/relax/expr_functor.h", line 171, in tvm::relax::ExprFunctor<tvm::RelaxExpr (tvm::RelaxExpr const&)>::InitVTable()::{lambda(tvm::ffi::ObjectRef const&, tvm::relax::ExprFunctor<tvm::RelaxExpr (tvm::RelaxExpr const&)>*)#9}::_FUN(tvm::ffi::ObjectRef const&, tvm::relax::ExprFunctor<tvm::RelaxExpr (tvm::RelaxExpr const&)>*)
    RELAX_EXPR_FUNCTOR_DISPATCH(CallNode);
  File "/home/guan/dev/tvm/include/tvm/relax/expr_functor.h", line 171, in tvm::relax::ExprFunctor<tvm::RelaxExpr (tvm::RelaxExpr const&)>::InitVTable()::{lambda(tvm::ffi::ObjectRef const&, tvm::relax::ExprFunctor<tvm::RelaxExpr (tvm::RelaxExpr const&)>*)#9}::operator()(tvm::ffi::ObjectRef const&, tvm::relax::ExprFunctor<tvm::RelaxExpr (tvm::RelaxExpr const&)>*) const
    RELAX_EXPR_FUNCTOR_DISPATCH(CallNode);
  File "/home/guan/dev/tvm/src/relax/ir/block_builder.cc", line 664, in tvm::relax::Normalizer::VisitExpr_(tvm::relax::CallNode const*)
    auto inferred_sinfo = InferStructInfo(call);
  File "/home/guan/dev/tvm/src/relax/ir/block_builder.cc", line 847, in tvm::relax::Normalizer::InferStructInfo(tvm::relax::Call const&)
    return op_map_infer_struct_info_[op](call, ffi::GetRef<BlockBuilder>(this));
  File "/home/guan/dev/tvm/src/relax/op/tensor/linear_algebra.cc", line 141, in tvm::relax::InferStructInfoMatmul(tvm::relax::Call const&, tvm::relax::BlockBuilder const&)
    ctx->ReportFatal(Diagnostic::Error(call)
  File "/home/guan/dev/tvm/src/relax/ir/block_builder.cc", line 157, in tvm::relax::BlockBuilderImpl::ReportFatal(tvm::Diagnostic const&)
    LOG(FATAL) << diagnostic->message;
  File "/home/guan/dev/tvm/include/tvm/runtime/logging.h", line 321, in tvm::runtime::detail::LogFatal::~LogFatal()
    GetEntry().Finalize();
  File "/home/guan/dev/tvm/include/tvm/runtime/logging.h", line 337, in tvm::runtime::detail::LogFatal::Entry::Finalize()
    InternalError error(file_, lineno_, stream_.str());
tvm.error.InternalError: Matmul requires the reduction length of the operands to be equal. However, the LHS lv has shape R.shape([1, 10]), while the RHS lv1 has shape R.shape([784, 128]). The reduction dimensions of T.int64(10) and T.int64(784) are not equal.
[22:56:24] /home/guan/dev/tvm/src/relax/ir/block_builder.cc:64: Warning: BlockBuilder destroyed with remaining blocks!
Process finished with exit code 1
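Reading the final InternalError: the matmul LHS lv has shape [1, 10], which matches fc2's output shape, while the RHS lv1 has shape [784, 128], which matches fc1's transposed weight, so the translator appears to be pairing intermediate values and weights in the wrong order after the save/load round trip. As a minimal diagnostic sketch (assuming a recent PyTorch where torch.export.ExportedProgram exposes a state_dict property), one could first check whether the loaded archive itself still carries the expected parameter names and shapes:

import torch

# Print the parameter names and shapes that from_exported_program will bind.
ep = torch.export.load("model.pt2")
for name, value in ep.state_dict.items():
    print(name, tuple(value.shape))

# Expected for SimpleNet:
#   fc1.weight (128, 784)   fc1.bias (128,)
#   fc2.weight (10, 128)    fc2.bias (10,)

If these shapes print as expected, the mismatch would point at the translator's placeholder-to-parameter binding rather than at the .pt2 archive itself.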
I also tried different torch versions, ranging from 2.9.1+cu130 down to
2.8.0+cu129. My TVM is built from source, which might itself have some
problem, but I recently upgraded my system, so at the moment I cannot test
different TVM builds.