nonstopfor opened a new issue #6990:
URL: https://github.com/apache/tvm/issues/6990
I am new to TVM and I want to use back-propagation to train a simple MLP
model. I found that the gradient pass can compute the gradients of the
different parameters, which would let me train. However, when I try to build,
an error occurs. Below are the code and the error message.
```
from numpy.core import function_base
import tvm
from tvm import relay
from tvm.relay import GlobalVar
from tvm.relay.op.nn.nn import batch_flatten
from tvm.relay.op.tensor import cos
from tvm.relay.testing import run_infer_type
from tvm.relay import create_executor, transform
from tvm.relay.transform import gradient, PartialEvaluate,
DeadCodeElimination
from tvm.relay.testing import run_infer_type, rand, check_grad,
create_workload
import numpy as np
from tvm.relay.transform.transform import ForwardFoldScaleAxis
from tvm.contrib import graph_runtime as runtime
def normal_mlp(data_shape, label_shape):
    """Build a 3-layer MLP (dense 128 -> 64 -> 10) with softmax +
    cross-entropy loss as a Relay function.

    Parameters
    ----------
    data_shape : tuple of int
        Shape of the input batch, e.g. (batch_size, 784).
    label_shape : tuple of int
        Shape of the one-hot label batch, e.g. (batch_size, 10).

    Returns
    -------
    relay.Function
        A function whose free variables (data, label, weights and biases)
        become its parameters via relay.analysis.free_vars.
    """
    data = relay.var("data", shape=data_shape, dtype="float32")
    # BUG FIX: the label variable was also created with the name hint "data",
    # producing two distinct Relay vars that share one name. Name it "label"
    # so the workload's inputs are distinguishable.
    label = relay.var("label", shape=label_shape, dtype="float32")

    fc1 = relay.nn.dense(data, relay.var("fc1_weight"), units=128)
    fc1 = relay.nn.bias_add(fc1, relay.var("fc1_bias"), axis=-1)
    act1 = relay.nn.relu(fc1)

    fc2 = relay.nn.dense(act1, relay.var("fc2_weight"), units=64)
    fc2 = relay.nn.bias_add(fc2, relay.var("fc2_bias"), axis=-1)
    act2 = relay.nn.relu(fc2)

    fc3 = relay.nn.dense(act2, relay.var("fc3_weight"), units=10)
    fc3 = relay.nn.bias_add(fc3, relay.var("fc3_bias"), axis=-1)

    # Softmax over the class axis, then cross-entropy against the labels.
    mlp = relay.nn.softmax(data=fc3)
    mlp = relay.nn.cross_entropy(mlp, label)

    # Collect every unbound variable (data, label, weights, biases) as the
    # function's parameter list.
    args = relay.analysis.free_vars(mlp)
    return relay.Function(args, mlp)
# BUG FIX: batch_size was used below but never defined, so this snippet
# raises NameError before ever reaching the reported TVM error.
batch_size = 64

data_shape = (batch_size, 784)
label_shape = (batch_size, 10)
valid_shape = (100, 784)  # NOTE(review): unused in this snippet
dtype = "float32"

# Build the forward function and type-check it.
func = normal_mlp(data_shape, label_shape)
func = run_infer_type(func)

# Differentiate the whole function and type-check the transformed result.
back = run_infer_type(gradient(func))
mod, params = create_workload(back)

# opt_level=0 disables most optimization passes while building.
opt_level = 0
with tvm.transform.PassContext(opt_level=opt_level):
    lib = relay.build(mod, target='llvm', params=params)
```
The error message is:
`Traceback (most recent call last): File "build_test.py", line 165, in
mlp_training() File "build_test.py", line 113, in mlp_training lib =
relay.build(mod, target='llvm', params=params) File
"/home/zzx/tvm/python/tvm/relay/build_module.py", line 260, in build
graph_json, mod, params = bld_mod.build(mod, target, target_host, params) File
"/home/zzx/tvm/python/tvm/relay/build_module.py", line 127, in build
self._build(mod, target, target_host) File
"/home/zzx/tvm/python/tvm/_ffi/_ctypes/packed_func.py", line 237, in call raise
get_last_ffi_error() tvm._ffi.base.TVMError: Traceback (most recent call last):`
[bt] (8)
/home/zzx/tvm/build/libtvm.so(tvm::relay::backend::GraphRuntimeCodegen::Codegen(tvm::relay::Function)+0x118)
[0x7f85bda6f6b8] [bt] (7)
/home/zzx/tvm/build/libtvm.so(std::_Function_handler<void
(tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*),
tvm::runtime::TypedPackedFunc<tvm::Map<tvm::RelayExpr,
tvm::runtime::Array<tvm::runtime::Array<tvm::Inte
ger, void>, void>, void, void> (tvm::relay::Function
const&)>::AssignTypedLambda<tvm::Map<tvm::RelayExpr,
tvm::runtime::Array<tvm::runtime::Array<tvm::Integer, void>, void>, void, void>
(*)(tvm::relay::Function const&)>(tvm::Map<tvm::RelayExpr,
tvm::runtime::Array<tvm::runtime::Array<tvm::Integer, void>, void>, void, void>
(*)(tvm::relay::Function const&))::{lambda(tvm::runtime::TVMArgs const&,
tvm::runtime::TVMRetValue*)#1}>::_M_invoke(std::_Any_data const&,
tvm::runtime::TVMArgs&&, tvm::runtime::TVMRetValue*&&)+0x124) [0x7f85bda53794]
[bt] (6)
/home/zzx/tvm/build/libtvm.so(tvm::relay::GraphPlanMemory(tvm::relay::Function
const&)+0x183) [0x7f85bda4a9b3] [bt] (5)
/home/zzx/tvm/build/libtvm.so(tvm::relay::StorageAllocator::Plan(tvm::relay::Function
const&)+0x293) [0x7f85bda500c3] [bt] (4)
/home/zzx/tvm/build/libtvm.so(tvm::relay::StorageAllocaBaseVisitor::GetToken(tvm::RelayExpr
const&)+0x16) [0x7f85bda4b786] [bt] (3)
/home/zzx/tvm/build/libtvm.so(tvm::relay::ExprVisitor::VisitExpr(tvm::
RelayExpr const&)+0x8b) [0x7f85bdaf2fab] [bt] (2)
/home/zzx/tvm/build/libtvm.so(tvm::relay::ExprFunctor<void (tvm::RelayExpr
const&)>::VisitExpr(tvm::RelayExpr const&)+0x6f) [0x7f85bdaa3aff] [bt] (1)
/home/zzx/tvm/build/libtvm.so(tvm::relay::StorageAllocaBaseVisitor::VisitExpr(tvm::relay::LetNode
const*)+0x2be) [0x7f85bda52cfe] [bt] (0)
/home/zzx/tvm/build/libtvm.so(+0x113a7d8) [0x7f85bda4a7d8] File
“/home/zzx/tvm/src/relay/backend/graph_plan_memory.cc”, line 118 TVMError:
Check failed: it != token_map.end():`
Does anybody know how to solve this problem?
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]