acphile commented on issue #18412:
URL:
https://github.com/apache/incubator-mxnet/issues/18412#issuecomment-639213050
## An example
```
from mxnet import np, npx
from mxnet.gluon import nn, parameter
npx.set_np()
class Net(nn.HybridBlock):
    """A minimal HybridBlock wrapping a single Dense(3) layer (no activation)."""

    def __init__(self, **kwargs):
        super(Net, self).__init__(**kwargs)
        # Output dim 3; input dim inferred at first forward (shape=(3, -1)).
        self.hidden1 = nn.Dense(3)

    def hybrid_forward(self, F, x):
        # Identity pass through the dense layer.
        return self.hidden1(x)
class Net2(nn.HybridBlock):
    """A minimal HybridBlock wrapping a single Dense(3) layer with ReLU."""

    def __init__(self, **kwargs):
        super(Net2, self).__init__(**kwargs)
        # Same layer as Net.hidden1 but with a ReLU activation fused in.
        self.hidden2 = nn.Dense(3, activation='relu')

    def hybrid_forward(self, F, x):
        # relu(Wx + b) via the dense layer's built-in activation.
        return self.hidden2(x)
>>> x = np.random.normal(size=(5, 10))
>>> net = Net()
>>> net.initialize()
>>> print(net.collect_params())
{'hidden1_weight': Parameter hidden1_weight (shape=(3, -1), dtype=float32),
'hidden1_bias': Parameter hidden1_bias (shape=(3,), dtype=float32)}
>>> print(net(x))
[[ 0.09921001 0.04954842 0.12571132]
[-0.06151271 -0.17121975 -0.18948194]
[ 0.0051947 -0.08211827 0.02048509]
[ 0.10466634 -0.08711289 -0.18864125]
[ 0.26467288 -0.08746998 0.16121587]]
>>> net2=Net2()
>>> net2.initialize()
>>> print(net2.collect_params())
{'hidden2_weight': Parameter hidden2_weight (shape=(3, -1), dtype=float32),
'hidden2_bias': Parameter hidden2_bias (shape=(3,), dtype=float32)}
>>> net2.hidden2.share_parameters(net.hidden1.collect_params())
"""
equals to
net2.hidden2.weight=net.hidden1.weight
net2.hidden2.bias=net.hidden1.bias
"""
>>> print(net2.hidden2.weight is net.hidden1.weight)
True
>>> print(net2.hidden2.bias is net.hidden1.bias)
True
>>> net2.initialize()
/home/ubuntu/incubator-mxnet/python/mxnet/gluon/block.py:410: UserWarning:
Parameter hidden1_weight has generated its symbol representation, which could
be used in some cached graph. Skip the operation that sets its name as
hidden2_weight.
self._set_prefix(recorded, prefix)
/home/ubuntu/incubator-mxnet/python/mxnet/gluon/block.py:410: UserWarning:
Parameter hidden1_bias has generated its symbol representation, which could be
used in some cached graph. Skip the operation that sets its name as
hidden2_bias.
self._set_prefix(recorded, prefix)
/home/ubuntu/incubator-mxnet/python/mxnet/gluon/block.py:694: UserWarning:
Parameter 'hidden1_weight' is already initialized, ignoring. Set
force_reinit=True to re-initialize.
v.initialize(None, ctx, init, force_reinit=force_reinit)
/home/ubuntu/incubator-mxnet/python/mxnet/gluon/block.py:694: UserWarning:
Parameter 'hidden1_bias' is already initialized, ignoring. Set
force_reinit=True to re-initialize.
v.initialize(None, ctx, init, force_reinit=force_reinit)
>>> net2.hybridize()
>>> print(net2.collect_params())
{'hidden2_weight': Parameter hidden1_weight (shape=(3, 10), dtype=float32),
'hidden2_bias': Parameter hidden1_bias (shape=(3,), dtype=float32)}
>>> print(net2(x)) # should equal relu(net(x))
[[ 0.09921001 0.04954842 0.12571132]
[-0. -0. -0. ]
[ 0.0051947 -0. 0.02048509]
[ 0.10466634 -0. -0. ]
[ 0.26467288 -0. 0.16121587]]
```
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]