thomelane commented on a change in pull request #15343: Revise Symbol tutorial
URL: https://github.com/apache/incubator-mxnet/pull/15343#discussion_r297298652
##########
File path: docs/tutorials/basic/symbol.md
##########
@@ -209,63 +160,76 @@ net.list_arguments()
### Modularized Construction for Deep Networks
-Constructing a *deep* network layer by layer, (like the Google Inception
network),
-can be tedious owing to the large number of layers.
-So, for such networks, we often modularize the construction.
+Constructing a *deep* network layer by layer, (like the Google Inception
network), can be tedious owing to the large number of layers. So, for such
networks, we often modularize the construction.
-For example, in Google Inception network,
-we can first define a factory function which chains the convolution,
-batch normalization and rectified linear unit (ReLU) activation layers
together.
+For example, in Google Inception network, we can first define a factory
function which chains the convolution, batch normalization and rectified linear
unit (ReLU) activation layers together.
```python
-def ConvFactory(data, num_filter, kernel, stride=(1,1), pad=(0, 0),name=None,
suffix=''):
- conv = mx.sym.Convolution(data=data, num_filter=num_filter, kernel=kernel,
- stride=stride, pad=pad, name='conv_%s%s' %(name, suffix))
- bn = mx.sym.BatchNorm(data=conv, name='bn_%s%s' %(name, suffix))
- act = mx.sym.Activation(data=bn, act_type='relu', name='relu_%s%s'
- %(name, suffix))
+def ConvFactory(data, num_filter, kernel,
+ stride=(1, 1), pad=(0, 0), name=None, suffix=''):
+ conv = mx.sym.Convolution(data=data, num_filter=num_filter,
+ kernel=kernel, stride=stride, pad=pad,
+ name='conv_%s%s' % (name, suffix))
+
+ bn = mx.sym.BatchNorm(data=conv, name='bn_%s%s' % (name, suffix))
+
+ act = mx.sym.Activation(data=bn, act_type='relu',
+ name='relu_%s%s' % (name, suffix))
return act
+
prev = mx.sym.Variable(name="Previous Output")
-conv_comp = ConvFactory(data=prev, num_filter=64, kernel=(7,7), stride=(2, 2))
+conv_comp = ConvFactory(data=prev, num_filter=64, kernel=(7, 7), stride=(2, 2))
shape = {"Previous Output" : (128, 3, 28, 28)}
-mx.viz.plot_network(symbol=conv_comp, shape=shape,
node_attrs={"shape":"oval","fixedsize":"false"})
+mx.viz.plot_network(symbol=conv_comp, shape=shape,
+ node_attrs={"shape": "oval", "fixedsize": "false"})
```
-Then we can define a function that constructs an inception module based on
-factory function `ConvFactory`.
+Then we can define a function that constructs an inception module based on
factory function `ConvFactory`.
```python
-def InceptionFactoryA(data, num_1x1, num_3x3red, num_3x3, num_d3x3red,
num_d3x3,
- pool, proj, name):
+def InceptionFactoryA(data, num_1x1, num_3x3red, num_3x3, num_d3x3red,
+ num_d3x3, pool, proj, name):
# 1x1
- c1x1 = ConvFactory(data=data, num_filter=num_1x1, kernel=(1, 1),
name=('%s_1x1' % name))
+ c1x1 = ConvFactory(data=data, num_filter=num_1x1, kernel=(1, 1),
+ name=('%s_1x1' % name))
+
# 3x3 reduce + 3x3
- c3x3r = ConvFactory(data=data, num_filter=num_3x3red, kernel=(1, 1),
name=('%s_3x3' % name), suffix='_reduce')
- c3x3 = ConvFactory(data=c3x3r, num_filter=num_3x3, kernel=(3, 3), pad=(1,
1), name=('%s_3x3' % name))
+ c3x3r = ConvFactory(data=data, num_filter=num_3x3red, kernel=(1, 1),
+ name=('%s_3x3' % name), suffix='_reduce')
+ c3x3 = ConvFactory(data=c3x3r, num_filter=num_3x3, kernel=(3, 3),
+ pad=(1, 1), name=('%s_3x3' % name))
+
# double 3x3 reduce + double 3x3
- cd3x3r = ConvFactory(data=data, num_filter=num_d3x3red, kernel=(1, 1),
name=('%s_double_3x3' % name), suffix='_reduce')
- cd3x3 = ConvFactory(data=cd3x3r, num_filter=num_d3x3, kernel=(3, 3),
pad=(1, 1), name=('%s_double_3x3_0' % name))
- cd3x3 = ConvFactory(data=cd3x3, num_filter=num_d3x3, kernel=(3, 3),
pad=(1, 1), name=('%s_double_3x3_1' % name))
+ cd3x3r = ConvFactory(data=data, num_filter=num_d3x3red, kernel=(1, 1),
+ name=('%s_double_3x3' % name), suffix='_reduce')
+ cd3x3 = ConvFactory(data=cd3x3r, num_filter=num_d3x3, kernel=(3, 3),
+ pad=(1, 1), name=('%s_double_3x3_0' % name))
+ cd3x3 = ConvFactory(data=cd3x3, num_filter=num_d3x3, kernel=(3, 3),
+ pad=(1, 1), name=('%s_double_3x3_1' % name))
+
# pool + proj
- pooling = mx.sym.Pooling(data=data, kernel=(3, 3), stride=(1, 1), pad=(1,
1), pool_type=pool, name=('%s_pool_%s_pool' % (pool, name)))
- cproj = ConvFactory(data=pooling, num_filter=proj, kernel=(1, 1),
name=('%s_proj' % name))
+ pooling = mx.sym.Pooling(data=data, kernel=(3, 3), stride=(1, 1),
+ pad=(1, 1), pool_type=pool,
+ name=('%s_pool_%s_pool' % (pool, name)))
+ cproj = ConvFactory(data=pooling, num_filter=proj, kernel=(1, 1),
+ name=('%s_proj' % name))
+
# concat
- concat = mx.sym.Concat(*[c1x1, c3x3, cd3x3, cproj],
name='ch_concat_%s_chconcat' % name)
+ concat = mx.sym.Concat(*[c1x1, c3x3, cd3x3, cproj],
+ name='ch_concat_%s_chconcat' % name)
return concat
+
prev = mx.sym.Variable(name="Previous Output")
in3a = InceptionFactoryA(prev, 64, 64, 64, 64, 96, "avg", 32, name="in3a")
-mx.viz.plot_network(symbol=in3a, shape=shape,
node_attrs={"shape":"oval","fixedsize":"false"})
+mx.viz.plot_network(symbol=in3a, shape=shape,
+ node_attrs={"shape": "oval", "fixedsize": "false"})
```
-Finally, we can obtain the whole network by chaining multiple inception
-modules. See a complete example
-[here](https://github.com/dmlc/mxnet/blob/master/example/image-classification/symbols/inception-bn.py).
+Finally, we can obtain the whole network by chaining multiple inception
modules. See a [complete
example](https://github.com/dmlc/mxnet/blob/master/example/image-classification/symbols/inception-bn.py).
Review comment:
Undo this change. Or, if you wanted a larger link, you could include 'complete
example' in the link text.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services