This is an automated email from the ASF dual-hosted git repository.

jxie pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git


The following commit(s) were added to refs/heads/master by this push:
     new dae6cda  add use_global_stats in nn.BatchNorm
dae6cda is described below

commit dae6cda0e2f3dbdeedcc9139def33a185fa9a2f8
Author: wuwei <wuweigr...@qq.com>
AuthorDate: Sun Jan 14 15:57:38 2018 +0800

    add use_global_stats in nn.BatchNorm
---
 python/mxnet/gluon/nn/basic_layers.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/python/mxnet/gluon/nn/basic_layers.py b/python/mxnet/gluon/nn/basic_layers.py
index a66cc22..43b4bda 100644
--- a/python/mxnet/gluon/nn/basic_layers.py
+++ b/python/mxnet/gluon/nn/basic_layers.py
@@ -308,6 +308,10 @@ class BatchNorm(HybridBlock):
         When the next layer is linear (also e.g. `nn.relu`),
         this can be disabled since the scaling
         will be done by the next layer.
+    use_global_stats: bool, default False
+        If True, use global moving statistics instead of local batch-norm. This will force
+        change batch-norm into a scale shift operator.
+        If False, use local batch-norm.
     beta_initializer: str or `Initializer`, default 'zeros'
         Initializer for the beta weight.
     gamma_initializer: str or `Initializer`, default 'ones'
@@ -329,12 +333,12 @@ class BatchNorm(HybridBlock):
         - **out**: output tensor with the same shape as `data`.
     """
     def __init__(self, axis=1, momentum=0.9, epsilon=1e-5, center=True, scale=True,
-                 beta_initializer='zeros', gamma_initializer='ones',
+                 use_global_stats=False, beta_initializer='zeros', gamma_initializer='ones',
                  running_mean_initializer='zeros', running_variance_initializer='ones',
                  in_channels=0, **kwargs):
         super(BatchNorm, self).__init__(**kwargs)
         self._kwargs = {'axis': axis, 'eps': epsilon, 'momentum': momentum,
-                        'fix_gamma': not scale}
+                        'fix_gamma': not scale, 'use_global_stats': use_global_stats}
         if in_channels != 0:
             self.in_channels = in_channels
 

-- 
To stop receiving notification emails like this one, please contact
j...@apache.org.

Reply via email to