zhreshold commented on a change in pull request #8005: add warmup lr_scheduler
URL: https://github.com/apache/incubator-mxnet/pull/8005#discussion_r141696554
 
 

 ##########
 File path: tests/python/unittest/test_lr_scheduler.py
 ##########
 @@ -0,0 +1,57 @@
+import logging
+import mxnet as mx 
+import mxnet.optimizer as opt              
+
+def test_lr_sceduler(lr, steps, lr_factor, warmup_step, warmup_lr):
+    logging.basicConfig(level=logging.DEBUG) 
+
+    lr_scheduler = None
+    if warmup_step > 0 and warmup_lr > lr:
+        lr_scheduler =  mx.lr_scheduler.MultiFactorScheduler(step=steps, factor=lr_factor,
+                    warmup_step = warmup_step, begin_lr=lr, stop_lr=warmup_lr)
+    else:  
+        lr_scheduler =  mx.lr_scheduler.MultiFactorScheduler(step=steps, 
factor=lr_factor) 
+
+    optimizer_params = {
+            'learning_rate': lr,
+            'lr_scheduler': lr_scheduler}
+
+    optimizer = opt.create('sgd', **optimizer_params)  
+    updater = opt.get_updater(optimizer)     
+
+    x = [[[[i*10+j for j in range(10)] for i in range(10)]]]
+    x = mx.nd.array(x, dtype='float32')
+    y = mx.nd.ones(shape = x.shape, dtype='float32') 
+
+    res_lr = []
+    for i in range(1,steps[-1] + 5):
+        updater(0, y, x)
+        cur_lr = optimizer._get_lr(0)
+        res_lr.append(cur_lr)
+        logging.info("step %d lr = %f", i, cur_lr)
+
+    if warmup_step > 1:
+        assert mx.test_utils.almost_equal(res_lr[warmup_step], warmup_lr, 1e-10)
+        lr = warmup_lr
+    for i in range(len(steps)):
+        assert mx.test_utils.almost_equal(res_lr[steps[i]], lr * pow(lr_factor, i + 1), 1e-10)
+
+if __name__ == "__main__":
+    #Legal input
 
 Review comment:
   Wrap all these tests in a function like test_multi_lr_scheduler(), and use 
   ```
   if __name__ == '__main__':
       import nose
       nose.runmodule()
   
   # or maybe without nose
   if __name__ == '__main__':
       test_multi_step_lr_scheduler()
   ```
   to trigger all tests, we can have more unit tests for lr_schedulers in the future.
 
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

Reply via email to