tchaton commented on issue #9648: BatchNorm Evaluation Mode Backward Fails with 
cudnn Enabled
URL: 
https://github.com/apache/incubator-mxnet/issues/9648#issuecomment-522504996
 
 
   ```
   import torch
   import torch.nn as nn
   import torch.nn.functional as F
   import math
   import numpy as np
   
   def _upsample(x):
       h, w = x.shape[2:]
       return F.upsample_bilinear(x, size=(h * 2, w * 2))
   
   
   def upsample_conv(x, conv):
       """Upsample *x* to twice its spatial size, then apply *conv*."""
       upsampled = _upsample(x)
       return conv(upsampled)
   
   class genBlock(nn.Module):
       """Pre-activation residual generator block.

       Main path: BN -> act -> (optional 2x upsample) conv -> BN -> act -> conv.
       Shortcut path: identity, or a (possibly upsampling) projection conv when
       the channel count changes or upsampling is requested.
       """

       def __init__(self, in_channels, out_channels,
                    activation=F.relu, hidden_channels=None, ksize=3, pad=1,
                    upsample=False, n_classes=0):
           super(genBlock, self).__init__()
           self.activation = activation
           self.upsample = upsample
           # The shortcut needs its own conv whenever the identity cannot be
           # reused: channel mismatch or spatial upsampling.
           self.learnable_sc = (in_channels != out_channels) or upsample
           if hidden_channels is None:
               hidden_channels = out_channels
           self.n_classes = n_classes
           # NOTE: submodule construction order is kept (c1, c2, b1, b2, c_sc)
           # so seeded weight initialization draws the RNG identically.
           self.c1 = nn.Conv2d(in_channels, hidden_channels,
                               kernel_size=ksize, padding=pad)
           self.c2 = nn.Conv2d(hidden_channels, out_channels,
                               kernel_size=ksize, padding=pad)
           self.b1 = nn.BatchNorm2d(in_channels)
           self.b2 = nn.BatchNorm2d(hidden_channels)
           if self.learnable_sc:
               self.c_sc = nn.Conv2d(in_channels, out_channels,
                                     kernel_size=ksize, padding=pad)

       def residual(self, x):
           """Main (residual) branch of the block."""
           out = self.activation(self.b1(x))
           out = upsample_conv(out, self.c1) if self.upsample else self.c1(out)
           out = self.activation(self.b2(out))
           return self.c2(out)

       def shortcut(self, x):
           """Shortcut branch: identity unless a projection conv is needed."""
           if not self.learnable_sc:
               return x
           if self.upsample:
               return upsample_conv(x, self.c_sc)
           return self.c_sc(x)

       def forward(self, input):
           return self.residual(input) + self.shortcut(input)
   if __name__ == "__main__":
       # Smoke test: push random noise through one upsampling block.
       # Use the GPU when present; fall back to CPU so the snippet also runs
       # on machines without CUDA (the original hard .cuda() calls crash there).
       device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
       noise = torch.randn(1, 256, 4, 4, device=device)
       g = genBlock(256, 256, activation=F.relu, upsample=True).to(device)
       out = g(noise)
       print(out.shape)
   ```

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

Reply via email to