# Requires PyTorch; DeepModel, Settings, and Activation come from the
# surrounding hyperparameter-scoping framework this snippet belongs to.
import torch.nn as nn


class ConvBlock(DeepModel):
    __scope__ = "model.convblock"

    # Register default hyperparameters under this block's scope.
    with Settings.default.scope(__scope__) as hparams:
        hparams.kernel_size = (3, 3)
        hparams.dropout = 0.25
        hparams.batch_norm = True
        hparams.stride = 1
        hparams.activation = "relu"

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        with self.scope() as p:
            # Default the output channel count to the input channel count.
            if p.filters is None:
                p.filters = p.in_channels

            layers = []
            # "Same" padding for odd kernel sizes: half the kernel extent per dim.
            p.padding = tuple(dim_size // 2 for dim_size in p.kernel_size)
            # Build a standard convolution block: conv -> (BN) -> (act) -> (dropout).
            layers.append(
                nn.Conv2d(
                    p.in_channels, p.filters, p.kernel_size, p.stride, padding=p.padding
                )
            )
            if p.batch_norm:
                layers.append(nn.BatchNorm2d(p.filters))
            if p.activation:
                layers.append(Activation(p.activation))
            if p.dropout > 0:
                layers.append(nn.Dropout2d(p.dropout))

            # Wrap the layers in a single sequential module.
            self.conv = nn.Sequential(*layers)

    def output_shape(self, input_shape: tuple) -> tuple:
        assert isinstance(input_shape, tuple)

        # Standard convolution output-size formula:
        # out = (in + 2 * padding - kernel_size) // stride + 1
        h_in, w_in = input_shape
        with self.scope() as p:
            h_out = (h_in + 2 * p.padding[0] - p.kernel_size[0]) // p.stride + 1
            w_out = (w_in + 2 * p.padding[1] - p.kernel_size[1]) // p.stride + 1
        return (h_out, w_out)

    def forward(self, x):
        return self.conv(x)
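
# A minimal usage sketch. The keyword-argument constructor and the
# in_channels/filters parameter names are assumptions inferred from the
# class above; the exact instantiation API depends on the scoping framework.
import torch

block = ConvBlock(in_channels=3, filters=16)
x = torch.randn(1, 3, 32, 32)        # (batch, channels, height, width)
y = block(x)                          # -> shape (1, 16, 32, 32)

# With kernel_size=(3, 3), stride=1, padding=(1, 1):
# h_out = (32 + 2*1 - 3) // 1 + 1 = 32, and likewise for w_out.
print(block.output_shape((32, 32)))   # (32, 32)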