Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
class ConvBlock(DeepModel):
    """Standard 2-D convolution block: Conv2d -> [BatchNorm2d] -> [Activation] -> [Dropout2d].

    Hyperparameters are registered under the ``model.convblock`` settings
    scope; each optional stage is controlled by its hparam (``batch_norm``,
    ``activation``, ``dropout``).
    """

    __scope__ = "model.convblock"
    with Settings.default.scope(__scope__) as hparams:
        hparams.kernel_size = (3, 3)
        hparams.dropout = 0.25
        hparams.batch_norm = True
        hparams.stride = 1
        hparams.activation = "relu"

    def __init__(self, **kwargs):
        """Build the layer stack from the scoped hyperparameters.

        Reads ``in_channels``/``filters``/``kernel_size``/``stride`` etc.
        from the hparam scope and stores the resulting ``nn.Sequential``
        in ``self.conv``.  NOTE(review): ``in_channels`` and ``filters``
        are presumably supplied by the caller via the scope — not set in
        the defaults above.
        """
        super().__init__(**kwargs)
        with self.scope() as p:
            # Default: keep the channel count unchanged when `filters` is unset.
            if p.filters is None:
                p.filters = p.in_channels
            # "Same"-style padding for odd kernels: floor(k / 2) per dimension.
            # Written back into the scope so output_shape() can reuse it.
            p.padding = [k // 2 for k in p.kernel_size]
            layers = [
                nn.Conv2d(
                    p.in_channels,
                    p.filters,
                    p.kernel_size,
                    p.stride,
                    padding=p.padding,
                )
            ]
            if p.batch_norm:
                layers.append(nn.BatchNorm2d(p.filters))
            # Identity check, not `!= False`: `0 == False` in Python, so an
            # equality test would silently drop a falsy-but-valid activation
            # spec.  `activation = False` is the explicit "disabled" sentinel.
            if p.activation is not False:
                layers.append(Activation(p.activation))
            if p.dropout > 0:
                layers.append(nn.Dropout2d(p.dropout))
            self.conv = nn.Sequential(*layers)

    def output_shape(self, input_shape: tuple) -> tuple:
        """Return the (h_out, w_out) spatial shape for a (h_in, w_in) input.

        Applies the standard Conv2d size formula using the padding chosen
        in ``__init__`` (dilation assumed to be 1).
        """
        assert isinstance(input_shape, tuple)
        h_in, w_in = input_shape
        with self.scope() as p:
            h_out = (h_in + 2 * p.padding[0] - p.kernel_size[0]) // p.stride + 1
            w_out = (w_in + 2 * p.padding[1] - p.kernel_size[1]) // p.stride + 1
        return (h_out, w_out)

    def forward(self, x):
        """Apply the block to a batched image tensor (N, C, H, W)."""
        return self.conv(x)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement