import torch
import torch.nn as nn
import torch.nn.functional as F


class WSDR(nn.Module):
    def __init__(self, bn=False):
        super(WSDR, self).__init__()
        # Network based on VGG16.
        # ConvReLU is a conv layer followed by a ReLU (defined below).
        self.conv1 = nn.Sequential(ConvReLU(3, 64, 3, pd=True, bn=bn),
                                   ConvReLU(64, 64, 3, pd=True, bn=bn),
                                   nn.MaxPool2d(2))
        self.conv2 = nn.Sequential(ConvReLU(64, 128, 3, pd=True, bn=bn),
                                   ConvReLU(128, 128, 3, pd=True, bn=bn),
                                   nn.MaxPool2d(2))
        self.conv3 = nn.Sequential(ConvReLU(128, 256, 3, pd=True, bn=bn),
                                   ConvReLU(256, 256, 3, pd=True, bn=bn),
                                   ConvReLU(256, 256, 3, pd=True, bn=bn),
                                   nn.MaxPool2d(2))
        self.conv4 = nn.Sequential(ConvReLU(256, 512, 3, pd=True, bn=bn),
                                   ConvReLU(512, 512, 3, pd=True, bn=bn),
                                   ConvReLU(512, 512, 3, pd=True, bn=bn),
                                   nn.MaxPool2d(2))
        self.conv5 = nn.Sequential(ConvReLU(512, 512, 3, pd=True, bn=bn),
                                   ConvReLU(512, 512, 3, pd=True, bn=bn),
                                   ConvReLU(512, 512, 3, pd=True, bn=bn))

        # Classification head: 1024 intermediate channels, then 20 output
        # channels (presumably one score per class), collapsed by global
        # average pooling over the 14x14 feature map.
        self.gap = nn.Sequential(ConvReLU(512, 1024, 3, pd=True, bn=bn),
                                 ConvReLU(1024, 20, 3, pd=True, bn=bn),
                                 nn.AvgPool2d(kernel_size=14, stride=14))
        # When this fc layer is added, training behaves as expected:
        # self.fc = nn.Linear(self.num_classes, self.num_classes)

    def forward(self, im_data):
        x = self.conv1(im_data)
        x = self.conv2(x)
        x = self.conv3(x)
        x = self.conv4(x)
        conv5features = self.conv5(x)
        gap = self.gap(conv5features)  # (N, 20, 1, 1)
        scores = gap.squeeze()         # (N, 20); caution: a batch of 1 also loses its batch dim
        # scores = self.fc(scores)
        return scores

class ConvReLU(nn.Module):
    def __init__(self, in_ch, out_ch, kernel_sz, stride=1, relu=True, pd=True, bn=False):
        super(ConvReLU, self).__init__()
        padding = int((kernel_sz - 1) / 2) if pd else 0  # same spatial size by default
        self.conv = nn.Conv2d(in_ch, out_ch, kernel_sz, stride, padding=padding)
        self.bn = nn.BatchNorm2d(out_ch, eps=0.001, momentum=0, affine=True) if bn else None
        self.relu = nn.ReLU(inplace=True) if relu else None

    def forward(self, x):
        x = self.conv(x)
        if self.bn is not None:
            x = self.bn(x)
        if self.relu is not None:
            x = self.relu(x)
        return x
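
As a quick sanity check, the sketch below (not part of the original paste) instantiates the model and pushes a dummy batch through it. The 224x224 input size is an assumption implied by the AvgPool2d(kernel_size=14) head: the four 2x2 max-pools in conv1-conv4 reduce 224 to 224/16 = 14, and conv5 keeps that size.

# Smoke test (assumed usage, not from the original paste): a 224x224 input
# reaches conv5 at 14x14, so the 14x14 average pool collapses it to 1x1.
model = WSDR(bn=False)
dummy = torch.randn(2, 3, 224, 224)  # batch of 2 RGB images, 224x224
scores = model(dummy)
print(scores.shape)                  # torch.Size([2, 20]) -- one score per class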

# Compute the output and the loss. `model`, `input_var` (a batch of images),
# `target_var` (multi-label targets), and `optimizer` are assumed to be
# defined elsewhere; see the sketch after this snippet.
output = model(input_var)
loss = F.multilabel_soft_margin_loss(output, target_var)

# Compute the gradients and take an SGD step.
optimizer.zero_grad()
loss.backward()
optimizer.step()
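
For context, a minimal training loop wrapping the snippet above might look like the following. The learning rate, momentum, and `train_loader` are illustrative assumptions, not values from the original paste.

# Minimal training-loop sketch (hypothetical setup; hyperparameters and
# `train_loader` are illustrative assumptions).
model = WSDR(bn=False)
optimizer = torch.optim.SGD(model.parameters(), lr=0.001, momentum=0.9)

model.train()
for input_var, target_var in train_loader:  # target_var: (N, 20) multi-hot labels
    output = model(input_var)
    loss = F.multilabel_soft_margin_loss(output, target_var)

    optimizer.zero_grad()
    loss.backward()
    optimizer.step()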