import torch
import torch.nn as nn
import torch.nn.functional as F
from collections import OrderedDict
import numpy as np
import os

class Flatten(nn.Module):

    def __init__(self):
        super(Flatten, self).__init__()

    def forward(self, x):
        """
        Arguments:
            x: a float tensor with shape [batch_size, c, h, w].
        Returns:
            a float tensor with shape [batch_size, c*h*w].
        """

        # the pretrained weights expect the features to be flattened with the
        # h and w axes swapped; without this transpose the model does not work
        x = x.transpose(3, 2).contiguous()

        return x.view(x.size(0), -1)


class PNet(nn.Module):

    def __init__(self):

        super(PNet, self).__init__()

        # suppose we have an input of size HxW, then
        # after the first conv: H - 2,
        # after the pool: ceil((H - 2)/2),
        # after the second conv: ceil((H - 2)/2) - 2,
        # after the last conv: ceil((H - 2)/2) - 4,
        # and the same for W
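        # for example, with the canonical 12x12 PNet input:
        # 12 -> 10 after conv1, ceil(10/2) = 5 after pool1, 3 after conv2,
        # 1 after conv3, so the two heads below output 1x1 maps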

        self.features = nn.Sequential(OrderedDict([
            ('conv1', nn.Conv2d(3, 10, 3, 1)),
            ('prelu1', nn.PReLU(10)),
            ('pool1', nn.MaxPool2d(2, 2, ceil_mode=True)),

            ('conv2', nn.Conv2d(10, 16, 3, 1)),
            ('prelu2', nn.PReLU(16)),

            ('conv3', nn.Conv2d(16, 32, 3, 1)),
            ('prelu3', nn.PReLU(32))
        ]))
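
        # note: PNet is fully convolutional, so it can be run on images of
        # arbitrary size; the two 1x1 conv heads below then produce dense
        # per-location face probabilities and box regression offsets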
        self.conv4_1 = nn.Conv2d(32, 2, 1, 1)
        self.conv4_2 = nn.Conv2d(32, 4, 1, 1)

        # load the pretrained weights (a pickled dict of numpy arrays keyed by parameter name)
        pnetPath = os.path.join(os.path.dirname(__file__), 'weights/pnet.npy')
        weights = np.load(pnetPath, allow_pickle=True)[()]
        for n, p in self.named_parameters():
            p.data = torch.FloatTensor(weights[n])

    def forward(self, x):
        """
        Arguments:
            x: a float tensor with shape [batch_size, 3, h, w].
        Returns:
            b: a float tensor with shape [batch_size, 4, h', w'].
            a: a float tensor with shape [batch_size, 2, h', w'].
        """
        x = self.features(x)
        a = self.conv4_1(x)
        b = self.conv4_2(x)
        # softmax over the two face/not-face channels
        a = F.softmax(a, dim=1)
        return b, a


class RNet(nn.Module):

    def __init__(self):

        super(RNet, self).__init__()

        self.features = nn.Sequential(OrderedDict([
            ('conv1', nn.Conv2d(3, 28, 3, 1)),
            ('prelu1', nn.PReLU(28)),
            ('pool1', nn.MaxPool2d(3, 2, ceil_mode=True)),

            ('conv2', nn.Conv2d(28, 48, 3, 1)),
            ('prelu2', nn.PReLU(48)),
            ('pool2', nn.MaxPool2d(3, 2, ceil_mode=True)),

            ('conv3', nn.Conv2d(48, 64, 2, 1)),
            ('prelu3', nn.PReLU(64)),

            ('flatten', Flatten()),
            # 576 = 64 * 3 * 3: the flattened conv3 output for the canonical 24x24 RNet input
            ('conv4', nn.Linear(576, 128)),
            ('prelu4', nn.PReLU(128))
        ]))

        self.conv5_1 = nn.Linear(128, 2)
        self.conv5_2 = nn.Linear(128, 4)

        # load the pretrained weights; allow_pickle is required because the file stores a pickled dict
        rnetPath = os.path.join(os.path.dirname(__file__), 'weights/rnet.npy')
        weights = np.load(rnetPath, allow_pickle=True)[()]
        for n, p in self.named_parameters():
            p.data = torch.FloatTensor(weights[n])

    def forward(self, x):
        """
        Arguments:
            x: a float tensor with shape [batch_size, 3, h, w].
        Returns:
            b: a float tensor with shape [batch_size, 4].
            a: a float tensor with shape [batch_size, 2].
        """
        x = self.features(x)
        a = self.conv5_1(x)
        b = self.conv5_2(x)
        a = F.softmax(a, dim=1)
        return b, a


class ONet(nn.Module):

    def __init__(self):

        super(ONet, self).__init__()

        self.features = nn.Sequential(OrderedDict([
            ('conv1', nn.Conv2d(3, 32, 3, 1)),
            ('prelu1', nn.PReLU(32)),
            ('pool1', nn.MaxPool2d(3, 2, ceil_mode=True)),

            ('conv2', nn.Conv2d(32, 64, 3, 1)),
            ('prelu2', nn.PReLU(64)),
            ('pool2', nn.MaxPool2d(3, 2, ceil_mode=True)),

            ('conv3', nn.Conv2d(64, 64, 3, 1)),
            ('prelu3', nn.PReLU(64)),
            ('pool3', nn.MaxPool2d(2, 2, ceil_mode=True)),

            ('conv4', nn.Conv2d(64, 128, 2, 1)),
            ('prelu4', nn.PReLU(128)),

            ('flatten', Flatten()),
            # 1152 = 128 * 3 * 3: the flattened conv4 output for the canonical 48x48 ONet input
            ('conv5', nn.Linear(1152, 256)),
            ('drop5', nn.Dropout(0.25)),
            ('prelu5', nn.PReLU(256)),
        ]))

        self.conv6_1 = nn.Linear(256, 2)
        self.conv6_2 = nn.Linear(256, 4)
        self.conv6_3 = nn.Linear(256, 10)

        # load the pretrained weights; allow_pickle is required because the file stores a pickled dict
        onetPath = os.path.join(os.path.dirname(__file__), 'weights/onet.npy')
        weights = np.load(onetPath, allow_pickle=True)[()]
        for n, p in self.named_parameters():
            p.data = torch.FloatTensor(weights[n])

    def forward(self, x):
        """
        Arguments:
            x: a float tensor with shape [batch_size, 3, h, w].
        Returns:
            c: a float tensor with shape [batch_size, 10].
            b: a float tensor with shape [batch_size, 4].
            a: a float tensor with shape [batch_size, 2].
        """
        x = self.features(x)
        a = self.conv6_1(x)
        b = self.conv6_2(x)
        c = self.conv6_3(x)
        a = F.softmax(a, dim=1)
        return c, b, a
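

# A minimal usage sketch (not part of the networks above), assuming the
# weights/pnet.npy, weights/rnet.npy and weights/onet.npy files are present
# next to this module. The 12/24/48 input sizes are the canonical MTCNN
# crop sizes for PNet, RNet and ONet respectively.
if __name__ == '__main__':
    pnet, rnet, onet = PNet(), RNet(), ONet()
    for net in (pnet, rnet, onet):
        net.eval()  # disable dropout in ONet

    with torch.no_grad():
        # PNet is fully convolutional; a 12x12 input yields 1x1 output maps
        offsets, probs = pnet(torch.randn(1, 3, 12, 12))
        print(offsets.shape, probs.shape)  # [1, 4, 1, 1], [1, 2, 1, 1]

        # RNet: box regression offsets and face probabilities for 24x24 crops
        offsets, probs = rnet(torch.randn(1, 3, 24, 24))
        print(offsets.shape, probs.shape)  # [1, 4], [1, 2]

        # ONet: landmarks, offsets and probabilities for 48x48 crops
        landmarks, offsets, probs = onet(torch.randn(1, 3, 48, 48))
        print(landmarks.shape, offsets.shape, probs.shape)  # [1, 10], [1, 4], [1, 2]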