Guest User

Untitled

a guest
Jul 19th, 2018
86
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 0.95 KB | None | 0 0
  1. def __init__ (self):
  2. super(Net, self).__init__()
  3. self.conv1 = nn.Conv1d(in_channels = n_input_channels, out_channels = 40, kernel_size = 9, padding= 4)
  4. self.conv2 = nn.Conv1d(in_channels = 40, out_channels = 80, kernel_size = 3) #.double
  5. self.pool1 = nn.MaxPool1d(3)
  6. self.fc1 = nn.Linear(2484, 200)
  7. self.fc2 = nn.Linear(200, 200)
  8. self.fc3 = nn.Linear(200, 200)
  9. self.fc4 = nn.Linear(200, 99)
  10.  
  11. def forward(self, x):
  12. x2 = x[:, :, :4].contiguous().view(x.size(0), -1)
  13. x1 = x[:, :, 4:]
  14.  
  15.  
  16. x1 = F.relu(self.conv1(x1))
  17. x1 = self.pool1(x1)
  18. x1 = F.relu(self.conv2(x1))
  19. x1 = x1.view(x1.size(0), -1
  20.  
  21. x = torch.cat((x1,x2), 1)
  22.  
  23. x = F.relu(self.fc1(x))
  24. x = F.relu(self.fc2(x))
  25. x = F.relu(self.fc3(x))
  26. x = F.softplus(self.fc4(x))
  27. return x
  28.  
  29. def init_weights(m):
  30. if type(m) == nn.Linear:
  31. nn.init.xavier_uniform_(m.weight)
  32. m.bias.data.fill_(0.01)
  33.  
  34.  
# Instantiate the network and initialize it: Module.apply walks every
# submodule, so init_weights sees each nn.Linear layer in turn.
net = Net()
net.apply(init_weights)
Add Comment
Please, Sign In to add comment