toropyga

NN_base_project

Jun 26th, 2022
import torch
import torch.nn as nn

num_features = 20
n_hidden_neurons_1 = 30
n_hidden_neurons_2 = 10
n_out_neurons = 1


class Net(nn.Module):
    """Fully connected regression net: two hidden layers,
    each followed by batch normalization and ReLU."""

    def __init__(self):
        super(Net, self).__init__()
        self.layers = nn.Sequential(
            nn.Linear(num_features, n_hidden_neurons_1),
            nn.BatchNorm1d(n_hidden_neurons_1),
            nn.ReLU(),
            nn.Linear(n_hidden_neurons_1, n_hidden_neurons_2),
            nn.BatchNorm1d(n_hidden_neurons_2),
            nn.ReLU(),
            nn.Linear(n_hidden_neurons_2, n_out_neurons),
        )

    def forward(self, inputs):
        return self.layers(inputs)


net = Net()
# init_weights, X_train and y_train are not part of this paste and are assumed
# to be defined elsewhere in the project (see the sketch after this listing).
net.layers.apply(init_weights)
optimizer = torch.optim.Adam(net.parameters(), lr=0.001)
# nn.MSELoss expects `reduction`; the original `reduce="sum"` is a deprecated
# boolean argument and would not do what was intended.
loss = nn.MSELoss(reduction="sum")

num_epochs = 500

for epoch in range(num_epochs):
    optimizer.zero_grad()              # clear gradients from the previous step

    preds = net(X_train)               # forward pass on the full training set

    loss_value = loss(preds, y_train)  # summed squared error
    loss_value.backward()              # backpropagate

    optimizer.step()                   # update the weights
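The listing relies on an init_weights helper and on X_train / y_train tensors that are not included in the paste. A minimal sketch of what they could look like, assuming Xavier-uniform initialization for the linear layers and random placeholder data (both illustrative choices, not the original code):

def init_weights(module):
    # Initialize every Linear layer in the Sequential block; BatchNorm layers
    # keep their default initialization.
    if isinstance(module, nn.Linear):
        nn.init.xavier_uniform_(module.weight)
        nn.init.zeros_(module.bias)

# Placeholder data: 100 samples with `num_features` inputs and one target each.
# Shapes must be (N, num_features) and (N, n_out_neurons) so that preds and
# y_train align in the MSE loss without broadcasting.
X_train = torch.randn(100, num_features)
y_train = torch.randn(100, n_out_neurons)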