Advertisement
Not a member of Pastebin yet?
Sign up —
it unlocks many cool features!
# Originally marked "doesn't work" (#Не работает): the function assigned
# model/loss_fn/optimizer as *locals*, so the module-level train() kept
# training whatever stale model was bound at module scope.
def tryer(i):
    """Build, train and evaluate an MLP with *i* hidden blocks.

    Builds header -> i x (Linear/ReLU/BatchNorm1d) -> tail, trains it for
    20 epochs via the module-level train(), plots the train-loss curve,
    and returns [min train loss, i].

    NOTE(review): train() appears to read `model`, `loss_fn` and
    `optimizer` from module scope — hence the `global` declaration below;
    confirm against the definition of train().
    """
    global model, loss_fn, optimizer

    header = [
        nn.Linear(len(feature_columns), 100),
        nn.ReLU(),
        nn.Dropout(p=0.5),
    ]
    body = []
    for _ in range(i):
        body += [
            nn.Linear(100, 100),
            nn.ReLU(),
            nn.BatchNorm1d(100),
        ]
    tail = [nn.Linear(100, 3)]

    # Rebind the module-level names so train() sees the fresh objects.
    model = nn.Sequential(*header, *body, *tail)
    loss_fn = torch.nn.CrossEntropyLoss()
    optimizer = torch.optim.Adam(model.parameters(), lr=0.01)

    train_losses, test_losses = train(20)
    plt.plot(train_losses)
    plt.show()
    return [min(train_losses), i]
# Sweep hidden-block counts 1..4 and record the best train loss per depth.
# The original iterated range(5) and skipped i == 0 with `continue`;
# range(1, 5) says the same thing directly.
mi = []
it = []
for i in range(1, 5):
    # Re-seed before every run so depth is the only varying factor.
    torch.manual_seed(42)
    np.random.seed(42)
    best_loss, depth = tryer(i)
    mi.append(best_loss)
    it.append(depth)
plt.plot(mi)
# Marked "works" (#Работает): the inline variant keeps model/loss_fn/optimizer
# at module scope, so train() picks up the freshly built objects each pass.
# Sweeps 0..9 hidden blocks and collects the best train loss per depth.
mi = []
for i in range(10):
    # Re-seed each run so depth is the only varying factor.
    torch.manual_seed(42)
    np.random.seed(42)

    # Header, i hidden blocks, and the 3-class output head, in one list.
    layers = [
        nn.Linear(len(feature_columns), 100),
        nn.ReLU(),
        nn.Dropout(p=0.5),
    ]
    for _ in range(i):
        layers.extend([nn.Linear(100, 100), nn.ReLU(), nn.BatchNorm1d(100)])
    layers.append(nn.Linear(100, 3))

    model = nn.Sequential(*layers)
    loss_fn = torch.nn.CrossEntropyLoss()
    optimizer = torch.optim.Adam(model.parameters(), lr=0.01)

    train_losses, test_losses = train(20)
    mi.append(min(train_losses))
    plt.plot(train_losses)
    plt.show()

plt.plot(mi)
plt.show()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement