import torch
from torch.autograd import Variable
import torch.autograd as autograd
import torch.nn as nn

v_in = Variable(torch.Tensor([0.1, 0.1]).view(2, 1), requires_grad=True)

def forward(v_in):
    f1 = lambda x: x * 2
    # NOTE: a freshly (randomly) initialised Linear layer is built on every
    # call, so forward() is not a deterministic function of v_in -- that alone
    # makes gradcheck's finite-difference comparison meaningless.
    f2 = nn.Linear(1, 1)
    grad_out = Variable(torch.ones(2, 1))
    gradient = torch.autograd.grad(outputs=f2(f1(v_in)), inputs=v_in,
                                   grad_outputs=grad_out,
                                   create_graph=True, retain_graph=True,
                                   only_inputs=True)[0]
    out = gradient.sum()
    return out

# Returns False: forward() is non-deterministic (see above), and gradcheck
# also expects double-precision inputs, while v_in here is float32.
autograd.gradcheck(forward, [v_in])

forward(v_in).backward()
# v_in.grad is None: f2(f1(x)) is affine in x, so its first derivative is a
# constant that does not depend on v_in; the double-backward pass therefore
# never reaches v_in and no gradient is accumulated there.
v_in.grad
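
For comparison, here is a minimal sketch of a variant in which both problems go away, assuming the goal is a gradcheck-able double backward. The names (forward_fixed) and the cube nonlinearity are illustrative choices, not from the original paste: the function is deterministic, the input is double precision as gradcheck requires, and the first derivative genuinely depends on v_in, so the second backward pass reaches it.

import torch
from torch.autograd import Variable
import torch.autograd as autograd

# Double precision, as gradcheck's finite differences require.
v_in = Variable(torch.DoubleTensor([0.1, 0.1]).view(2, 1), requires_grad=True)

def forward_fixed(v_in):
    # (2x)^3 = 8x^3 replaces the affine f2(f1(x)); d/dx = 24x^2 depends on x.
    out = (2 * v_in) ** 3
    grad_out = Variable(torch.ones(2, 1).double())
    # retain_graph defaults to create_graph, so it need not be passed twice.
    gradient = autograd.grad(outputs=out, inputs=v_in,
                             grad_outputs=grad_out,
                             create_graph=True)[0]
    # Sum of 24 * x_i^2; its gradient w.r.t. x_i is 48 * x_i.
    return gradient.sum()

print(autograd.gradcheck(forward_fixed, [v_in]))  # True
forward_fixed(v_in).backward()
print(v_in.grad)  # now populated: 48 * v_in, i.e. 4.8 for each entry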