import torch

x = torch.randn(3, 3, requires_grad=True)
print(x)

# Two-sided clamp with a NaN lower bound.
# (Renamed min/max to min_val/max_val to avoid shadowing the builtins.)
min_val = float('nan')
max_val = 0.0
y = torch.clamp(x, min_val, max_val)
print('y', y)
y.sum().backward()
print('x.grad', x.grad)  # comes out all zeros

# One-sided clamp: only the upper bound is given.
y = torch.clamp(x, max=max_val)
y.sum().backward()  # accumulates into the existing x.grad
print('y', y)
print('x.grad', x.grad)

=====

import torch

x = torch.randn(3, 3, requires_grad=True)
print(x)

# Same experiment, with min=None instead of NaN.
min_val = None
max_val = 0.0
y = torch.clamp(x, min_val, max_val)
print('y', y)
y.sum().backward()
print('x.grad', x.grad)

y = torch.clamp(x, max=max_val)
y.sum().backward()  # accumulates into the existing x.grad
print('y', y)
print('x.grad', x.grad)

=====
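Example output from one run: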
tensor([[ 1.8375,  1.0008, -0.6007],
        [-0.3602,  0.4946, -1.0250],
        [-0.6579,  0.8329,  0.5419]], requires_grad=True)
y tensor([[ 0.0000,  0.0000, -0.6007],
        [-0.3602,  0.0000, -1.0250],
        [-0.6579,  0.0000,  0.0000]], grad_fn=<ClampBackward>)
x.grad tensor([[0., 0., 0.],
        [0., 0., 0.],
        [0., 0., 0.]])
y tensor([[ 0.0000,  0.0000, -0.6007],
        [-0.3602,  0.0000, -1.0250],
        [-0.6579,  0.0000,  0.0000]], grad_fn=<ClampMaxBackward>)
x.grad tensor([[0., 0., 1.],
        [1., 0., 1.],
        [1., 0., 0.]])
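
Why the first x.grad is all zeros: the two-sided clamp's backward only passes gradient where the input lies inside [min, max], and any comparison against NaN evaluates to False, so with min=NaN the mask is empty everywhere. ClampMaxBackward only checks the upper bound, which is why the second gradient comes out right. A minimal sketch of that mask logic (my reconstruction of the behavior, not PyTorch's actual backward code):

import torch

x = torch.randn(3, 3)

# Any comparison with NaN is False, so the two-sided mask is empty.
mask_two_sided = (x >= float('nan')) & (x <= 0.0)
print(mask_two_sided)   # all False -> zero gradient everywhere

# The one-sided clamp_max mask only checks the upper bound.
mask_max_only = x <= 0.0
print(mask_max_only)    # matches the 0/1 pattern of the second x.grad

Note also that both snippets call backward() twice without resetting x.grad, so the second printed gradient is accumulated onto the first; it only looks clean here because the first pass contributed all zeros.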