Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# Demonstrates torch.clamp autograd behavior when the lower bound is NaN.
#
# In the transcript pasted below this script, clamp(x, nan, 0.0) produces
# the same forward values as a max-only clamp, but x.grad comes back all
# zeros: the backward mask's comparisons against a NaN bound are False for
# every element. The max-only clamp afterwards yields the expected 0/1
# gradient mask. (Behavior observed in the PyTorch version used for the
# paste — NaN-bound semantics may differ across releases; verify locally.)
import torch

x = torch.randn(3, 3, requires_grad=True)
print(x)

lo = float('nan')  # renamed from `min` to avoid shadowing the builtin
hi = 0.0           # renamed from `max` to avoid shadowing the builtin

# Two-sided clamp with a NaN lower bound.
y = torch.clamp(x, lo, hi)
print('y', y)
y.sum().backward()
print('x.grad', x.grad)

# Max-only clamp on the same leaf tensor for comparison.
# NOTE: x.grad is deliberately NOT zeroed between backward calls, so this
# gradient accumulates onto the previous one (which was all zeros in the
# pasted run, so the printed values equal the second gradient alone).
y = torch.clamp(x, max=hi)
y.sum().backward()
print('y', y)
print('x.grad', x.grad)
- =====
# Control experiment: torch.clamp with the lower bound set to None instead
# of NaN. Per the torch.clamp API, a None bound means "no bound", so
# clamp(x, None, 0.0) is equivalent to clamp(x, max=0.0) and both backward
# passes produce the normal 0/1 gradient mask (1 where x passed through,
# 0 where the clamp was active) — unlike a NaN lower bound, which can
# silently zero the whole gradient.
import torch

x = torch.randn(3, 3, requires_grad=True)
print(x)

lo = None  # renamed from `min` to avoid shadowing the builtin
hi = 0.0   # renamed from `max` to avoid shadowing the builtin

# Max-only clamp expressed through the two-sided signature.
y = torch.clamp(x, lo, hi)
print('y', y)
y.sum().backward()
print('x.grad', x.grad)

# Same clamp via the keyword form. NOTE: x.grad is deliberately NOT zeroed
# between backward calls, so this second gradient accumulates onto the
# first — the final print shows the SUM of both passes (2 per unclamped
# element), not the second gradient alone.
y = torch.clamp(x, max=hi)
y.sum().backward()
print('y', y)
print('x.grad', x.grad)
- =====
- tensor([[ 1.8375, 1.0008, -0.6007],
- [-0.3602, 0.4946, -1.0250],
- [-0.6579, 0.8329, 0.5419]], requires_grad=True)
- y tensor([[ 0.0000, 0.0000, -0.6007],
- [-0.3602, 0.0000, -1.0250],
- [-0.6579, 0.0000, 0.0000]], grad_fn=<ClampBackward>)
- x.grad tensor([[0., 0., 0.],
- [0., 0., 0.],
- [0., 0., 0.]])
- y tensor([[ 0.0000, 0.0000, -0.6007],
- [-0.3602, 0.0000, -1.0250],
- [-0.6579, 0.0000, 0.0000]], grad_fn=<ClampMaxBackward>)
- x.grad tensor([[0., 0., 1.],
- [1., 0., 1.],
- [1., 0., 0.]])
Add Comment
Please, Sign In to add comment