Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- What is the equivalent of extending autograd functionality in MindSpore? And what is the equivalent module to autograd.Function in MindSpore? Thank you.
class adder(torch.autograd.Function):
    """L1-distance "adder" convolution kernel (AdderNet-style).

    Forward computes output[i, j] = -sum_k |W_col[i, k] - X_col[k, j]|,
    i.e. a negated L1 distance in place of the usual dot product.

    Backward deliberately does NOT return the true sign-based gradient:
    * the weight gradient uses the raw (unclipped) difference, rescaled to
      a fixed L2 norm proportional to sqrt(numel(W_col)) / 5;
    * the input gradient uses a HardTanh-clipped difference, so it stays
      bounded as it propagates to earlier layers.
    """

    @staticmethod
    def forward(ctx, W_col, X_col):
        # Stash both operands; backward needs them to form the difference tensor.
        ctx.save_for_backward(W_col, X_col)
        # Broadcast to (out, k, n): W_col is (out, k), X_col is (k, n).
        pairwise_diff = W_col.unsqueeze(2) - X_col.unsqueeze(0)
        return -pairwise_diff.abs().sum(1)

    @staticmethod
    def backward(ctx, grad_output):
        W_col, X_col = ctx.saved_tensors
        # delta has shape (out, k, n); grad_output broadcasts over the k axis.
        delta = X_col.unsqueeze(0) - W_col.unsqueeze(2)
        weighted = delta * grad_output.unsqueeze(1)
        # Full-precision weight gradient, renormalized to a fixed magnitude.
        grad_W_col = weighted.sum(2)
        grad_W_col = grad_W_col / grad_W_col.norm(p=2).clamp(min=1e-12) * math.sqrt(W_col.size(1) * W_col.size(0)) / 5
        # HardTanh-clipped input gradient (clip BEFORE weighting by grad_output).
        grad_X_col = (-delta.clamp(-1, 1) * grad_output.unsqueeze(1)).sum(0)
        return grad_W_col, grad_X_col
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement