Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# Gradient penalty (DRAGAN-style): sample points between real images and
# noise-perturbed real images, and drive the critic's gradient L2 norm at
# those points toward 1. Fragment of a training step; assumes real_x is a
# CUDA Variable of shape (B, C, H, W) and self.D returns (out, out_cls).
alpha = torch.rand(real_x.size(0), 1, 1, 1).cuda().expand_as(real_x)
# FIX: beta must be on the same device as real_x. The original created it on
# the CPU, which raises a device-mismatch error in the addition below when
# real_x lives on the GPU (as the surrounding .cuda() calls indicate).
beta = torch.rand(real_x.size()).cuda()
# Perturb real data by half its (scalar) standard deviation, then take a
# random convex combination of the real sample and its perturbed copy.
y = real_x.data + 0.5 * real_x.data.std() * beta
interpolated = real_x.data + alpha * (y - real_x.data)
interpolated = Variable(interpolated, requires_grad=True)
out, out_cls = self.D(interpolated)
# Gradient of both critic heads w.r.t. the interpolated input; create_graph
# keeps the penalty differentiable so it can be backpropagated through.
grad = torch.autograd.grad(outputs=[out, out_cls], inputs=interpolated,
                           grad_outputs=[torch.ones(out.size()).cuda(),
                                         torch.ones(out_cls.size()).cuda()],
                           retain_graph=True, create_graph=True,
                           only_inputs=True)[0]
# Flatten per-sample gradients and penalize deviation of their L2 norm from 1.
grad = grad.view(grad.size(0), -1)
d_loss_gp = ((grad.norm(p=2, dim=1) - 1) ** 2).mean()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement