🐛 Bug
torch.autograd.gradcheck doesn't work for a function with one argument.
To Reproduce
Steps to reproduce the behavior:
```python
import torch
from torch.autograd import Function, gradcheck


class Sum1(Function):
    @staticmethod
    def forward(ctx, x):
        ctx.save_for_backward(x)
        return x.sum()

    @staticmethod
    def backward(ctx, grad_output):
        grad_x = None
        x = ctx.saved_tensors
        if ctx.needs_input_grad[0]:
            grad_x = torch.ones_like(x) * grad_output
        return grad_x


class Sum2(Function):
    @staticmethod
    def forward(ctx, x, y):
        ctx.save_for_backward(x)
        return x.sum()

    @staticmethod
    def backward(ctx, grad_output):
        grad_x = grad_y = None
        x = ctx.saved_tensors
        if ctx.needs_input_grad[0]:
            grad_x = torch.ones_like(x) * grad_output
        return grad_x, grad_y


class Sum3(Function):
    @staticmethod
    def forward(ctx, x, y):
        ctx.save_for_backward(x)
        return x.sum()

    @staticmethod
    def backward(ctx, grad_output):
        grad_x = grad_y = None
        x = ctx.saved_tensors[0]
        if ctx.needs_input_grad[0]:
            grad_x = torch.ones_like(x) * grad_output
        return grad_x, grad_y


sum1 = Sum1.apply
sum2 = Sum2.apply
sum3 = Sum3.apply

a = torch.rand(10, dtype=torch.double, requires_grad=True)
b = torch.rand(10, dtype=torch.double)

gradcheck(sum3, (a, b))
gradcheck(sum2, (a, b))
gradcheck(sum1, a)
gradcheck(sum1, (a))
```

Expected behavior
`gradcheck(sum3, (a, b))` works. `gradcheck(sum2, (a, b))` fails because `ctx.saved_tensors` is a tuple, but I am not sure whether that is the intended behavior.
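Since `ctx.saved_tensors` always comes back as a tuple, even when only one tensor was saved, `backward` has to unpack it; this is what `Sum3` does with the `[0]` index. A minimal sketch of a fixed `Sum2.backward` along the same lines:

```python
@staticmethod
def backward(ctx, grad_output):
    grad_x = grad_y = None
    x, = ctx.saved_tensors  # saved_tensors is a tuple, even for a single saved tensor
    if ctx.needs_input_grad[0]:
        grad_x = torch.ones_like(x) * grad_output
    return grad_x, grad_y
```

The full traceback of the `sum2` failure: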
```
~/miniconda3/envs/pytorch-nightly/lib/python3.6/site-packages/torch/autograd/gradcheck.py in gradcheck(func, inputs, eps, atol, rtol, raise_exception)
    193         return _as_tuple(func(*input))[i]
    194
--> 195     analytical, reentrant, correct_grad_sizes = get_analytical_jacobian(tupled_inputs, o)
    196     numerical = get_numerical_jacobian(fn, inputs, eps=eps)
    197

~/miniconda3/envs/pytorch-nightly/lib/python3.6/site-packages/torch/autograd/gradcheck.py in get_analytical_jacobian(input, output)
     94     for jacobian_c in (jacobian, jacobian_reentrant):
     95         grads_input = torch.autograd.grad(output, diff_input_list, grad_output,
---> 96                                           retain_graph=True, allow_unused=True)
     97         for jacobian_x, d_x, x in zip(jacobian_c, grads_input, diff_input_list):
     98             if d_x is not None and d_x.size() != x.size():

~/miniconda3/envs/pytorch-nightly/lib/python3.6/site-packages/torch/autograd/__init__.py in grad(outputs, inputs, grad_outputs, retain_graph, create_graph, only_inputs, allow_unused)
    143         return Variable._execution_engine.run_backward(
    144             outputs, grad_outputs, retain_graph, create_graph,
--> 145             inputs, allow_unused)
    146
    147

~/miniconda3/envs/pytorch-nightly/lib/python3.6/site-packages/torch/autograd/function.py in apply(self, *args)
     74
     75     def apply(self, *args):
---> 76         return self._forward_cls.backward(self, *args)
     77
     78

<ipython-input-1-1f2fe1d5eb24> in backward(ctx, grad_output)
     32
     33         if ctx.needs_input_grad[0]:
---> 34             grad_x = torch.ones_like(x) * grad_output
     35
     36         return grad_x, grad_y

TypeError: ones_like() received an invalid combination of arguments - got (tuple), but expected one of:
 * (Tensor input, torch.dtype dtype, torch.layout layout, torch.device device, bool requires_grad)
 * (Tensor input, bool requires_grad)
```

`gradcheck(sum1, a)` and `gradcheck(sum1, (a))` fail because `func(*inputs)` is expanded over the zeroth dimension of the tensor.
```
~/miniconda3/envs/pytorch-nightly/lib/python3.6/site-packages/torch/autograd/gradcheck.py in gradcheck(func, inputs, eps, atol, rtol, raise_exception)
    179                         'but none of the them have requires_grad=True.')
    180
--> 181     output = _differentiable_outputs(func(*inputs))
    182
    183     def fail_test(msg):

TypeError: forward() takes 2 positional arguments but 11 were given
```
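Note that `(a)` is not a one-element tuple in Python, it is just `a` in parentheses, so both failing calls pass a bare tensor to `gradcheck`, which then unpacks it element by element via `func(*inputs)`. As a workaround sketch (assuming the single input is meant to be wrapped explicitly), a trailing comma makes it a proper one-element tuple:

```python
gradcheck(sum1, (a,))  # one-element tuple: forward(ctx, x) receives the whole tensor
```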
Environment

- PyTorch Version: 1.0.0.dev20181102
- OS: Ubuntu 16.04.5 LTS
- GCC version: (GCC) 4.8.5
- CMake version: version 3.11.1
- How you installed PyTorch (conda, pip, source): pip torch_nightly
- Python version: 3.6.6