Fix _check_no_differentiable_outputs for forward ad (pytorch#91391)
The `is_forward_ad` flag isn't propagated from `_check_no_differentiable_outputs` down to
`_get_numerical_jacobian`, so the check below raises and causes a slow-gradcheck failure on master:
```
    if not is_forward_ad and any(o.is_complex() for o in outputs):
        raise ValueError("Expected output to be non-complex. get_numerical_jacobian no "
                         "longer supports functions that return complex outputs.")
```
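
For context, here is a minimal repro sketch (my own illustration, not code from the PR): a function whose only output is a constant complex tensor has no differentiable outputs, so slow-mode forward-AD gradcheck goes through `_check_no_differentiable_outputs`, which is exactly where the flag was being dropped. The function `fn` and the particular gradcheck flags are assumptions chosen to isolate that code path.
```
import torch
from torch.autograd import gradcheck

# Illustrative only: a constant complex output has requires_grad=False,
# so gradcheck finds no differentiable outputs and dispatches to
# _check_no_differentiable_outputs.
def fn(x):
    return torch.zeros_like(x, dtype=torch.complex128)

x = torch.randn(3, dtype=torch.double, requires_grad=True)

# Before this fix, slow-mode forward-AD gradcheck raised the ValueError
# quoted above because is_forward_ad was not forwarded to
# _get_numerical_jacobian; with the fix, the complex-output check is
# skipped and the all-zero numerical jacobian passes.
gradcheck(fn, (x,), check_forward_ad=True, check_backward_ad=False,
          check_undefined_grad=False, check_batched_grad=False,
          fast_mode=False)
```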

Pull Request resolved: pytorch#91391
Approved by: https://github.com/albanD
peterbell10 authored and pytorchmergebot committed Dec 27, 2022
1 parent a061f13 commit bb24185
Showing 1 changed file with 5 additions and 3 deletions.
8 changes: 5 additions & 3 deletions torch/autograd/gradcheck.py
```
@@ -725,10 +725,11 @@ def _check_outputs(outputs) -> None:
                              'Please call to_dense() on the output of fn for gradcheck.')
 
 
-def _check_no_differentiable_outputs(func, inputs, func_out, eps) -> bool:
+def _check_no_differentiable_outputs(func, inputs, func_out, eps, *, is_forward_ad) -> bool:
     # When there are no differentiable outputs, numerical gradient for a function is
     # expected to be zero.
-    jacobians_all_inputs_outputs = _get_numerical_jacobian(func, inputs, func_out, eps=eps)
+    jacobians_all_inputs_outputs = _get_numerical_jacobian(func, inputs, func_out,
+                                                           eps=eps, is_forward_ad=is_forward_ad)
     for jacobians_all_outputs_and_fixed_input in jacobians_all_inputs_outputs:
         for jacobian in jacobians_all_outputs_and_fixed_input:
             if torch.ne(jacobian, 0).sum() > 0:
@@ -1143,7 +1144,8 @@ def _slow_gradcheck(func, func_out, tupled_inputs, outputs, eps, rtol, atol, check_grad_dtypes,
                     nondet_tol, *, use_forward_ad=False, complex_indices=None, test_imag=False):
     func_out = _as_tuple(func_out)
     if not outputs:
-        return _check_no_differentiable_outputs(func, tupled_inputs, func_out, eps)
+        return _check_no_differentiable_outputs(func, tupled_inputs, func_out,
+                                                eps=eps, is_forward_ad=use_forward_ad)
 
     numerical = _transpose(_get_numerical_jacobian(func, tupled_inputs, func_out, eps=eps, is_forward_ad=use_forward_ad))
     # Note: [numerical vs analytical output length]
```
