6 changes: 4 additions & 2 deletions torch/_refs/__init__.py
@@ -6607,8 +6607,10 @@ def tensor(data, *, dtype=None, device=None, pin_memory=False, requires_grad=Fal
     # TODO (or not): support names kwarg
     if isinstance(data, torch.Tensor):
         warnings.warn(
-            "To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() "
-            "or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor)"
+            "To copy construct from a tensor, it is recommended to use sourceTensor.detach().clone() "
+            "or sourceTensor.detach().clone().requires_grad_(True), rather than torch.tensor(sourceTensor)",
+            UserWarning,
+            stacklevel=2,
         )
     type_inference = dtype is None
     new_tensor = _internal_new_from_data(
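Why the recommended order flips: detaching first removes the tensor from the autograd graph before the copy is made, so the clone itself is never recorded; cloning first records the copy in the graph only for detach() to throw that bookkeeping away. A minimal sketch of the behavior, for illustration:

import torch

t = torch.ones(3, requires_grad=True)

# Recommended order: detach is O(1) and grad-free, so the subsequent
# clone is never tracked by autograd.
a = t.detach().clone()

# Discouraged order: the clone is recorded in the autograd graph, and
# that history is immediately discarded by detach().
b = t.clone().detach()

# Either way the result is an independent leaf with no grad tracking.
assert not a.requires_grad and not b.requires_grad
assert a.data_ptr() != t.data_ptr()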
6 changes: 3 additions & 3 deletions torch/_tensor_docs.py
@@ -57,9 +57,9 @@ def add_docstr_all(method, docstr):
 .. warning::
 
     When data is a tensor `x`, :func:`new_tensor()` reads out 'the data' from whatever it is passed,
-    and constructs a leaf variable. Therefore ``tensor.new_tensor(x)`` is equivalent to ``x.clone().detach()``
-    and ``tensor.new_tensor(x, requires_grad=True)`` is equivalent to ``x.clone().detach().requires_grad_(True)``.
-    The equivalents using ``clone()`` and ``detach()`` are recommended.
+    and constructs a leaf variable. Therefore ``tensor.new_tensor(x)`` is equivalent to ``x.detach().clone()``
+    and ``tensor.new_tensor(x, requires_grad=True)`` is equivalent to ``x.detach().clone().requires_grad_(True)``.
+    The equivalents using ``detach()`` and ``clone()`` are recommended.
 
 Args:
     data (array_like): The returned Tensor copies :attr:`data`.
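A quick check of the documented equivalence (a sketch; the tensor names are illustrative):

import torch

base = torch.zeros(2)                      # supplies dtype/device for new_tensor
x = torch.arange(4.0, requires_grad=True)

y1 = base.new_tensor(x)                    # copies the data, returns a leaf
y2 = x.detach().clone()

assert torch.equal(y1, y2)
assert y1.is_leaf and not y1.requires_grad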
4 changes: 2 additions & 2 deletions torch/_torch_docs.py
@@ -9034,8 +9034,8 @@ def merge_dicts(*dicts):
     When working with tensors prefer using :func:`torch.Tensor.clone`,
     :func:`torch.Tensor.detach`, and :func:`torch.Tensor.requires_grad_` for
     readability. Letting `t` be a tensor, ``torch.tensor(t)`` is equivalent to
-    ``t.clone().detach()``, and ``torch.tensor(t, requires_grad=True)``
-    is equivalent to ``t.clone().detach().requires_grad_(True)``.
+    ``t.detach().clone()``, and ``torch.tensor(t, requires_grad=True)``
+    is equivalent to ``t.detach().clone().requires_grad_(True)``.
 
 .. seealso::
 
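Assuming the updated message in this PR, the copy-construction warning can be observed directly (a sketch):

import warnings
import torch

t = torch.randn(3, requires_grad=True)

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    copied = torch.tensor(t)  # copy-constructs and emits the UserWarning

assert any("detach().clone()" in str(w.message) for w in caught)
assert copied.is_leaf and not copied.requires_grad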
4 changes: 2 additions & 2 deletions torch/csrc/utils/tensor_new.cpp
@@ -1458,8 +1458,8 @@ Tensor tensor_ctor(
   if (THPVariable_Check(data)) {
    auto ret = PyErr_WarnEx(
        PyExc_UserWarning,
-        "To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() "
-        "or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).",
+        "To copy construct from a tensor, it is recommended to use sourceTensor.detach().clone() "
+        "or sourceTensor.detach().clone().requires_grad_(True), rather than torch.tensor(sourceTensor).",
         1);
     if (ret != 0)
       throw python_error();
2 changes: 1 addition & 1 deletion torch/masked/maskedtensor/core.py
@@ -170,7 +170,7 @@ def __new__(cls, data, mask, requires_grad=False):
         if data.requires_grad:
             warnings.warn(
                 "It is not recommended to create a MaskedTensor with a tensor that requires_grad. "
-                "To avoid this, you can use data.clone().detach()",
+                "To avoid this, you can use data.detach().clone()",
                 UserWarning,
                 stacklevel=2,
             )
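On the caller side the suggested fix looks like this (a sketch; MaskedTensor is a prototype API, and the sample data here is illustrative):

import torch
from torch.masked import MaskedTensor

data = torch.randn(4, requires_grad=True)
mask = torch.tensor([True, False, True, True])

# Passing grad-tracking data directly would trigger the warning above;
# detaching and cloning first hands MaskedTensor an independent leaf.
mt = MaskedTensor(data.detach().clone(), mask)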
2 changes: 1 addition & 1 deletion torch/testing/_internal/common_optimizers.py
@@ -2308,7 +2308,7 @@ def __init__(self, assert_eq_kwargs=None):
 
     def add(self, tensor):
         """
-        Add a clone().detach()'d version of the tensor
+        Add a detach().clone()'d version of the tensor
         """
         self.tensors.append(tensor.detach().clone())
 
8 changes: 4 additions & 4 deletions torch/testing/_internal/opinfo/definitions/nested.py
@@ -226,7 +226,7 @@ def _raggedness_matches(nt1, nt2):
 # as this causes autograd problems.
 def _clone(t):
     requires_grad = t.requires_grad
-    return t.clone().detach().requires_grad_(requires_grad)
+    return t.detach().clone().requires_grad_(requires_grad)
 
 
 # Helper function to update a sample with new kwargs / name
@@ -1316,10 +1316,10 @@ def _get_njts():
     # non-contiguous transposed
     yield njt.transpose(1, 3)
     # non-contiguous with holes
-    values = njt.values().clone().detach()
-    offsets = njt.offsets().clone().detach()
+    values = njt.values().detach().clone()
+    offsets = njt.offsets().detach().clone()
     # subtract 1 to cause holes
-    lengths = (offsets.diff() - 1).clone().detach()
+    lengths = (offsets.diff() - 1).detach().clone()
     yield torch.nested.nested_tensor_from_jagged(
         values=values,
         offsets=offsets,
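The _clone helper in this file relies on the copy being a fresh leaf so that requires_grad_ can be re-applied cleanly; a small sketch of that invariant:

import torch

def _clone(t):
    # Same pattern as the helper above: copy without autograd history,
    # then restore grad tracking on the copy.
    requires_grad = t.requires_grad
    return t.detach().clone().requires_grad_(requires_grad)

src = torch.randn(2, 3, requires_grad=True)
cp = _clone(src)

# The copy tracks grad but is a leaf with no link to src's graph.
assert cp.requires_grad and cp.is_leaf and cp.grad_fn is None

# Mutating the copy leaves the source untouched.
with torch.no_grad():
    cp.zero_()
assert not torch.equal(cp, src)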