Skip to content

Commit b9c6178

Browse files
pmeier authored and pytorchmergebot committed
remove make_non_contiguous from torch.testing (#87973)
See #87969 or #86586 for the reasoning. Pull Request resolved: #87973 Approved by: https://github.com/mruberry
1 parent 8893c6c commit b9c6178

File tree

1 file changed

+1
-43
lines changed

1 file changed

+1
-43
lines changed

torch/testing/_deprecated.py

Lines changed: 1 addition & 43 deletions
Original file line numberDiff line numberDiff line change
@@ -4,17 +4,13 @@
44
"""
55

66
import functools
7-
import random
87
import warnings
98
from typing import Any, Callable, Dict, Optional, Tuple, Union
109

1110
import torch
1211

1312

14-
__all__ = [
15-
"assert_allclose",
16-
"make_non_contiguous",
17-
]
13+
__all__ = ["assert_allclose"]
1814

1915

2016
def warn_deprecated(instructions: Union[str, Callable[[str, Tuple[Any, ...], Dict[str, Any], Any], str]]) -> Callable:
@@ -79,41 +75,3 @@ def assert_allclose(
7975
check_stride=False,
8076
msg=msg or None,
8177
)
82-
83-
84-
@warn_deprecated(
85-
"Depending on the use case there a different replacement options:\n\n"
86-
"- If you are using `make_non_contiguous` in combination with a creation function to create a noncontiguous tensor "
87-
"with random values, use `torch.testing.make_tensor(..., noncontiguous=True)` instead.\n"
88-
"- If you are using `make_non_contiguous` with a specific tensor, you can replace this call with "
89-
"`torch.repeat_interleave(input, 2, dim=-1)[..., ::2]`.\n"
90-
"- If you are using `make_non_contiguous` in the PyTorch test suite, use "
91-
"`torch.testing._internal.common_utils.noncontiguous_like` instead."
92-
)
93-
def make_non_contiguous(tensor: torch.Tensor) -> torch.Tensor:
94-
if tensor.numel() <= 1: # can't make non-contiguous
95-
return tensor.clone()
96-
osize = list(tensor.size())
97-
98-
# randomly inflate a few dimensions in osize
99-
for _ in range(2):
100-
dim = random.randint(0, len(osize) - 1)
101-
add = random.randint(4, 15)
102-
osize[dim] = osize[dim] + add
103-
104-
# narrow doesn't make a non-contiguous tensor if we only narrow the 0-th dimension,
105-
# (which will always happen with a 1-dimensional tensor), so let's make a new
106-
# right-most dimension and cut it off
107-
108-
input = tensor.new(torch.Size(osize + [random.randint(2, 3)]))
109-
input = input.select(len(input.size()) - 1, random.randint(0, 1))
110-
# now extract the input of correct size from 'input'
111-
for i in range(len(osize)):
112-
if input.size(i) != tensor.size(i):
113-
bounds = random.randint(1, input.size(i) - tensor.size(i))
114-
input = input.narrow(i, bounds, tensor.size(i))
115-
116-
input.copy_(tensor)
117-
118-
# Use .data here to hide the view relation between input and other temporary Tensors
119-
return input.data

0 commit comments

Comments (0)