Commit dbb2c57

[dtensor] delete unused torch_function
torch_function is not actually being used today; delete it first, and we can revisit once we really need it. [ghstack-poisoned]
1 parent: ecd4186


torch/distributed/_tensor/api.py

Lines changed: 0 additions & 17 deletions
@@ -214,23 +214,6 @@ def __repr__(self):
         # TODO: consider all_gather the local tensors for better debugging
         return f"DTensor(local_tensor={self._local_tensor}, device_mesh={self._spec.mesh}, placements={self._spec.placements})"
 
-    @classmethod
-    # pyre-fixme[3]: Return type must be annotated.
-    # pyre-fixme[2]: Parameter must be annotated.
-    def __torch_function__(cls, func, types, args=(), kwargs=None):
-        if kwargs is None:
-            kwargs = {}
-        # if we find nn.functional name in dispatch op, dispatch to it instead,
-        # this allow us to override some python level behaviors that wouldn't be
-        # possible in __torch_dispatch__ level.
-        if func.__name__ in DTensor._custom_dispatch_ops:
-            # dispatch to the same table as the name should be different between
-            # torch_function and torch_dispatch
-            return DTensor._custom_dispatch_ops[func.__name__](*args, **kwargs)
-        else:
-            # if not, just do nothing here
-            return super().__torch_function__(func, types, args, kwargs)
-
     @classmethod
     # pyre-fixme[3]: Return type must be annotated.
     # pyre-fixme[2]: Parameter must be annotated.
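For context, the removed method follows the generic __torch_function__ protocol that any torch.Tensor subclass can implement; an override runs at the Python API level, before __torch_dispatch__ sees the decomposed ATen ops. Below is a minimal, self-contained sketch of that protocol, not part of this commit; the LoggingTensor class is a hypothetical subclass used only for illustration.

    # Minimal sketch of the __torch_function__ protocol (illustration only,
    # not DTensor's removed implementation).
    import torch

    class LoggingTensor(torch.Tensor):
        @classmethod
        def __torch_function__(cls, func, types, args=(), kwargs=None):
            if kwargs is None:
                kwargs = {}
            # Intercepts the call at the Python API level, before
            # __torch_dispatch__ sees the underlying ATen ops.
            print(f"intercepted {func}")
            # Fall back to the default behavior for everything else.
            return super().__torch_function__(func, types, args, kwargs)

    x = torch.randn(2, 2).as_subclass(LoggingTensor)
    y = x + x  # triggers __torch_function__ once for the add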
