Revert "[MTIA] (3/n) Implement PyTorch APIs to query/reset device pea…
Browse files Browse the repository at this point in the history
…k memory usage (pytorch#143347)"

This reverts commit efe21ee.

Reverted pytorch#143347 on behalf of https://github.com/huydhn because D67118173 has been backed out internally (see the comment on pytorch#143347).
pytorchmergebot committed Dec 21, 2024
1 parent dabc956 · commit c7d7eff
Showing 3 changed files with 11 additions and 14 deletions.
1 change: 0 additions & 1 deletion docs/source/mtia.memory.rst
@@ -11,4 +11,3 @@ The MTIA backend is implemented out of the tree, only interfaces are be defined
     :nosignatures:
 
     memory_stats
-    max_memory_allocated
1 change: 0 additions & 1 deletion docs/source/mtia.rst
@@ -19,7 +19,6 @@ The MTIA backend is implemented out of the tree, only interfaces are be defined
     is_available
     is_initialized
     memory_stats
-    max_memory_allocated
     get_device_capability
     empty_cache
     record_memory_history
23 changes: 11 additions & 12 deletions torch/mtia/memory.py
@@ -10,30 +10,29 @@
 from ._utils import _get_device_index
 
 
-def max_memory_allocated(device: Optional[_device_t] = None) -> int:
-    r"""Return the maximum memory allocated in bytes for a given device.
+def memory_stats(device: Optional[_device_t] = None) -> Dict[str, Any]:
+    r"""Return a dictionary of MTIA memory allocator statistics for a given device.
 
     Args:
         device (torch.device, str, or int, optional) selected device. Returns
             statistics for the current device, given by current_device(),
             if device is None (default).
     """
     if not is_initialized():
-        return 0
-    return memory_stats(device).get("dram", 0).get("peak_bytes", 0)
+        return {}
+    return torch._C._mtia_memoryStats(_get_device_index(device, optional=True))
 
 
-def memory_stats(device: Optional[_device_t] = None) -> Dict[str, Any]:
-    r"""Return a dictionary of MTIA memory allocator statistics for a given device.
+def max_memory_allocated(device: Optional[_device_t] = None) -> int:
+    r"""Return the maximum memory allocated in bytes for a given device.
 
     Args:
-        device (torch.device, str, or int, optional) selected device. Returns
-            statistics for the current device, given by current_device(),
-            if device is None (default).
+        device (torch.device or int, optional): selected device. Returns
+            statistic for the current device, given by :func:`~torch.mtia.current_device`,
+            if :attr:`device` is ``None`` (default).
     """
-    if not is_initialized():
-        return {}
-    return torch._C._mtia_memoryStats(_get_device_index(device, optional=True))
+
+    return memory_stats(device=device).get("allocated_bytes.all.peak", 0)
 
 
 __all__ = [
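
For reference, a minimal usage sketch of the restored torch/mtia/memory.py, assuming an MTIA-enabled PyTorch build with at least one device; the statistics keys come from the out-of-tree MTIA runtime, so only names visible in this diff are used, and dev is a hypothetical device index chosen for illustration.

import torch
from torch.mtia import memory as mtia_memory  # module changed by this commit

dev = 0  # hypothetical device index, for illustration only

if torch.mtia.is_available():
    # Full allocator statistics dict, backed by torch._C._mtia_memoryStats.
    stats = mtia_memory.memory_stats(dev)

    # Peak allocation in bytes; after this revert it is read from the flat
    # "allocated_bytes.all.peak" key instead of the nested dram/peak_bytes one.
    peak = mtia_memory.max_memory_allocated(dev)

    print(f"peak allocated on mtia:{dev}: {peak} bytes")
    print(f"reported stat keys: {list(stats)}")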
