From cf92544e2067a210bdfb5b38bb3351c6c7562bfd Mon Sep 17 00:00:00 2001
From: Yi Wang
Date: Thu, 25 Jul 2024 04:33:18 -0700
Subject: [PATCH] Calls tensor.untyped_storage instead of tensor.storage (#59)

---
 pytorch_memlab/mem_reporter.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pytorch_memlab/mem_reporter.py b/pytorch_memlab/mem_reporter.py
index 9ac3f8f..536d220 100644
--- a/pytorch_memlab/mem_reporter.py
+++ b/pytorch_memlab/mem_reporter.py
@@ -92,7 +92,7 @@ def get_tensor_stat(tensor: torch.Tensor) -> List[Tuple[str, int, int, int]]:
 
             numel = tensor.numel()
             element_size = tensor.element_size()
-            fact_numel = tensor.storage().size()
+            fact_numel = tensor.untyped_storage().size()
             fact_memory_size = fact_numel * element_size
             # since pytorch allocate at least 512 Bytes for any tensor, round
             # up to a multiple of 512
@@ -101,7 +101,7 @@ def get_tensor_stat(tensor: torch.Tensor) -> List[Tuple[str, int, int, int]]:
 
             # tensor.storage should be the actual object related to memory
             # allocation
-            data_ptr = tensor.storage().data_ptr()
+            data_ptr = tensor.untyped_storage().data_ptr()
            if data_ptr in visited_data:
                name = '{}(->{})'.format(
                    name,
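
Note (editor's illustration, not part of the patch): the snippet below is a minimal sketch of the API the patch switches to, assuming a PyTorch version that provides Tensor.untyped_storage(). Tensor.storage() returns a TypedStorage and is deprecated in newer PyTorch releases, while Tensor.untyped_storage() exposes the underlying allocation; its data_ptr() is what the reporter uses in the second hunk to detect tensors that share memory, such as views.

import torch

# Minimal sketch, assuming Tensor.untyped_storage() is available (PyTorch 2.x).
base = torch.arange(16, dtype=torch.float32)
view = base.view(4, 4)

# A view shares the base tensor's allocation, so both untyped storages
# report the same data pointer, which is the kind of check the reporter
# performs against visited_data in the second hunk above.
assert base.untyped_storage().data_ptr() == view.untyped_storage().data_ptr()

# Size of the backing allocation in bytes.
print(base.untyped_storage().nbytes())  # 64 (16 elements * 4 bytes each)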