File tree Expand file tree Collapse file tree 2 files changed +22
-4
lines changed Expand file tree Collapse file tree 2 files changed +22
-4
lines changed Original file line number Diff line number Diff line change 4040from dataset_simple import LatentEmbedDataset
4141
4242import sys
43-
44-
45- sys .path .append (".." )
46-
4743from utils import print_memory , reset_memory # isort:skip
4844
4945
Original file line number Diff line number Diff line change 1+ import gc
2+ import inspect
3+ from typing import Optional , Tuple , Union
4+
5+ import torch
6+
7+ logger = get_logger (__name__ )
8+
def reset_memory(device: Union[str, torch.device]) -> None:
    """Free cached CUDA memory and reset the allocator's statistics.

    Runs a Python garbage-collection pass, releases cached blocks held by
    the CUDA caching allocator, and zeroes both the peak and accumulated
    memory counters for ``device`` so that subsequent measurements start
    from a clean baseline.

    Args:
        device: The CUDA device (e.g. ``"cuda:0"`` or a ``torch.device``)
            whose memory statistics should be reset.
    """
    gc.collect()
    # The reset_*_stats calls raise on CPU-only builds or hosts without a
    # GPU; bail out after the GC pass instead of crashing there.
    if not torch.cuda.is_available():
        return
    torch.cuda.empty_cache()
    torch.cuda.reset_peak_memory_stats(device)
    torch.cuda.reset_accumulated_memory_stats(device)
def print_memory(device: Union[str, torch.device]) -> None:
    """Print current, peak-allocated, and peak-reserved CUDA memory (GiB)."""
    gib = 1024**3
    current_gb = torch.cuda.memory_allocated(device) / gib
    peak_alloc_gb = torch.cuda.max_memory_allocated(device) / gib
    peak_reserved_gb = torch.cuda.max_memory_reserved(device) / gib
    # Labels mirror the original `f"{name=:.3f}"` self-documenting output.
    print(f"memory_allocated={current_gb:.3f} GB")
    print(f"max_memory_allocated={peak_alloc_gb:.3f} GB")
    print(f"max_memory_reserved={peak_reserved_gb:.3f} GB")
You can’t perform that action at this time.
0 commit comments