-
Notifications
You must be signed in to change notification settings - Fork 1.9k
Expand file tree
/
Copy pathexp_basic.py
More file actions
114 lines (88 loc) · 3.47 KB
/
exp_basic.py
File metadata and controls
114 lines (88 loc) · 3.47 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
import os
import torch
import importlib
import pkgutil
# Just put your model files under models/ folder
# e.g., models/Transformer.py, models/LSTM.py, etc.
# All models will be automatically detected and can be used by specifying their names.
class Exp_Basic(object):
    """Base experiment class.

    Discovers model modules under a ``models/`` directory and exposes them
    through a lazy-loading dictionary (``self.model_dict``), so a model's
    module (and its heavy dependencies) is imported only on first use.
    Subclasses must implement ``_build_model`` and typically override the
    ``_get_data`` / ``vali`` / ``train`` / ``test`` hooks.
    """

    def __init__(self, args):
        # args: experiment configuration namespace; must provide the GPU
        # fields read by _acquire_device (use_gpu, gpu_type, gpu, ...).
        self.args = args
        # -------------------------------------------------------
        # Automatically generate model map
        # -------------------------------------------------------
        model_map = self._scan_models_directory()
        # Use smart dictionary
        self.model_dict = LazyModelDict(model_map)
        self.device = self._acquire_device()
        self.model = self._build_model().to(self.device)

    def _scan_models_directory(self, models_dir='models'):
        """
        Automatically scan all .py files in the models folder.

        Parameters
        ----------
        models_dir : str
            Directory to scan (default ``'models'``, relative to the
            current working directory).

        Returns
        -------
        dict
            Module name -> dotted import path, e.g.
            ``{'Transformer': 'models.Transformer'}``. Empty when the
            directory does not exist.
        """
        model_map = {}
        if os.path.exists(models_dir):
            # sorted() makes the map's insertion order deterministic
            # across operating systems (os.listdir order is arbitrary).
            for filename in sorted(os.listdir(models_dir)):
                # Ignore __init__.py and non-.py files
                if filename.endswith('.py') and filename != '__init__.py':
                    module_name = filename[:-3]  # strip the ".py" extension
                    # e.g. {'Transformer': 'models.Transformer'}
                    model_map[module_name] = f"{models_dir}.{module_name}"
        return model_map

    def _build_model(self):
        """Construct and return the model; must be overridden by subclasses."""
        raise NotImplementedError

    def _acquire_device(self):
        """Select the torch device from ``self.args``: cuda, mps, or cpu."""
        if self.args.use_gpu and self.args.gpu_type == 'cuda':
            # Restrict visible devices before creating the torch device.
            os.environ["CUDA_VISIBLE_DEVICES"] = str(
                self.args.gpu) if not self.args.use_multi_gpu else self.args.devices
            device = torch.device('cuda:{}'.format(self.args.gpu))
            print('Use GPU: cuda:{}'.format(self.args.gpu))
        elif self.args.use_gpu and self.args.gpu_type == 'mps':
            device = torch.device('mps')
            print('Use GPU: mps')
        else:
            device = torch.device('cpu')
            print('Use CPU')
        return device

    def _get_data(self):
        # Hook: subclasses return their dataset / dataloader here.
        pass

    def vali(self):
        # Hook: validation loop.
        pass

    def train(self):
        # Hook: training loop.
        pass

    def test(self):
        # Hook: evaluation loop.
        pass
class LazyModelDict(dict):
    """
    Smart Lazy-Loading Dictionary

    Maps a model name to its class; the backing module is imported only on
    the first ``d[name]`` access and the resulting class is cached in the
    underlying dict so later lookups are plain dict hits.
    """

    def __init__(self, model_map):
        # model_map: {'Transformer': 'models.Transformer', ...}
        # Nothing is imported here — imports happen lazily in __getitem__.
        self.model_map = model_map
        super().__init__()

    def __contains__(self, key):
        # Fix: membership must agree with __getitem__. A key is "present"
        # when its class is already cached OR its module is known and could
        # be lazily imported (the original reported False for the latter).
        return super().__contains__(key) or key in self.model_map

    def __getitem__(self, key):
        """Return the model class for *key*, importing its module on first use.

        Raises
        ------
        NotImplementedError
            If *key* has no module in the model map.
        ImportError
            If the module exists but fails to import.
        AttributeError
            If the module defines neither ``Model`` nor a class named *key*.
        """
        # Fast path: already imported and cached. Use dict.__contains__
        # explicitly so the lazy path is not short-circuited by the
        # map-aware __contains__ above.
        if super().__contains__(key):
            return super().__getitem__(key)
        if key not in self.model_map:
            raise NotImplementedError(f"Model [{key}] not found in 'models' directory.")
        module_path = self.model_map[key]
        try:
            print(f"🚀 Lazy Loading: {key} ...")
            module = importlib.import_module(module_path)
        except ImportError as e:
            print(f"❌ Error: Failed to import model [{key}]. Dependencies missing?")
            raise e
        # Prefer the conventional 'Model' class; fall back to a class named
        # after the module itself.
        if hasattr(module, 'Model'):
            model_class = module.Model
        elif hasattr(module, key):
            model_class = getattr(module, key)
        else:
            raise AttributeError(f"Module {module_path} has no class 'Model' or '{key}'")
        # Cache so subsequent lookups skip the import machinery entirely.
        self[key] = model_class
        return model_class