Skip to content

Commit 3a7e9f6

Browse files
authored
Add unit test for classification task and configurer (#2035)
1 parent 87efe8e commit 3a7e9f6

File tree

3 files changed

+679
-2
lines changed

3 files changed

+679
-2
lines changed
Lines changed: 266 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,266 @@
1+
import copy
2+
import os
3+
4+
import pytest
5+
import tempfile
6+
from mmcv.utils import ConfigDict
7+
8+
from otx.algorithms.common.adapters.mmcv.utils.config_utils import MPAConfig
9+
from otx.algorithms.classification.adapters.mmcls.configurer import (
10+
ClassificationConfigurer,
11+
IncrClassificationConfigurer,
12+
SemiSLClassificationConfigurer,
13+
)
14+
from tests.test_suite.e2e_test_system import e2e_pytest_unit
15+
from tests.unit.algorithms.classification.test_helper import DEFAULT_CLS_TEMPLATE_DIR
16+
17+
18+
class TestClassificationConfigurer:
    """Unit tests for ClassificationConfigurer's configure_* pipeline steps."""

    @pytest.fixture(autouse=True)
    def setup(self) -> None:
        """Build a fresh configurer plus model/data configs before every test."""
        self.configurer = ClassificationConfigurer()
        self.model_cfg = MPAConfig.fromfile(os.path.join(DEFAULT_CLS_TEMPLATE_DIR, "model.py"))
        self.data_cfg = MPAConfig.fromfile(os.path.join(DEFAULT_CLS_TEMPLATE_DIR, "data_pipeline.py"))

    @e2e_pytest_unit
    def test_configure(self, mocker):
        """configure() must call every configure_* step exactly once with the expected args."""
        mock_cfg_base = mocker.patch.object(ClassificationConfigurer, "configure_base")
        mock_cfg_device = mocker.patch.object(ClassificationConfigurer, "configure_device")
        mock_cfg_ckpt = mocker.patch.object(ClassificationConfigurer, "configure_ckpt")
        mock_cfg_model = mocker.patch.object(ClassificationConfigurer, "configure_model")
        mock_cfg_data = mocker.patch.object(ClassificationConfigurer, "configure_data")
        mock_cfg_task = mocker.patch.object(ClassificationConfigurer, "configure_task")
        mock_cfg_hook = mocker.patch.object(ClassificationConfigurer, "configure_hook")
        mock_cfg_gpu = mocker.patch.object(ClassificationConfigurer, "configure_samples_per_gpu")
        mock_cfg_fp16_optimizer = mocker.patch.object(ClassificationConfigurer, "configure_fp16_optimizer")
        mock_cfg_compat_cfg = mocker.patch.object(ClassificationConfigurer, "configure_compat_cfg")

        model_cfg = copy.deepcopy(self.model_cfg)
        data_cfg = copy.deepcopy(self.data_cfg)
        returned_value = self.configurer.configure(model_cfg, "", data_cfg, True)
        mock_cfg_base.assert_called_once_with(model_cfg, data_cfg, None, None)
        mock_cfg_device.assert_called_once_with(model_cfg, True)
        mock_cfg_ckpt.assert_called_once_with(model_cfg, "")
        mock_cfg_model.assert_called_once_with(model_cfg, None)
        mock_cfg_data.assert_called_once_with(model_cfg, True, data_cfg)
        mock_cfg_task.assert_called_once_with(model_cfg, True)
        mock_cfg_hook.assert_called_once_with(model_cfg)
        mock_cfg_gpu.assert_called_once_with(model_cfg, "train")
        mock_cfg_fp16_optimizer.assert_called_once_with(model_cfg)
        mock_cfg_compat_cfg.assert_called_once_with(model_cfg)
        assert returned_value == model_cfg

    @e2e_pytest_unit
    def test_configure_base(self, mocker):
        """configure_base() should run cleanly with its patch helpers stubbed out."""
        mocker.patch(
            "otx.algorithms.classification.adapters.mmcls.configurer.align_data_config_with_recipe",
            return_value=True,
        )
        mocker.patch(
            "otx.algorithms.classification.adapters.mmcls.configurer.patch_datasets",
            return_value=True,
        )
        mocker.patch(
            "otx.algorithms.classification.adapters.mmcls.configurer.patch_persistent_workers",
            return_value=True,
        )

        model_cfg = copy.deepcopy(self.model_cfg)
        data_cfg = copy.deepcopy(self.data_cfg._cfg_dict)
        self.configurer.configure_base(model_cfg, data_cfg, [], [])

    @e2e_pytest_unit
    def test_configure_device(self, mocker):
        """configure_device() should pick distributed/cpu/cuda from torch state."""
        # Case 1: torch.distributed initialized -> distributed training.
        mocker.patch(
            "torch.distributed.is_initialized",
            return_value=True,
        )
        mocker.patch("os.environ", return_value={"LOCAL_RANK": 2})
        config = copy.deepcopy(self.model_cfg)
        self.configurer.configure_device(config, True)
        assert config.distributed is True

        # Case 2: no distributed, no CUDA -> cpu.
        mocker.patch(
            "torch.distributed.is_initialized",
            return_value=False,
        )
        mocker.patch(
            "torch.cuda.is_available",
            return_value=False,
        )
        config = copy.deepcopy(self.model_cfg)
        self.configurer.configure_device(config, True)
        assert config.distributed is False
        assert config.device == "cpu"

        # Case 3: no distributed, CUDA available -> cuda.
        mocker.patch(
            "torch.distributed.is_initialized",
            return_value=False,
        )
        mocker.patch(
            "torch.cuda.is_available",
            return_value=True,
        )
        config = copy.deepcopy(self.model_cfg)
        self.configurer.configure_device(config, True)
        assert config.distributed is False
        assert config.device == "cuda"

    @e2e_pytest_unit
    def test_configure_model(self):
        """configure_model() should set model_task and resolve in_channels == -1 to 960."""
        ir_options = {"ir_model_path": {"ir_weight_path": "", "ir_weight_init": ""}}
        self.model_cfg.model.head.in_channels = -1
        self.configurer.configure_model(self.model_cfg, ir_options)
        assert self.model_cfg.model_task
        assert self.model_cfg.model.head.in_channels == 960

    @e2e_pytest_unit
    def test_configure_model_not_classification_task(self):
        """A non-classification model task must raise ValueError."""
        ir_options = {"ir_model_path": {"ir_weight_path": "", "ir_weight_init": ""}}
        configure_cfg = copy.deepcopy(self.model_cfg)
        configure_cfg.model.task = "detection"
        with pytest.raises(ValueError):
            self.configurer.configure_model(configure_cfg, ir_options)

    @e2e_pytest_unit
    def test_configure_ckpt(self, mocker):
        """configure_ckpt() should accept a checkpoint path when resume is enabled."""
        model_cfg = copy.deepcopy(self.model_cfg)
        model_cfg.resume = True

        mocker.patch(
            "otx.algorithms.classification.adapters.mmcls.configurer.CheckpointLoader.load_checkpoint",
            return_value={"model": None},
        )
        with tempfile.TemporaryDirectory() as tempdir:
            self.configurer.configure_ckpt(model_cfg, os.path.join(tempdir, "dummy.pth"))

    @e2e_pytest_unit
    def test_configure_data(self):
        """configure_data() should populate model_cfg.data with train/val pipelines."""
        data_cfg = copy.deepcopy(self.data_cfg)
        data_cfg.data.pipeline_options = dict(
            MinIouRandomCrop=dict(min_crop_size=0.1),
            Resize=dict(
                img_scale=[(1344, 480), (1344, 960)],
                multiscale_mode="range",
            ),
            Normalize=dict(),
            MultiScaleFlipAug=dict(
                img_scale=(1344, 800),
                flip=False,
                transforms=[
                    dict(type="Resize", keep_ratio=False),
                    dict(type="Normalize"),
                    dict(type="Pad", size_divisor=32),
                    dict(type="ImageToTensor", keys=["img"]),
                    dict(type="Collect", keys=["img"]),
                ],
            ),
        )
        self.configurer.configure_data(self.model_cfg, True, data_cfg)
        assert self.model_cfg.data
        assert self.model_cfg.data.train
        assert self.model_cfg.data.val

    @e2e_pytest_unit
    def test_configure_task(self):
        """configure_task() should run with and without pre-set model/data classes."""
        model_cfg = copy.deepcopy(self.model_cfg)
        model_cfg.update(self.data_cfg)
        model_cfg.task_adapt = {"type": "mpa", "op": "REPLACE", "use_mpa_anchor": True}
        self.configurer.configure_task(model_cfg, True)

        self.configurer.model_classes = []
        self.configurer.data_classes = ["red", "green"]
        self.configurer.configure_task(model_cfg, True)

    @e2e_pytest_unit
    def test_configure_hook(self):
        """custom_hook_options must override fields of matching custom_hooks entries."""
        model_cfg = copy.deepcopy(self.model_cfg)
        model_cfg.custom_hooks = [{"type": "LazyEarlyStoppingHook", "start": 3}]
        model_cfg.custom_hook_options = {"LazyEarlyStoppingHook": {"start": 5}, "LoggerReplaceHook": {"_delete_": True}}
        self.configurer.configure_hook(model_cfg)
        assert model_cfg.custom_hooks[0]["start"] == 5

    @e2e_pytest_unit
    def test_configure_samples_per_gpu(self):
        """A single-sample dataset should clamp samples_per_gpu to 1 with drop_last."""
        model_cfg = copy.deepcopy(self.model_cfg)
        model_cfg.update(self.data_cfg)
        model_cfg.data.train.otx_dataset = range(1)
        self.configurer.configure_samples_per_gpu(model_cfg, "train")
        assert model_cfg.data.train_dataloader == {"samples_per_gpu": 1, "drop_last": True}

    @e2e_pytest_unit
    def test_configure_fp16_optimizer(self):
        """With fp16 enabled, known optimizer hooks get their Fp16 variant; unknown ones stay."""
        model_cfg = copy.deepcopy(self.model_cfg)
        model_cfg.fp16 = {}
        model_cfg.optimizer_config.type = "OptimizerHook"
        self.configurer.configure_fp16_optimizer(model_cfg)
        assert model_cfg.optimizer_config.type == "Fp16OptimizerHook"

        model_cfg.fp16 = {}
        model_cfg.optimizer_config.type = "SAMOptimizerHook"
        self.configurer.configure_fp16_optimizer(model_cfg)
        assert model_cfg.optimizer_config.type == "Fp16SAMOptimizerHook"

        model_cfg.fp16 = {}
        model_cfg.optimizer_config.type = "DummyOptimizerHook"
        self.configurer.configure_fp16_optimizer(model_cfg)
        assert model_cfg.optimizer_config.type == "DummyOptimizerHook"

    @e2e_pytest_unit
    def test_configure_compat_cfg(self):
        """configure_compat_cfg() should accept explicit (empty) dataloader configs."""
        model_cfg = copy.deepcopy(self.model_cfg)
        model_cfg.update(self.data_cfg)
        model_cfg.data.train_dataloader = {}
        model_cfg.data.val_dataloader = {}
        model_cfg.data.test_dataloader = {}
        self.configurer.configure_compat_cfg(model_cfg)

    @e2e_pytest_unit
    def test_get_data_cfg(self):
        """get_data_cfg() should unwrap the nested 'dataset' of a wrapped dataset config."""
        config = copy.deepcopy(self.model_cfg)
        config.update(self.data_cfg)
        config.data.train.dataset = ConfigDict({"dataset": [1, 2, 3]})
        assert [1, 2, 3] == self.configurer.get_data_cfg(config, "train")
226+
class TestIncrClassificationConfigurer:
    """Unit tests for IncrClassificationConfigurer (class-incremental task setup)."""

    @pytest.fixture(autouse=True)
    def setup(self) -> None:
        """Build a fresh incremental configurer plus model/data configs before every test."""
        self.configurer = IncrClassificationConfigurer()
        self.model_cfg = MPAConfig.fromfile(os.path.join(DEFAULT_CLS_TEMPLATE_DIR, "model.py"))
        self.data_cfg = MPAConfig.fromfile(os.path.join(DEFAULT_CLS_TEMPLATE_DIR, "data_pipeline.py"))

    @e2e_pytest_unit  # added for consistency with the other configurer test classes
    def test_configure_task(self, mocker):
        """configure_task() should append a TaskAdaptHook with sampler_flag disabled."""
        # Stub the base-class step so only the incremental additions run.
        mocker.patch.object(ClassificationConfigurer, "configure_task")
        self.model_cfg.update(self.data_cfg)
        self.model_cfg.task_adapt = {}
        self.configurer.task_adapt_type = "mpa"
        self.configurer.configure_task(self.model_cfg, True)
        assert self.model_cfg.custom_hooks[0].type == "TaskAdaptHook"
        assert self.model_cfg.custom_hooks[0].sampler_flag is False
class TestSemiSLClassificationConfigurer:
    """Unit tests for SemiSLClassificationConfigurer (semi-supervised task setup)."""

    @pytest.fixture(autouse=True)
    def setup(self) -> None:
        """Build a fresh semi-SL configurer plus model/data configs before every test."""
        self.configurer = SemiSLClassificationConfigurer()
        self.model_cfg = MPAConfig.fromfile(os.path.join(DEFAULT_CLS_TEMPLATE_DIR, "model.py"))
        self.data_cfg = MPAConfig.fromfile(os.path.join(DEFAULT_CLS_TEMPLATE_DIR, "data_pipeline.py"))

    @e2e_pytest_unit  # added for consistency with the other configurer test classes
    def test_configure_data(self, mocker):
        """configure_data() should build an unlabeled dataloader without touching real data."""
        mocker.patch.object(ClassificationConfigurer, "configure_data")
        # NOTE(review): patching mmdet (detection) build_dataset in a classification
        # test looks suspicious — confirm this is the symbol the configurer imports.
        mocker.patch("mmdet.datasets.build_dataset", return_value=[])
        mocker.patch("otx.algorithms.classification.adapters.mmcls.configurer.build_dataloader", return_value=[])
        self.model_cfg.update(self.data_cfg)
        self.model_cfg.data.unlabeled = ConfigDict({"type": "OTXDataset", "otx_dataset": range(10)})
        self.model_cfg.model_task = "detection"
        self.model_cfg.distributed = False
        self.configurer.configure_data(self.model_cfg, True, self.data_cfg)

    @e2e_pytest_unit  # added for consistency with the other configurer test classes
    def test_configure_task(self):
        """configure_task() should accept both 'mpa' and non-'mpa' task_adapt types."""
        self.model_cfg.update(self.data_cfg)
        self.model_cfg.task_adapt = {"type": "mpa", "op": "REPLACE", "use_mpa_anchor": True}
        self.configurer.configure_task(self.model_cfg, True)

        self.model_cfg.task_adapt = {"type": "not_mpa", "op": "REPLACE", "use_mpa_anchor": True}
        self.configurer.configure_task(self.model_cfg, True)

0 commit comments

Comments
 (0)