Skip to content

Commit bc28e6a

Browse files
authored
Enable a disabled test (#49)
* fix issues
* try
* unstable
* ci
* fix ci
* bypass
1 parent 5395ec8 commit bc28e6a

File tree

6 files changed

+106
-106
lines changed

6 files changed

+106
-106
lines changed

.github/workflows/ci.yml

Lines changed: 10 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -67,45 +67,30 @@ jobs:
6767
run: python -m pip freeze
6868

6969
- name: tiny-llm torch.export.export
70-
run: |
71-
export PYTHONPATH=.
72-
python _unittests/ut_torch_models/test_tiny_llms.py
70+
run: PYTHONPATH=. python _unittests/ut_torch_models/test_tiny_llms.py
7371

7472
- name: tiny-llm onnx
75-
run: |
76-
export PYTHONPATH=.
77-
python _unittests/ut_torch_models/test_tiny_llms_onnx.py
73+
run: PYTHONPATH=. python _unittests/ut_torch_models/test_tiny_llms_onnx.py
7874
continue-on-error: true # connectivity issues
7975

8076
- name: tiny-llm example
81-
run: |
82-
export PYTHONPATH=.
83-
python _doc/examples/plot_export_tiny_llm.py
77+
run: PYTHONPATH=. python _doc/examples/plot_export_tiny_llm.py
8478
continue-on-error: true # connectivity issues
8579

8680
- name: tiny-llm bypass
87-
run: |
88-
export PYTHONPATH=.
89-
python _doc/examples/plot_export_tiny_llm_patched.py
81+
run: PYTHONPATH=. python _doc/examples/plot_export_tiny_llm_patched.py
9082
continue-on-error: true # connectivity issues
9183

84+
- name: run tests bypassed
85+
run: PYTHONPATH=. python _unittests/ut_torch_models/test_tiny_llms_bypassed.py
86+
9287
- name: run tests
9388
run: |
9489
pip install pytest
95-
export PYTHONPATH=.
96-
UNITTEST_GOING=1 pytest --durations=10 _unittests --ignore _unittests/ut_reference/test_backend_extended_reference_evaluator.py --ignore _unittests/ut_reference/test_backend_onnxruntime_evaluator.py
97-
export PYTHONPATH=
90+
PYTHONPATH=. UNITTEST_GOING=1 pytest --durations=10 _unittests --ignore _unittests/ut_reference/test_backend_extended_reference_evaluator.py --ignore _unittests/ut_reference/test_backend_onnxruntime_evaluator.py --ignore _unittests/ut_torch_models/test_tiny_llms_bypassed.py
9891
9992
- name: run backend tests python
100-
run: |
101-
pip install pytest
102-
export PYTHONPATH=.
103-
UNITTEST_GOING=1 pytest --durations=10 _unittests/ut_reference/test_backend_extended_reference_evaluator.py
104-
export PYTHONPATH=
93+
run: PYTHONPATH=. UNITTEST_GOING=1 pytest --durations=10 _unittests/ut_reference/test_backend_extended_reference_evaluator.py
10594

10695
- name: run backend tests onnxruntime
107-
run: |
108-
pip install pytest
109-
export PYTHONPATH=.
110-
UNITTEST_GOING=1 pytest --durations=10 _unittests/ut_reference/test_backend_onnxruntime_evaluator.py --maxfail=15
111-
export PYTHONPATH=
96+
run: PYTHONPATH=. UNITTEST_GOING=1 pytest --durations=10 _unittests/ut_reference/test_backend_onnxruntime_evaluator.py --maxfail=15

.github/workflows/documentation.yml

Lines changed: 9 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -54,33 +54,25 @@ jobs:
5454
run: python -m pip freeze
5555

5656
- name: tiny-llm torch.export.export
57-
run: |
58-
export PYTHONPATH=.
59-
python _unittests/ut_torch_models/test_tiny_llms.py
57+
run: PYTHONPATH=. python _unittests/ut_torch_models/test_tiny_llms.py
6058

6159
- name: tiny-llm onnx
62-
run: |
63-
export PYTHONPATH=.
64-
python _unittests/ut_torch_models/test_tiny_llms_onnx.py
60+
run: PYTHONPATH=. python _unittests/ut_torch_models/test_tiny_llms_onnx.py
6561
continue-on-error: true
6662

6763
- name: tiny-llm example
68-
run: |
69-
export PYTHONPATH=.
70-
python _doc/examples/plot_export_tiny_llm.py
64+
run: PYTHONPATH=. python _doc/examples/plot_export_tiny_llm.py
7165

7266
- name: tiny-llm bypass
73-
run: |
74-
export PYTHONPATH=.
75-
python _doc/examples/plot_export_tiny_llm_patched.py
67+
run: PYTHONPATH=. python _doc/examples/plot_export_tiny_llm_patched.py
68+
69+
- name: run tests bypassed
70+
run: PYTHONPATH=. python _unittests/ut_torch_models/test_tiny_llms_bypassed.py
7671

7772
- name: Generate coverage report
7873
run: |
79-
pip install pytest
80-
pip install pytest-cov
81-
export PYTHONPATH=.
82-
UNITTEST_GOING=1 pytest --cov=./onnx_diagnostic/ --cov-report=xml --durations=10 _unittests --ignore _unittests/ut_reference/test_backend_extended_reference_evaluator.py --ignore _unittests/ut_reference/test_backend_onnxruntime_evaluator.py
83-
export PYTHONPATH=
74+
pip install pytest pytest-cov
75+
PYTHONPATH=. UNITTEST_GOING=1 pytest --cov=./onnx_diagnostic/ --cov-report=xml --durations=10 _unittests --ignore _unittests/ut_reference/test_backend_extended_reference_evaluator.py --ignore _unittests/ut_reference/test_backend_onnxruntime_evaluator.py --ignore _unittests/ut_torch_models/test_tiny_llms_bypassed.py
8476
8577
- name: Upload coverage reports to Codecov
8678
uses: codecov/codecov-action@v3

_unittests/ut_torch_models/test_llm_phi2.py

Lines changed: 0 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33
from onnx_diagnostic.ext_test_case import ExtTestCase, ignore_warnings, requires_transformers
44
from onnx_diagnostic.torch_models.llms import get_phi2
55
from onnx_diagnostic.helpers import string_type
6-
from onnx_diagnostic.torch_export_patches import bypass_export_some_errors
76

87

98
class TestLlmPhi(ExtTestCase):
@@ -24,23 +23,6 @@ def test_export_phi2_1(self):
2423
ep = torch.export.export(model, (), kwargs=inputs, dynamic_shapes=ds)
2524
assert ep
2625

27-
@ignore_warnings(UserWarning)
28-
@requires_transformers("4.52") # TODO
29-
def test_export_phi2_2_bypassed(self):
30-
data = get_phi2(num_hidden_layers=2)
31-
model, inputs, ds = data["model"], data["inputs"], data["dynamic_shapes"]
32-
self.assertEqual(
33-
{"attention_mask", "past_key_values", "input_ids", "position_ids"}, set(inputs)
34-
)
35-
with bypass_export_some_errors(patch_transformers=True) as modificator:
36-
inputs = modificator(inputs)
37-
ep = torch.export.export(model, (), kwargs=inputs, dynamic_shapes=ds, strict=False)
38-
assert ep
39-
with bypass_export_some_errors(patch_transformers=True) as modificator:
40-
inputs = modificator(inputs)
41-
ep = torch.export.export(model, (), kwargs=inputs, dynamic_shapes=ds, strict=False)
42-
assert ep
43-
4426

4527
if __name__ == "__main__":
4628
unittest.main(verbosity=2)
Lines changed: 0 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,9 @@
11
import copy
22
import unittest
33
import torch
4-
from transformers.cache_utils import DynamicCache
54
from onnx_diagnostic.ext_test_case import ExtTestCase, ignore_warnings, requires_transformers
65
from onnx_diagnostic.torch_models.llms import get_tiny_llm
76
from onnx_diagnostic.helpers import string_type
8-
from onnx_diagnostic.torch_export_patches import bypass_export_some_errors
9-
from onnx_diagnostic.torch_export_patches.patches.patch_transformers import (
10-
patched_DynamicCache,
11-
)
127

138

149
class TestTinyLlm(ExtTestCase):
@@ -33,47 +28,6 @@ def test_export_tiny_llm_1(self):
3328
got = ep.module()(**inputs)
3429
self.assertEqualArrayAny(expected, got)
3530

36-
@ignore_warnings(UserWarning)
37-
def test_export_tiny_llm_2_bypassed(self):
38-
data = get_tiny_llm()
39-
model, inputs = data["model"], data["inputs"]
40-
expected = model(**copy.deepcopy(inputs))
41-
self.assertEqual(
42-
{"attention_mask", "past_key_values", "input_ids", "position_ids"}, set(inputs)
43-
)
44-
45-
with bypass_export_some_errors(
46-
patch_torch=False, patch_transformers=True, catch_constraints=False, verbose=10
47-
) as modificator:
48-
49-
for k in patched_DynamicCache._PATCHES_:
50-
self.assertEqual(getattr(patched_DynamicCache, k), getattr(DynamicCache, k))
51-
52-
inputs = modificator(copy.deepcopy(inputs))
53-
54-
def debug():
55-
print("***", string_type(inputs, with_shape=True))
56-
print("***", data["dynamic_shapes"])
57-
import torch.export._draft_export
58-
59-
ep, report = torch.export._draft_export.draft_export(
60-
model,
61-
(),
62-
kwargs=inputs,
63-
dynamic_shapes=data["dynamic_shapes"],
64-
strict=False,
65-
)
66-
print(report)
67-
68-
if self._debug():
69-
debug()
70-
71-
ep = torch.export.export(
72-
model, (), kwargs=inputs, dynamic_shapes=data["dynamic_shapes"], strict=False
73-
)
74-
got = ep.module()(**inputs)
75-
self.assertEqualArrayAny(expected, got)
76-
7731

7832
if __name__ == "__main__":
7933
unittest.main(verbosity=2)
Lines changed: 75 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,75 @@
1+
import copy
2+
import unittest
3+
import torch
4+
from transformers.cache_utils import DynamicCache
5+
from onnx_diagnostic.ext_test_case import ExtTestCase, ignore_warnings
6+
from onnx_diagnostic.torch_models.llms import get_tiny_llm
7+
from onnx_diagnostic.torch_models.llms import get_phi2
8+
from onnx_diagnostic.helpers import string_type
9+
from onnx_diagnostic.torch_export_patches import bypass_export_some_errors
10+
from onnx_diagnostic.torch_export_patches.patches.patch_transformers import (
11+
patched_DynamicCache,
12+
)
13+
14+
15+
class TestTinyLlmBypassed(ExtTestCase):
16+
@ignore_warnings(UserWarning)
17+
def test_export_tiny_llm_2_bypassed(self):
18+
data = get_tiny_llm()
19+
model, inputs = data["model"], data["inputs"]
20+
expected = model(**copy.deepcopy(inputs))
21+
self.assertEqual(
22+
{"attention_mask", "past_key_values", "input_ids", "position_ids"}, set(inputs)
23+
)
24+
25+
with bypass_export_some_errors(
26+
patch_torch=False, patch_transformers=True, catch_constraints=False, verbose=10
27+
) as modificator:
28+
29+
for k in patched_DynamicCache._PATCHES_:
30+
self.assertEqual(getattr(patched_DynamicCache, k), getattr(DynamicCache, k))
31+
32+
inputs = modificator(copy.deepcopy(inputs))
33+
34+
def debug():
35+
print("***", string_type(inputs, with_shape=True))
36+
print("***", data["dynamic_shapes"])
37+
import torch.export._draft_export
38+
39+
ep, report = torch.export._draft_export.draft_export(
40+
model,
41+
(),
42+
kwargs=inputs,
43+
dynamic_shapes=data["dynamic_shapes"],
44+
strict=False,
45+
)
46+
print(report)
47+
48+
if self._debug():
49+
debug()
50+
51+
ep = torch.export.export(
52+
model, (), kwargs=inputs, dynamic_shapes=data["dynamic_shapes"], strict=False
53+
)
54+
got = ep.module()(**inputs)
55+
self.assertEqualArrayAny(expected, got)
56+
57+
@ignore_warnings(UserWarning)
58+
def test_export_phi2_2_bypassed(self):
59+
data = get_phi2(num_hidden_layers=2)
60+
model, inputs, ds = data["model"], data["inputs"], data["dynamic_shapes"]
61+
self.assertEqual(
62+
{"attention_mask", "past_key_values", "input_ids", "position_ids"}, set(inputs)
63+
)
64+
with bypass_export_some_errors(patch_transformers=True) as modificator:
65+
inputs = modificator(inputs)
66+
ep = torch.export.export(model, (), kwargs=inputs, dynamic_shapes=ds, strict=False)
67+
assert ep
68+
with bypass_export_some_errors(patch_transformers=True) as modificator:
69+
inputs = modificator(inputs)
70+
ep = torch.export.export(model, (), kwargs=inputs, dynamic_shapes=ds, strict=False)
71+
assert ep
72+
73+
74+
if __name__ == "__main__":
75+
unittest.main(verbosity=2)

onnx_diagnostic/ext_test_case.py

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -399,6 +399,18 @@ def has_cuda() -> bool:
399399
return torch.cuda.device_count() > 0
400400

401401

402+
def requires_python(version: Tuple[int, ...], msg: str = ""):
403+
"""
404+
Skips a test if python is too old.
405+
406+
:param msg: to overwrite the message
407+
:param version: minimum version
408+
"""
409+
if sys.version_info[: len(version)] < version:
410+
return unittest.skip(msg or f"python not recent enough {sys.version_info} < {version}")
411+
return lambda x: x
412+
413+
402414
def requires_cuda(msg: str = "", version: str = "", memory: int = 0):
403415
"""
404416
Skips a test if cuda is not available.

0 commit comments

Comments (0)