Skip to content

Commit e6e3643

Browse files
committed
add job
1 parent d2b1c92 commit e6e3643

File tree

4 files changed

+81
-5
lines changed

4 files changed

+81
-5
lines changed

.github/workflows/ci.yml

Lines changed: 70 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -82,3 +82,73 @@ jobs:
8282
export PYTHONPATH=.
8383
UNITTEST_GOING=1 pytest --durations=10 _unittests/ut_reference/test_backend_onnxruntime_evaluator.py --maxfail=15
8484
export PYTHONPATH=
85+
86+
run:
87+
name: tr4.48-ci ${{ matrix.os }}-${{ matrix.python }}
88+
runs-on: ${{ matrix.os }}
89+
strategy:
90+
matrix:
91+
os: [ubuntu-latest]
92+
python: ['3.12']
93+
94+
steps:
95+
- uses: actions/checkout@v3
96+
97+
- uses: actions/setup-python@v4
98+
with:
99+
python-version: ${{ matrix.python }}
100+
101+
- name: Install pytorch
102+
run: python -m pip install --pre torch torchvision torchaudio --index-url https://download.pytorch.org/whl/nightly/cpu
103+
104+
- name: Install transformers
105+
run: pip install transformers==4.48
106+
107+
- name: Install requirements
108+
run: python -m pip install -r requirements.txt
109+
110+
- name: Install requirements dev
111+
run: python -m pip install -r requirements-dev.txt
112+
113+
- name: Cache pip
114+
uses: actions/cache@v4
115+
with:
116+
path: ~/.cache/pip
117+
key: ${{ runner.os }}-pip-${{ hashFiles('requirements-dev.txt') }}
118+
restore-keys: |
119+
${{ runner.os }}-pip-
120+
${{ runner.os }}-
121+
122+
- name: pip freeze
123+
run: python -m pip freeze
124+
125+
- name: tiny-llm torch.export.export
126+
run: |
127+
export PYTHONPATH=.
128+
python _unittests/ut_torch_models/test_llms.py
129+
130+
- name: tiny-llm onnx
131+
run: |
132+
export PYTHONPATH=.
133+
python _unittests/ut_torch_models/test_llms_onnx.py
134+
135+
- name: run tests
136+
run: |
137+
pip install pytest
138+
export PYTHONPATH=.
139+
UNITTEST_GOING=1 pytest --durations=10 _unittests --ignore _unittests/ut_reference/test_backend_extended_reference_evaluator.py --ignore _unittests/ut_reference/test_backend_onnxruntime_evaluator.py
140+
export PYTHONPATH=
141+
142+
- name: run backend tests python
143+
run: |
144+
pip install pytest
145+
export PYTHONPATH=.
146+
UNITTEST_GOING=1 pytest --durations=10 _unittests/ut_reference/test_backend_extended_reference_evaluator.py
147+
export PYTHONPATH=
148+
149+
- name: run backend tests onnxruntime
150+
run: |
151+
pip install pytest
152+
export PYTHONPATH=.
153+
UNITTEST_GOING=1 pytest --durations=10 _unittests/ut_reference/test_backend_onnxruntime_evaluator.py --maxfail=15
154+
export PYTHONPATH=

_unittests/ut_torch_models/test_llms.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import unittest
22
import torch
3-
from onnx_diagnostic.ext_test_case import ExtTestCase, ignore_warnings
3+
from onnx_diagnostic.ext_test_case import ExtTestCase, ignore_warnings, requires_transformers
44
from onnx_diagnostic.torch_models.llms import get_tiny_llm
55
from onnx_diagnostic.helpers import string_type
66
from onnx_diagnostic.torch_export_patches import bypass_export_some_errors
@@ -14,6 +14,7 @@ def test_get_tiny_llm(self):
1414
model(**inputs)
1515

1616
@ignore_warnings(UserWarning)
17+
@requires_transformers("4.52")
1718
def test_export_tiny_llm_1(self):
1819
data = get_tiny_llm()
1920
model, inputs = data["model"], data["inputs"]
@@ -28,7 +29,9 @@ def test_export_tiny_llm_2_bypassed(self):
2829
data = get_tiny_llm()
2930
model, inputs = data["model"], data["inputs"]
3031
self.assertEqual({"attention_mask", "past_key_values", "input_ids"}, set(inputs))
31-
with bypass_export_some_errors():
32+
with bypass_export_some_errors(
33+
patch_transformers=True, replace_dynamic_cache=True, verbose=10
34+
):
3235
ep = torch.export.export(
3336
model, (), kwargs=inputs, dynamic_shapes=data["dynamic_shapes"]
3437
)

_unittests/ut_torch_models/test_llms_onnx.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ def test_onnx_export_tiny_llm(self):
1818
data = get_tiny_llm()
1919
model, inputs = data["model"], data["inputs"]
2020
self.assertEqual({"attention_mask", "past_key_values", "input_ids"}, set(inputs))
21-
with bypass_export_some_errors():
21+
with bypass_export_some_errors(patch_transformers=True, replace_dynamic_cache=True):
2222
ep = torch.onnx.export(
2323
model,
2424
(),
@@ -38,7 +38,7 @@ def test_onnx_export_tiny_llm_cdbg(self):
3838
data = get_tiny_llm()
3939
model, inputs = data["model"], data["inputs"]
4040
self.assertEqual({"attention_mask", "past_key_values", "input_ids"}, set(inputs))
41-
with bypass_export_some_errors():
41+
with bypass_export_some_errors(patch_transformers=True, replace_dynamic_cache=True):
4242
onx = to_onnx(model, (), kwargs=inputs, dynamic_shapes=data["dynamic_shapes"])
4343
self.assert_onnx_disc(
4444
inspect.currentframe().f_code.co_name, onx, model, inputs, verbose=1

onnx_diagnostic/torch_export_patches/onnx_export_errors.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -356,7 +356,10 @@ def bypass_export_some_errors(
356356
from .patches.patch_transformers import patched_AttentionMaskConverter
357357

358358
if verbose:
359-
print("[bypass_export_some_errors] patch transformers")
359+
print(
360+
f"[bypass_export_some_errors] patch transformers "
361+
f"{transformers.__version__}"
362+
)
360363
keep__make_causal_mask = AttentionMaskConverter._make_causal_mask
361364
AttentionMaskConverter._make_causal_mask = (
362365
patched_AttentionMaskConverter._make_causal_mask

0 commit comments

Comments (0)