conftest.py

import os

import keras
import pytest

# OpenVINO supported test paths
OPENVINO_SUPPORTED_PATHS = [
    "keras-hub/integration_tests",
    "keras_hub/src/models/gemma",
    "keras_hub/src/models/gpt2",
    "keras_hub/src/models/mistral",
    "keras_hub/src/tokenizers",
]

# OpenVINO specific test skips
OPENVINO_SPECIFIC_SKIPPING_TESTS = {
    "test_backbone_basics": "bfloat16 dtype not supported",
    "test_score_loss": "Non-implemented roll operation",
    "test_causal_lm_basics": "Missing ops and requires trainable backend",
}
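
# Note: the skip logic further below matches on the base test name with
# pytest parameter suffixes removed, so a parametrized test such as
# `test_backbone_basics[float32]` (an illustrative id) would match the
# `test_backbone_basics` key above.
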
def pytest_addoption(parser):
    parser.addoption(
        "--run_large",
        action="store_true",
        default=False,
        help="run large tests",
    )
    parser.addoption(
        "--run_extra_large",
        action="store_true",
        default=False,
        help="run extra_large tests",
    )
    parser.addoption(
        "--docstring_module",
        action="store",
        default="",
        help="restrict docs testing to modules whose name matches this flag",
    )
    parser.addoption(
        "--check_gpu",
        action="store_true",
        default=False,
        help="fail if a gpu is not present",
    )
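
# Example invocations using the flags registered above:
#   pytest --run_large                # also run tests marked `large`
#   pytest --run_extra_large          # also run `large` and `extra_large` tests
#   pytest --check_gpu                # fail if the backend detects no GPU
#   pytest --docstring_module=gemma   # e.g. limit docstring tests to gemma
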
def pytest_configure(config):
    # Monkey-patch training methods for OpenVINO backend
    if keras.config.backend() == "openvino":
        keras.Model.fit = lambda *args, **kwargs: pytest.skip(
            "Model.fit() not supported on OpenVINO backend"
        )
        keras.Model.train_on_batch = lambda *args, **kwargs: pytest.skip(
            "Model.train_on_batch() not supported on OpenVINO backend"
        )
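    # Note: the patched methods call `pytest.skip` when invoked, so tests that
    # attempt training on OpenVINO are skipped at call time rather than at
    # collection time.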
    # Verify that the device has a GPU and that it is detected by the backend.
    if config.getoption("--check_gpu"):
        found_gpu = False
        backend = keras.config.backend()
        if backend == "jax":
            import jax

            try:
                found_gpu = bool(jax.devices("gpu"))
            except RuntimeError:
                found_gpu = False
        elif backend == "tensorflow":
            import tensorflow as tf

            found_gpu = bool(tf.config.list_logical_devices("GPU"))
        elif backend == "torch":
            import torch

            found_gpu = bool(torch.cuda.device_count())
        if not found_gpu:
            pytest.fail(f"No GPUs discovered on the {backend} backend.")
    config.addinivalue_line(
        "markers",
        "large: mark test as being slow or requiring a network",
    )
    config.addinivalue_line(
        "markers",
        "extra_large: mark test as being too large to run continuously",
    )
    config.addinivalue_line(
        "markers",
        "tf_only: mark test as running only on the tensorflow backend",
    )
    config.addinivalue_line(
        "markers",
        "kaggle_key_required: mark test as needing a kaggle key",
    )
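
# The markers registered in pytest_configure are applied as decorators in
# test files, e.g. (illustrative test name):
#
#   @pytest.mark.large
#   def test_preset_download():
#       ...
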
def pytest_collection_modifyitems(config, items):
    run_extra_large_tests = config.getoption("--run_extra_large")
    # Run large tests for --run_extra_large or --run_large.
    run_large_tests = config.getoption("--run_large") or run_extra_large_tests
    # Markers to annotate skipped tests with.
    skip_large = pytest.mark.skipif(
        not run_large_tests,
        reason="need --run_large option to run",
    )
    skip_extra_large = pytest.mark.skipif(
        not run_extra_large_tests,
        reason="need --run_extra_large option to run",
    )
    tf_only = pytest.mark.skipif(
        keras.config.backend() != "tensorflow",
        reason="tests only run on tf backend",
    )
    found_kaggle_key = all(
        [
            os.environ.get("KAGGLE_USERNAME", None),
            os.environ.get("KAGGLE_KEY", None),
        ]
    )
    kaggle_key_required = pytest.mark.skipif(
        not found_kaggle_key,
        reason="tests only run with a kaggle api key",
    )
    for item in items:
        if "large" in item.keywords:
            item.add_marker(skip_large)
        if "extra_large" in item.keywords:
            item.add_marker(skip_extra_large)
        if "tf_only" in item.keywords:
            item.add_marker(tf_only)
        if "kaggle_key_required" in item.keywords:
            item.add_marker(kaggle_key_required)
        # OpenVINO-specific test skipping.
        if keras.config.backend() == "openvino":
            # Strip the parameter suffix from parametrized test names.
            test_name = item.name.split("[")[0]
            if test_name in OPENVINO_SPECIFIC_SKIPPING_TESTS:
                item.add_marker(
                    pytest.mark.skip(
                        reason="OpenVINO: "
                        f"{OPENVINO_SPECIFIC_SKIPPING_TESTS[test_name]}",
                    )
                )
                continue
            is_whitelisted = any(
                item.nodeid.startswith(supported_path + "/")
                or item.nodeid.startswith(supported_path + "::")
                or item.nodeid == supported_path
                for supported_path in OPENVINO_SUPPORTED_PATHS
            )
            if not is_whitelisted:
                item.add_marker(
                    pytest.mark.skip(
                        reason="OpenVINO: File/directory not in whitelist"
                    )
                )

# Disable traceback filtering for quicker debugging of test failures.
keras.config.disable_traceback_filtering()
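
# To exercise the OpenVINO-specific paths above, one would typically select
# the backend via the KERAS_BACKEND environment variable before running
# pytest, e.g.:
#   KERAS_BACKEND=openvino pytest keras_hub/src/models/gemma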