# Surface which bioimageio.spec version this test session runs against.
warnings.warn(f"testing with bioimageio.spec {bioimageio_spec_version}")
1111
1212# test models for various frameworks
13- torch_models = []
14- torch_models_pre_3_10 = [
13+ torch_models = [
1514 "unet2d_fixed_shape" ,
1615 "unet2d_multi_tensor" ,
1716 "unet2d_nuclei_broad_model" ,
# load all model packages we need for testing
load_model_packages = set()

if not skip_torch:
    # both pytorch_state_dict and torchscript weights require torch
    load_model_packages.update(torch_models, torchscript_models)
104100
105101if not skip_onnx :
@@ -130,35 +126,6 @@ def pytest_configure():
# model groups of the form any_<weight format>_model, each of which includes
# all models that provide that specific weight format
#
133- # written as model group to automatically skip on missing torch
134- @pytest .fixture (params = [] if skip_torch or torch_version >= (3 , 10 ) else ["unet2d_nuclei_broad_model" ])
135- def unet2d_nuclei_broad_model (request ):
136- return pytest .model_packages [request .param ]
137-
138-
139- # written as model group to automatically skip on missing torch
140- @pytest .fixture (params = [] if skip_torch or torch_version >= (3 , 10 ) else ["unet2d_diff_output_shape" ])
141- def unet2d_diff_output_shape (request ):
142- return pytest .model_packages [request .param ]
143-
144-
145- # written as model group to automatically skip on missing tensorflow 1
146- @pytest .fixture (params = [] if skip_tensorflow or tf_major_version != 1 else ["stardist_wrong_shape" ])
147- def stardist_wrong_shape (request ):
148- return pytest .model_packages [request .param ]
149-
150-
151- # written as model group to automatically skip on missing tensorflow 1
152- @pytest .fixture (params = [] if skip_tensorflow or tf_major_version != 1 else ["stardist_wrong_shape2" ])
153- def stardist_wrong_shape2 (request ):
154- return pytest .model_packages [request .param ]
155-
156-
157- # written as model group to automatically skip on missing tensorflow 1
158- @pytest .fixture (params = [] if skip_tensorflow or tf_major_version != 1 else ["stardist" ])
159- def stardist (request ):
160- return pytest .model_packages [request .param ]
161-
162129
163130@pytest .fixture (params = [] if skip_torch else torch_models )
164131def any_torch_model (request ):
@@ -200,19 +167,22 @@ def any_model(request):
200167 return pytest .model_packages [request .param ]
201168
202169
# TODO: it would be nice to generate fixtures for all the individual models dynamically.
#
# Temporary fixtures that test only a manual selection of models rather than all of them
# (the models/functionality should be improved so these specific model groups can be removed).
#
175+
176+
# Parametrized over the fixed-shape unet2d variant and the reference
# unet2d_nuclei_broad_model; the empty param list auto-skips without torch.
@pytest.fixture(params=(["unet2d_nuclei_broad_model", "unet2d_fixed_shape"] if not skip_torch else []))
def unet2d_fixed_shape_or_not(request):
    return pytest.model_packages[request.param]
212182
213183
# Parametrized over the multi-tensor unet2d variant and the reference
# unet2d_nuclei_broad_model; the empty param list auto-skips without torch.
@pytest.fixture(params=(["unet2d_nuclei_broad_model", "unet2d_multi_tensor"] if not skip_torch else []))
def unet2d_multi_tensor_or_not(request):
    return pytest.model_packages[request.param]
@@ -221,3 +191,39 @@ def unet2d_multi_tensor_or_not(request):
# Single-model group; the empty param list auto-skips when keras is unavailable.
@pytest.fixture(params=(["unet2d_keras"] if not skip_keras else []))
def unet2d_keras(request):
    return pytest.model_packages[request.param]
194+
195+
# Written as a model group so the test is skipped automatically when torch is missing.
@pytest.fixture(params=(["unet2d_nuclei_broad_model"] if not skip_torch else []))
def unet2d_nuclei_broad_model(request):
    return pytest.model_packages[request.param]
200+
201+
# Written as a model group so the test is skipped automatically when torch is missing.
@pytest.fixture(params=(["unet2d_diff_output_shape"] if not skip_torch else []))
def unet2d_diff_output_shape(request):
    return pytest.model_packages[request.param]
206+
207+
# Written as a model group so the test is skipped automatically when torch is missing.
@pytest.fixture(params=(["unet2d_fixed_shape"] if not skip_torch else []))
def unet2d_fixed_shape(request):
    return pytest.model_packages[request.param]
212+
213+
# Written as a model group so the test is skipped automatically
# unless tensorflow 1 is available.
@pytest.fixture(params=(["stardist_wrong_shape"] if not skip_tensorflow and tf_major_version == 1 else []))
def stardist_wrong_shape(request):
    return pytest.model_packages[request.param]
218+
219+
# Written as a model group so the test is skipped automatically
# unless tensorflow 1 is available.
@pytest.fixture(params=(["stardist_wrong_shape2"] if not skip_tensorflow and tf_major_version == 1 else []))
def stardist_wrong_shape2(request):
    return pytest.model_packages[request.param]
224+
225+
# Written as a model group so the test is skipped automatically
# unless tensorflow 1 is available.
@pytest.fixture(params=(["stardist"] if not skip_tensorflow and tf_major_version == 1 else []))
def stardist(request):
    return pytest.model_packages[request.param]
0 commit comments