|
2 | 2 | from typing import Callable |
3 | 3 | import torch |
4 | 4 | from torch._dynamo._trace_wrapped_higher_order_op import TransformGetItemToIndex |
5 | | -from onnx_diagnostic.ext_test_case import ExtTestCase, requires_torch, requires_transformers |
| 5 | +from onnx_diagnostic.ext_test_case import ( |
| 6 | + ExtTestCase, |
| 7 | + requires_torch, |
| 8 | + requires_transformers, |
| 9 | + has_torch, |
| 10 | +) |
6 | 11 | from onnx_diagnostic.torch_export_patches import torch_export_patches |
7 | 12 | from onnx_diagnostic.torch_export_patches.patch_inputs import use_dyn_not_str |
8 | 13 |
|
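The second hunk revolves around use_dyn_not_str, imported above from onnx_diagnostic.torch_export_patches.patch_inputs: it rewrites string dimension names into torch.export Dim objects. A minimal sketch of the behavior the calls below rely on — the default dim and the positional second argument are inferred from this diff, not from the library's documentation:

    import torch
    from onnx_diagnostic.torch_export_patches.patch_inputs import use_dyn_not_str

    dynamic_string = ({0: "A", 1: "B"}, {0: "C", 1: "D"}, {0: "E"})
    # strings become torch.export.Dim.DYNAMIC (the "DYN" in the comment below)
    dyn_shapes = use_dyn_not_str(dynamic_string)
    # a second argument selects another dim kind, here torch.export.Dim.AUTO,
    # which lets the exporter decide whether each dimension is static or dynamic
    auto_shapes = use_dyn_not_str(dynamic_string, torch.export.Dim.AUTO)
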
@@ -252,28 +257,61 @@ def forward(self, x, ind1, ind2): |
252 | 257 | expected = model(*inputs) |
253 | 258 |
|
254 | 259 | dynamic_string = ({0: "A", 1: "B"}, {0: "C", 1: "D"}, {0: "E"}) |
| 260 | + # ({0: DYN, 1: DYN}, {0: DYN, 1: DYN}, {0: DYN}) |
| 261 | + |
255 | 262 | dynamic_shapes = use_dyn_not_str(dynamic_string) |
256 | | - with self.subTest(name="export 0/1 specialized due to hint of 1 for dimension"): |
| 263 | + with self.subTest( |
| 264 | + name="export 0/1 specialized due to hint of 1 for dimension", |
| 265 | + dynamic_shapes=dynamic_shapes, |
| 266 | + ): |
257 | 267 | try: |
258 | 268 | torch.export.export(model, inputs, dynamic_shapes=dynamic_shapes) |
259 | 269 | raise AssertionError("torch fixed that case") |
260 | 270 | except ValueError as e: |
261 | 271 | self.assertIn("export 0/1 specialized due to hint of 1 for dimension", str(e)) |
262 | 272 |
|
263 | | - with self.subTest(name="expected shape should be broadcastable to"): |
264 | | - try: |
| 273 | + dynamic_shapes = use_dyn_not_str(dynamic_string, torch.export.Dim.AUTO) |
| 274 | + if has_torch("2.9"): |
| 275 | + with self.subTest( |
| 276 | + name="expected shape should be broadcastable to (>= 2.9)", |
| 277 | + dynamic_shapes=dynamic_shapes, |
| 278 | + ): |
| 279 | + try: |
| 280 | + with torch.fx.experimental._config.patch(backed_size_oblivious=True): |
| 281 | + torch.export.export(model, inputs, dynamic_shapes=dynamic_shapes) |
| 282 | + raise AssertionError("torch fixed that case") |
| 283 | + except RuntimeError as e: |
| 284 | + self.assertIn("expected shape should be broadcastable to", str(e)) |
| 285 | + |
| 286 | + if not has_torch("2.9"): |
| 287 | + with self.subTest( |
| 288 | + name="expected shape should be broadcastable to (< 2.9)", |
| 289 | + dynamic_shapes=dynamic_shapes, |
| 290 | + ): |
265 | 291 | with torch.fx.experimental._config.patch(backed_size_oblivious=True): |
266 | | - torch.export.export(model, inputs, dynamic_shapes=dynamic_shapes) |
267 | | - raise AssertionError("torch fixed that case") |
268 | | - except RuntimeError as e: |
269 | | - self.assertIn("expected shape should be broadcastable to", str(e)) |
| 292 | + ep = torch.export.export(model, inputs, dynamic_shapes=dynamic_shapes) |
| 293 | + got = ep.module()(*inputs) |
| 294 | + self.assertEqualArray(expected, got) |
270 | 295 |
|
271 | | - with self.subTest(name="patch for 0/1"): |
| 296 | + with self.subTest(name="patch for 0/1", dynamic_shapes=dynamic_shapes): |
272 | 297 | with torch_export_patches(): |
273 | 298 | ep = torch.export.export(model, inputs, dynamic_shapes=dynamic_shapes) |
274 | 299 | got = ep.module()(*inputs) |
275 | 300 | self.assertEqualArray(expected, got) |
276 | 301 |
|
| 302 | + if has_torch("2.11"): |
| 303 | + # Requires PR https://github.com/pytorch/pytorch/pull/164225 (hence the 2.11 gate). |
| 304 | + # The patch to apply for this particular example still needs more thought. |
| 305 | + with self.subTest( |
| 306 | + name="patch for 0/1 with oblivious", dynamic_shapes=dynamic_shapes |
| 307 | + ): |
| 308 | + with torch_export_patches(), torch.fx.experimental._config.patch( |
| 309 | + backed_size_oblivious=True |
| 310 | + ): |
| 311 | + ep = torch.export.export(model, inputs, dynamic_shapes=dynamic_shapes) |
| 312 | + got = ep.module()(*inputs) |
| 313 | + self.assertEqualArray(expected, got) |
| 314 | + |
277 | 315 |
|
278 | 316 | if __name__ == "__main__": |
279 | 317 | unittest.main(verbosity=2) |
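
A note on the version gates in the hunk above: has_torch, newly added to the imports, is assumed here (consistent with the branches it guards) to return True when the installed torch is at least the given version string. A hypothetical sketch of the resulting dispatch:

    from onnx_diagnostic.ext_test_case import has_torch

    if has_torch("2.9"):  # assumed: installed torch >= 2.9
        ...  # export with backed_size_oblivious=True still raises "broadcastable"
    else:
        ...  # on older torch the same export succeeds and matches eager outputs
    if has_torch("2.11"):
        # torch_export_patches + backed_size_oblivious also works once
        # PR https://github.com/pytorch/pytorch/pull/164225 is available
        ...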