@@ -2,7 +2,14 @@
 from typing import Callable
 import torch
 from torch._dynamo._trace_wrapped_higher_order_op import TransformGetItemToIndex
-from onnx_diagnostic.ext_test_case import ExtTestCase, requires_torch, requires_transformers
+from onnx_diagnostic.ext_test_case import (
+    ExtTestCase,
+    requires_torch,
+    requires_transformers,
+    has_torch,
+)
+from onnx_diagnostic.torch_export_patches import torch_export_patches
+from onnx_diagnostic.torch_export_patches.patch_inputs import use_dyn_not_str
 
 
 class TestPatchPatchTorch(ExtTestCase):
@@ -236,6 +243,89 @@ def forward(self, x):
         ep = torch.export.export(Model(), (x,), dynamic_shapes=({0: DYN},))
         self.assertEqualArray(Model()(x), ep.module()(x))
 
+    def test_oblivious_for_dimension_01(self):
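+        # Advanced indexing where ind1 has a dimension with hint 1 (shape (2, 1)):
+        # torch.export may 0/1-specialize that dimension instead of keeping it dynamic.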
+        class Model(torch.nn.Module):
+            def forward(self, x, ind1, ind2):
+                return x[ind1, ind2]
+
+        inputs = (
+            torch.randn(2, 1024),
+            torch.tensor([[0, 1]], dtype=torch.int64).T,
+            torch.arange(1024, dtype=torch.int64),
+        )
+        model = Model()
+        expected = model(*inputs)
+
+        dynamic_string = ({0: "A", 1: "B"}, {0: "C", 1: "D"}, {0: "E"})
+        # equivalent to ({0: DYN, 1: DYN}, {0: DYN, 1: DYN}, {0: DYN})
+
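+        # use_dyn_not_str replaces the string names with dynamic dimensions,
+        # torch.export.Dim.DYNAMIC by default (Dim.AUTO is passed explicitly below).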
+        dynamic_shapes = use_dyn_not_str(dynamic_string)
+        with self.subTest(
+            name="export 0/1 specialized due to hint of 1 for dimension",
+            dynamic_shapes=dynamic_shapes,
+        ):
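+            # Export is expected to raise; if it succeeds, torch fixed the
+            # 0/1 specialization and this test should be updated.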
+            try:
+                torch.export.export(model, inputs, dynamic_shapes=dynamic_shapes)
+                raise AssertionError("torch fixed that case")
+            except ValueError as e:
+                self.assertIn("export 0/1 specialized due to hint of 1 for dimension", str(e))
+
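+        # Retry with Dim.AUTO under backed_size_oblivious=True: the error
+        # raised (if any) depends on the torch version.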
+        dynamic_shapes = use_dyn_not_str(dynamic_string, torch.export.Dim.AUTO)
+        if has_torch("2.9"):
+            with self.subTest(
+                name="expected shape should be broadcastable to (>= 2.9)",
+                dynamic_shapes=dynamic_shapes,
+            ):
+                try:
+                    with torch.fx.experimental._config.patch(backed_size_oblivious=True):
+                        torch.export.export(model, inputs, dynamic_shapes=dynamic_shapes)
+                    raise AssertionError("torch fixed that case")
+                except RuntimeError as e:
+                    self.assertIn("expected shape should be broadcastable to", str(e))
+
+        if not has_torch("2.9"):
+            with self.subTest(
+                name="expected shape should be broadcastable to (< 2.9)",
+                dynamic_shapes=dynamic_shapes,
+            ):
+                try:
+                    with torch.fx.experimental._config.patch(backed_size_oblivious=True):
+                        torch.export.export(model, inputs, dynamic_shapes=dynamic_shapes)
+                except RuntimeError as e:
+                    self.assertIn(
+                        "Expected input at *args[2].shape[0] to be equal to 1, but got 1024",
+                        str(e),
+                    )
+
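+        # With torch_export_patches the export goes through and the exported
+        # module reproduces the eager results.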
+        with self.subTest(name="patch for 0/1", dynamic_shapes=dynamic_shapes):
+            with torch_export_patches():
+                ep = torch.export.export(model, inputs, dynamic_shapes=dynamic_shapes)
+                got = ep.module()(*inputs)
+            self.assertEqualArray(expected, got)
+
+        if has_torch("2.11"):
+            # Missing PR https://github.com/pytorch/pytorch/pull/164225
+            # Needs more thinking about the patch to apply for this particular example.
+            with self.subTest(
+                name="patch for 0/1 with oblivious", dynamic_shapes=dynamic_shapes
+            ):
+                with torch_export_patches(), torch.fx.experimental._config.patch(
+                    backed_size_oblivious=True
+                ):
+                    ep = torch.export.export(model, inputs, dynamic_shapes=dynamic_shapes)
+                    got = ep.module()(*inputs)
+                self.assertEqualArray(expected, got)
+
 
 if __name__ == "__main__":
     unittest.main(verbosity=2)