     ExtTestCase,
     requires_torch,
     requires_transformers,
+    has_transformers,
     has_torch,
 )
+from onnx_diagnostic.helpers.cache_helper import CacheKeyValue, make_dynamic_cache
+from onnx_diagnostic.helpers.torch_helper import torch_deepcopy
+from onnx_diagnostic.torch_models.hghub import get_untrained_model_with_inputs
 from onnx_diagnostic.torch_export_patches import torch_export_patches
 from onnx_diagnostic.torch_export_patches.patch_inputs import use_dyn_not_str
 
@@ -317,6 +321,125 @@ def forward(self, x, ind1, ind2):
         got = ep.module()(*inputs)
         self.assertEqualArray(expected, got)
 
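+    # Advanced indexing with two integer index tensors under dynamic shapes:
+    # this path exercises the patched _broadcast_in_dim_meta (see the test name)
+    # when backed_size_oblivious is enabled.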
+    def test_patched__broadcast_in_dim_meta(self):
+        class Model(torch.nn.Module):
+            def forward(self, x, ind1, ind2):
+                return x[ind1, ind2]
+
+        inputs = (
+            torch.randn(2, 1024),
+            torch.tensor([[0, 1]], dtype=torch.int64).T,
+            torch.arange(1024, dtype=torch.int64),
+        )
+        model = Model()
+        expected = model(*inputs)
+
+        with (
+            torch.fx.experimental._config.patch(backed_size_oblivious=True),
+            torch_export_patches(),
+        ):
+            ep = torch.export.export(
+                model,
+                inputs,
+                dynamic_shapes=use_dyn_not_str(({0: "A", 1: "B"}, {0: "C", 1: "D"}, {0: "E"})),
+            )
+        self.assertEqualArray(expected, ep.module()(*inputs), atol=1e-2)
+
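+    # Compare eager and exported outputs of an untrained tiny LLM for several input
+    # configurations: the original batch, inputs reduced to batch size 1, and an empty cache.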
+    @requires_torch("2.7.9999")
+    @requires_transformers("4.49.9999")
+    def test_export_with_patch_tiny_llm_dim_meta(self):
+        data = get_untrained_model_with_inputs("arnir0/Tiny-LLM", verbose=0)
+        model, inputs, ds = data["model"], data["inputs"], data["dynamic_shapes"]
+        order = ["input_ids", "attention_mask", "position_ids", "past_key_values"]
+        self.assertEqual(list(inputs), order)
+        expected = model(**torch_deepcopy(inputs))
+        with self.subTest(input="no01", backed_size_oblivious=False):
+            with torch_export_patches(patch_transformers=True):
+                ep = torch.export.export(
+                    model, (), kwargs=inputs, dynamic_shapes=use_dyn_not_str(ds)
+                )
+                got = ep.module()(**torch_deepcopy(inputs))
+                self.assertEqualArrayAny(expected, got)
+
+        with self.subTest(input="no01", backed_size_oblivious=True):
+            if not has_transformers("4.55"):
+                raise unittest.SkipTest("test not working with transformers<4.55")
+            with (
+                torch.fx.experimental._config.patch(backed_size_oblivious=True),
+                torch_export_patches(patch_transformers=True),
+            ):
+                ep = torch.export.export(
+                    model, (), kwargs=inputs, dynamic_shapes=use_dyn_not_str(ds)
+                )
+                got = ep.module()(**torch_deepcopy(inputs))
+                self.assertEqualArrayAny(expected, got)
+
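+        # _batch1 reduces every input to batch size 1, rebuilding the DynamicCache
+        # from its key/value tensors truncated along the batch dimension.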
+        def _batch1(t):
+            if t.__class__.__name__ == "DynamicCache":
+                kv = CacheKeyValue(t)
+                keys = [t[:1] for t in kv.key_cache]
+                values = [t[:1] for t in kv.value_cache]
+                return make_dynamic_cache(tuple(zip(keys, values)))
+            if t.ndim > 1:
+                return t[:1]
+            return t
+
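+        # Export with the batch-1 inputs, then run the exported module on the original batch.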
+        export_inputs = {k: _batch1(v) for k, v in inputs.items()}
+
+        # with self.subTest(input="batch1", backed_size_oblivious=False):
+        #     with torch_export_patches(patch_transformers=True):
+        #         ep = torch.export.export(
+        #             model, (), kwargs=export_inputs, dynamic_shapes=use_dyn_not_str(ds)
+        #         )
+        #         got = ep.module()(**torch_deepcopy(inputs))
+        #         self.assertEqualArrayAny(expected, got)
+
+        with self.subTest(input="batch1", backed_size_oblivious=True):
+            if not has_transformers("4.55"):
+                raise unittest.SkipTest("test not working with transformers<4.55")
+            with (
+                torch.fx.experimental._config.patch(backed_size_oblivious=True),
+                torch_export_patches(patch_transformers=True),
+            ):
+                ep = torch.export.export(
+                    model, (), kwargs=export_inputs, dynamic_shapes=use_dyn_not_str(ds)
+                )
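+                # Exporting with batch 1 may bake in a guard on the batch dimension of
+                # position_ids; only that specific guard failure is tolerated here.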
+                try:
+                    got = ep.module()(**torch_deepcopy(inputs))
+                except AssertionError as e:
+                    got = None
+                    if "Guard failed: position_ids.size()[0] == 1" not in str(e):
+                        raise
+
+                if got is not None:
+                    self.assertEqualArrayAny(expected, got)
+
+        if "inputs_empty_cache" not in data:
+            return
+
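+        # Repeat the export with inputs carrying an empty past_key_values cache.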
+        export_inputs = data["inputs_empty_cache"]
+
+        # with self.subTest(input="cache0", backed_size_oblivious=False):
+        #     with torch_export_patches(patch_transformers=True):
+        #         ep = torch.export.export(
+        #             model, (), kwargs=export_inputs, dynamic_shapes=use_dyn_not_str(ds)
+        #         )
+        #         got = ep.module()(**torch_deepcopy(inputs))
+        #         self.assertEqualArrayAny(expected, got)
+
+        with self.subTest(input="cache0", backed_size_oblivious=True):
+            if not has_transformers("4.55"):
+                raise unittest.SkipTest("test not working with transformers<4.55")
+            with (
+                torch.fx.experimental._config.patch(backed_size_oblivious=True),
+                torch_export_patches(patch_transformers=True),
+            ):
+                ep = torch.export.export(
+                    model, (), kwargs=export_inputs, dynamic_shapes=use_dyn_not_str(ds)
+                )
+                got = ep.module()(**torch_deepcopy(inputs))
+                self.assertEqualArrayAny(expected, got)
+
 
 if __name__ == "__main__":
     unittest.main(verbosity=2)