We read every piece of feedback and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent b0e8322 commit c2222dbCopy full SHA for c2222db
onnx_diagnostic/torch_export_patches/onnx_export_errors.py
@@ -567,6 +567,7 @@ def torch_export_patches(
567
sdpa_attention is not None
568
and modeling_utils is not None
569
and hasattr(sdpa_attention, "sdpa_attention_forward")
570
+ and hasattr(sdpa_attention, "use_gqa_in_sdpa")
571
and hasattr(modeling_utils, "AttentionInterface")
572
):
573
if verbose:
@@ -776,6 +777,7 @@ def torch_export_patches(
776
777
778
779
780
781
782
783
sdpa_attention.sdpa_attention_forward = f_sdpa_attention_forward
0 commit comments