We read every piece of feedback and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent fc04fa7 commit b11a5f8 — Copy full SHA for b11a5f8
onnx_diagnostic/torch_export_patches/patches/patch_transformers.py
@@ -2097,11 +2097,11 @@ class patched_Qwen2_5_VisionTransformerPretrainedModel:
2097
2098
def rot_pos_emb(self, grid_thw):
2099
pos_ids = []
2100
- for thw in grid_thw:
+ for thw_ in grid_thw:
2101
# PATCHED: avoid unbind
2102
- t = thw[0]
2103
- h = thw[1]
2104
- w = thw[2]
+ t = thw_[0]
+ h = thw_[1]
+ w = thw_[2]
2105
hpos_ids = torch.arange(h).unsqueeze(1).expand(-1, w)
2106
hpos_ids = hpos_ids.reshape(
2107
h // self.spatial_merge_size,
0 commit comments