
Commit 3adaf74

fix: python3.13 dependencies compatibility (#91)
Signed-off-by: Matvei Smirnov <[email protected]>
Signed-off-by: Michele Dolfi <[email protected]>
Co-authored-by: Michele Dolfi <[email protected]>
1 parent 55d59aa commit 3adaf74

File tree: 3 files changed (+1560, -1440 lines)


docling_ibm_models/code_formula_model/models/sam_opt.py

Lines changed: 6 additions & 6 deletions
@@ -82,10 +82,10 @@ def forward(
         inputs_embeds = self.embed_tokens(input_ids)
 
         vision_tower = getattr(self, "vision_tower", None)
-        im_start_token = getattr(self.config, "im_start_token", -1)
+        im_start_token = getattr(self.config, "im_start_token", -1)  # type: ignore
 
-        if input_ids.shape[1] != 1 or self.training:
-            with torch.set_grad_enabled(self.training):
+        if input_ids.shape[1] != 1 or self.training:  # type: ignore
+            with torch.set_grad_enabled(self.training):  # type: ignore
                 assert vision_tower is not None
                 image_features = vision_tower(images)
                 image_features = image_features.flatten(2).permute(0, 2, 1)
@@ -118,7 +118,7 @@ def forward(
 
         inputs_embeds = torch.stack(new_input_embeds, dim=0)  # type: ignore
 
-        return super(SamOPTModel, self).forward(
+        return super(SamOPTModel, self).forward(  # type: ignore
             input_ids=None,
             attention_mask=attention_mask,
             past_key_values=past_key_values,
@@ -165,12 +165,12 @@ def forward(
         output_attentions = (
             output_attentions
            if output_attentions is not None
-            else self.config.output_attentions
+            else self.config.output_attentions  # type: ignore
         )
         output_hidden_states = (
             output_hidden_states
             if output_hidden_states is not None
-            else self.config.output_hidden_states
+            else self.config.output_hidden_states  # type: ignore
         )
 
         outputs = self.model(
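
Every change in this diff follows one pattern: a trailing # type: ignore comment silences a static-type-checker error on an attribute the checker cannot resolve (for example, config fields on the OPT base classes), while leaving runtime behaviour untouched. A minimal, self-contained sketch of the same pattern, assuming a hypothetical DemoConfig class standing in for the model config (not part of the docling-ibm-models API):

# Sketch of the pattern applied in this commit. DemoConfig is a
# hypothetical stand-in for the model config.


class DemoConfig:
    """Config object that may or may not carry optional attributes."""

    output_attentions: bool = False


config = DemoConfig()

# getattr with a default never raises AttributeError at runtime, so the
# lookup is safe even when the attribute was never set on the config.
# A type checker cannot prove that "im_start_token" exists on DemoConfig,
# so the trailing comment suppresses the error without changing the
# runtime result (-1 here).
im_start_token = getattr(config, "im_start_token", -1)  # type: ignore

print(im_start_token)  # prints -1 when the attribute is not set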
