Skip to content

Commit c71e444

Browse files
committed
try to fix typing
1 parent 07cec72 commit c71e444

File tree

4 files changed

+10
-12
lines changed

4 files changed

+10
-12
lines changed

autointent/_dump_tools/unit_dumpers.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -215,13 +215,13 @@ def dump(obj: PeftModel, path: Path, exists_ok: bool) -> None:
215215
ptuning_path = path / "ptuning"
216216
ptuning_path.mkdir(parents=True, exist_ok=exists_ok)
217217
obj.save_pretrained(str(ptuning_path / "peft"))
218-
obj.base_model.save_pretrained(ptuning_path / "base_model") # type: ignore[attr-defined]
218+
obj.base_model.save_pretrained(ptuning_path / "base_model")
219219
else:
220220
# strategy to save lora models: merge adapters and save as usual hugging face model
221221
lora_path = path / "lora"
222222
lora_path.mkdir(parents=True, exist_ok=exists_ok)
223223
merged_model: PreTrainedModel = obj.merge_and_unload()
224-
merged_model.save_pretrained(lora_path) # type: ignore[attr-defined]
224+
merged_model.save_pretrained(lora_path)
225225

226226
@staticmethod
227227
def load(path: Path, **kwargs: Any) -> PeftModel: # noqa: ANN401, ARG004
@@ -248,7 +248,7 @@ class HFModelDumper(BaseObjectDumper[PreTrainedModel]):
248248
@staticmethod
249249
def dump(obj: PreTrainedModel, path: Path, exists_ok: bool) -> None:
250250
path.mkdir(parents=True, exist_ok=exists_ok)
251-
obj.save_pretrained(path) # type: ignore[attr-defined]
251+
obj.save_pretrained(path)
252252

253253
@staticmethod
254254
def load(path: Path, **kwargs: Any) -> PreTrainedModel: # noqa: ANN401, ARG004
@@ -265,7 +265,7 @@ class HFTokenizerDumper(BaseObjectDumper[PreTrainedTokenizer | PreTrainedTokeniz
265265
@staticmethod
266266
def dump(obj: PreTrainedTokenizer | PreTrainedTokenizerFast, path: Path, exists_ok: bool) -> None:
267267
path.mkdir(parents=True, exist_ok=exists_ok)
268-
obj.save_pretrained(path) # type: ignore[union-attr]
268+
obj.save_pretrained(path)
269269

270270
@staticmethod
271271
def load(path: Path, **kwargs: Any) -> PreTrainedTokenizer | PreTrainedTokenizerFast: # noqa: ANN401, ARG004

autointent/modules/scoring/_bert.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -181,7 +181,7 @@ def _train(self, tokenized_dataset: DatasetDict) -> None:
181181
load_best_model_at_end=self.early_stopping_config.metric is not None,
182182
)
183183

184-
trainer = Trainer( # type: ignore[no-untyped-call]
184+
trainer = Trainer(
185185
model=self._model,
186186
args=training_args,
187187
train_dataset=tokenized_dataset["train"],
@@ -192,10 +192,10 @@ def _train(self, tokenized_dataset: DatasetDict) -> None:
192192
callbacks=self._get_trainer_callbacks(),
193193
)
194194
if not self.print_progress:
195-
trainer.remove_callback(PrinterCallback) # type: ignore[attr-defined]
196-
trainer.remove_callback(ProgressCallback) # type: ignore[attr-defined]
195+
trainer.remove_callback(PrinterCallback)
196+
trainer.remove_callback(ProgressCallback)
197197

198-
trainer.train() # type: ignore[attr-defined]
198+
trainer.train()
199199

200200
def _get_trainer_callbacks(self) -> list[TrainerCallback]:
201201
res: list[TrainerCallback] = []

user_guides/advanced/03_automl.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -33,8 +33,7 @@
3333
"""
3434

3535
# %%
36-
from autointent.modules import (DECISION_MODULES, EMBEDDING_MODULES,
37-
REGEX_MODULES, SCORING_MODULES)
36+
from autointent.modules import DECISION_MODULES, EMBEDDING_MODULES, REGEX_MODULES, SCORING_MODULES
3837

3938
print(list(SCORING_MODULES.keys()))
4039
print(list(DECISION_MODULES.keys()))

user_guides/basic_usage/03_automl.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -115,8 +115,7 @@
115115
"""
116116

117117
# %%
118-
from autointent.configs import (CrossEncoderConfig, EmbedderConfig,
119-
TokenizerConfig)
118+
from autointent.configs import CrossEncoderConfig, EmbedderConfig, TokenizerConfig
120119

121120
# Configure embedding model (used for vector representations)
122121
custom_pipeline.set_config(EmbedderConfig(model_name="prajjwal1/bert-tiny"))

0 commit comments

Comments (0)