Commit a1a9adc

run code formatter
1 parent a75ac12 commit a1a9adc

6 files changed, +5 -5 lines changed

src/autointent/_dump_tools/unit_dumpers.py

Lines changed: 1 addition & 1 deletion
@@ -217,7 +217,7 @@ def dump(obj: "PeftModel", path: Path, exists_ok: bool) -> None:
         # strategy to save lora models: merge adapters and save as usual hugging face model
         lora_path = path / "lora"
         lora_path.mkdir(parents=True, exist_ok=exists_ok)
-        merged_model: "PreTrainedModel" = obj.merge_and_unload()
+        merged_model: PreTrainedModel = obj.merge_and_unload()
         merged_model.save_pretrained(lora_path)
 
     @staticmethod
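For context, the comment in this hunk names the dump strategy: merge the LoRA adapters into the base weights and save the result as a regular Hugging Face model, so it can later be reloaded without peft. A minimal sketch of that strategy, using a placeholder base checkpoint and placeholder LoRA settings rather than anything from this repository:

from pathlib import Path

from peft import LoraConfig, get_peft_model
from transformers import AutoModel, PreTrainedModel

base = AutoModel.from_pretrained("bert-base-uncased")  # placeholder base checkpoint
peft_model = get_peft_model(
    base,
    LoraConfig(r=8, lora_alpha=16, target_modules=["query", "value"]),  # placeholder LoRA config
)

merged_model: PreTrainedModel = peft_model.merge_and_unload()  # fold LoRA weights back into the base model
lora_path = Path("dump") / "lora"
lora_path.mkdir(parents=True, exist_ok=True)
merged_model.save_pretrained(lora_path)  # reloadable with plain transformers, no peft required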

src/autointent/_wrappers/embedder/sentence_transformers.py

Lines changed: 1 addition & 1 deletion
@@ -257,7 +257,7 @@ def train(self, utterances: list[str], labels: ListOfLabels, config: EmbedderFin
             eval_strategy="epoch",
             greater_is_better=False,
         )
-        callbacks: list["TrainerCallback"] = [
+        callbacks: list[TrainerCallback] = [
             transformers.EarlyStoppingCallback(
                 early_stopping_patience=config.early_stopping_patience,
                 early_stopping_threshold=config.early_stopping_threshold,
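The callback being re-typed here is transformers.EarlyStoppingCallback, which stops fine-tuning once the monitored eval metric stops improving. A rough sketch of how such a callback is commonly wired into a transformers Trainer; the output directory, metric choice, and patience values below are placeholders, not the configuration autointent actually uses:

import transformers
from transformers import TrainerCallback, TrainingArguments

args = TrainingArguments(
    output_dir="tmp_trainer",      # placeholder
    eval_strategy="epoch",         # evaluate once per epoch, as in the hunk above
    load_best_model_at_end=True,   # required for early stopping on an eval metric
    metric_for_best_model="loss",  # placeholder metric
    greater_is_better=False,
)
callbacks: list[TrainerCallback] = [
    transformers.EarlyStoppingCallback(
        early_stopping_patience=3,      # placeholder values
        early_stopping_threshold=0.0,
    ),
]
# trainer = transformers.Trainer(model=..., args=args, train_dataset=..., eval_dataset=..., callbacks=callbacks)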

src/autointent/context/data_handler/_stratification.py

Lines changed: 1 addition & 1 deletion
@@ -156,7 +156,7 @@ def _split_multilabel(self, dataset: HFDataset, test_size: float) -> Sequence[np
             A sequence containing indices for train and test splits.
         """
         if self.random_seed is not None:
-            # Set all seeds for reproducibility (workaround for buggy nature of IterativeStratification from skmultilearn)
+            # Set all seeds for reproducibility (workaround for bugs in IterativeStratification from skmultilearn)
             random.seed(self.random_seed)
         splitter = IterativeStratification(
             n_splits=2,
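The reworded comment refers to a known quirk of skmultilearn: IterativeStratification does not reliably control its own randomness, so the surrounding code seeds Python's global random module before splitting. A small sketch of that seeding pattern on synthetic multilabel data; the shapes, seed, and order value are assumptions, not taken from this file:

import random

import numpy as np
from skmultilearn.model_selection import IterativeStratification

seed = 42
random.seed(seed)  # global seeding workaround, mirroring the hunk above

X = np.zeros((100, 1))                      # placeholder feature matrix
y = np.random.randint(0, 2, size=(100, 4))  # placeholder multilabel targets
test_size = 0.2

splitter = IterativeStratification(
    n_splits=2,
    order=1,  # assumed; only n_splits=2 is visible in the hunk
    sample_distribution_per_fold=[test_size, 1.0 - test_size],
)
train_idx, test_idx = next(splitter.split(X, y))  # first fold yields the train/test index pair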

src/autointent/modules/scoring/_bert.py

Lines changed: 1 addition & 1 deletion
@@ -201,7 +201,7 @@ def _train(self, tokenized_dataset: DatasetDict) -> None:
         trainer.train()
 
     def _get_trainer_callbacks(self) -> list["TrainerCallback"]:
-        res: list["TrainerCallback"] = []
+        res: list[TrainerCallback] = []
         if self.early_stopping_config.metric is not None:
             res.append(
                 self._EarlyStoppingCallback(

src/autointent/modules/scoring/_gcn/gcn_model.py

Lines changed: 0 additions & 1 deletion
@@ -1,6 +1,5 @@
 import json
 from pathlib import Path
-from typing import cast
 
 import torch
 from pydantic import BaseModel

tests/modules/scoring/test_catboost.py

Lines changed: 1 addition & 0 deletions
@@ -10,6 +10,7 @@
 
 pytest.importorskip("catboost")
 
+
 def test_catboost_scorer_dump_load(dataset):
     """Test that CatBoostScorer can be saved and loaded while preserving predictions."""
     data_handler = DataHandler(dataset)
