2 changes: 1 addition & 1 deletion autointent/_callbacks/tensorboard.py
@@ -16,7 +16,7 @@ def __init__(self) -> None:
         Raises an ImportError if neither are installed.
         """
         try:
-            from torch.utils.tensorboard import SummaryWriter
+            from torch.utils.tensorboard import SummaryWriter  # type: ignore[attr-defined]
 
             self.writer = SummaryWriter
         except ImportError:
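For context, the annotated import sits inside the usual optional-dependency guard; a minimal sketch of that pattern (the tensorboardX fallback is an assumption inferred from the "neither are installed" docstring, not confirmed code):

try:
    # torch's type stubs may not expose this attribute, hence the ignore
    from torch.utils.tensorboard import SummaryWriter  # type: ignore[attr-defined]
except ImportError:
    try:
        from tensorboardX import SummaryWriter  # hypothetical fallback package
    except ImportError as e:
        msg = "TensorBoard logging requires torch or tensorboardX"
        raise ImportError(msg) from e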
7 changes: 7 additions & 0 deletions autointent/_callbacks/wandb.py
@@ -89,6 +89,11 @@ def log_metrics(self, metrics: dict[str, Any]) -> None:
         """
         self.wandb.log(metrics)
 
+    def _close_current_run(self) -> None:
+        """Close the current W&B run if open."""
+        if self.wandb.run is not None:
+            self.wandb.finish()
+
     def log_final_metrics(self, metrics: dict[str, Any]) -> None:
         """Logs final evaluation metrics to W&B.
@@ -97,6 +102,8 @@ def log_final_metrics(self, metrics: dict[str, Any]) -> None:
         Args:
             metrics: A dictionary of final performance metrics.
         """
+        self._close_current_run()
+
         wandb_run_init_args = {
             "project": self.project_name,
             "group": self.group,
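The point of `_close_current_run`: the wandb client tracks a single active run per process in `wandb.run`, and depending on settings a second `wandb.init` can return the still-open run, so final metrics would be mixed into the last trial's run. Finishing first guarantees a fresh run. A minimal standalone sketch of the pattern (project and group names are illustrative):

import wandb

def log_final(metrics: dict) -> None:
    # Finish whatever run per-trial logging left open.
    if wandb.run is not None:
        wandb.finish()

    # Start a dedicated run for the final metrics.
    run = wandb.init(project="autointent", group="final-metrics")
    run.log(metrics)
    run.finish()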
4 changes: 2 additions & 2 deletions autointent/_dump_tools.py
@@ -229,15 +229,15 @@ def load(  # noqa: C901, PLR0912, PLR0915
         elif child.name == Dumper.ptuning_models:
             for model_dir in child.iterdir():
                 try:
-                    model = AutoModelForSequenceClassification.from_pretrained(model_dir / "base_model")
+                    model = AutoModelForSequenceClassification.from_pretrained(model_dir / "base_model")  # type: ignore[no-untyped-call]
                     hf_models[model_dir.name] = PeftModel.from_pretrained(model, model_dir / "peft")
                 except Exception as e:  # noqa: PERF203
                     msg = f"Error loading PeftModel {model_dir.name}: {e}"
                     logger.exception(msg)
         elif child.name == Dumper.hf_models:
             for model_dir in child.iterdir():
                 try:
-                    hf_models[model_dir.name] = AutoModelForSequenceClassification.from_pretrained(model_dir)
+                    hf_models[model_dir.name] = AutoModelForSequenceClassification.from_pretrained(model_dir)  # type: ignore[no-untyped-call]
                 except Exception as e:  # noqa: PERF203
                     msg = f"Error loading HF model {model_dir.name}: {e}"
                     logger.exception(msg)
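For reference, the on-disk layout this loader walks and the corresponding two-step load, as a minimal sketch (the concrete dump path is illustrative):

from pathlib import Path

from peft import PeftModel
from transformers import AutoModelForSequenceClassification

# Layout assumed by the loader above:
#   <model_dir>/base_model/  - plain HF checkpoint
#   <model_dir>/peft/        - PEFT adapter weights
model_dir = Path("dump/ptuning_models/scorer")  # illustrative

base = AutoModelForSequenceClassification.from_pretrained(model_dir / "base_model")
model = PeftModel.from_pretrained(base, model_dir / "peft")  # base wrapped with the adapter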
5 changes: 5 additions & 0 deletions autointent/modules/base/_base.py
@@ -32,6 +32,11 @@ class BaseModule(ABC):
     name: str
     """Name of the module."""
 
+    @property
+    def trial_name(self) -> str:
+        """Name of the module for logging."""
+        return self.name
+
     @abstractmethod
     def fit(self, *args: tuple[Any], **kwargs: dict[str, Any]) -> None:
         """Fit the model.
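This property gives every module a logging name that defaults to `name` but can be specialized per subclass. A minimal sketch of the default/override interplay (class and attribute names are illustrative, not the real subclasses):

class Module:
    name = "generic_scorer"

    @property
    def trial_name(self) -> str:
        # Default: log under the plain module name.
        return self.name

class WrappedScorer(Module):
    estimator = "LogisticRegression"  # illustrative attribute

    @property
    def trial_name(self) -> str:
        # Override: make each wrapped estimator distinguishable in logs.
        return f"{self.name}_{self.estimator}"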
2 changes: 1 addition & 1 deletion autointent/modules/scoring/_bert.py
@@ -76,7 +76,7 @@ def _initialize_model(self) -> Any:  # noqa: ANN401
         label2id = {i: i for i in range(self._n_classes)}
         id2label = {i: i for i in range(self._n_classes)}
 
-        return AutoModelForSequenceClassification.from_pretrained(
+        return AutoModelForSequenceClassification.from_pretrained(  # type: ignore[no-untyped-call]
             self.classification_model_config.model_name,
             trust_remote_code=self.classification_model_config.trust_remote_code,
             num_labels=self._n_classes,
4 changes: 4 additions & 0 deletions autointent/modules/scoring/_sklearn/sklearn_scorer.py
@@ -79,6 +79,10 @@ def __init__(
             logger.error(msg)
             raise ValueError(msg)
 
+    @property
+    def trial_name(self) -> str:
+        return f"sklearn_{self.clf_name}"
+
     @classmethod
     def from_context(
         cls,
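With this override, trials of the sklearn scorer are logged under per-estimator names such as `sklearn_LogisticRegression` rather than one generic module name (assuming `clf_name` holds the configured estimator's identifier).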
2 changes: 1 addition & 1 deletion autointent/nodes/_node_optimizer.py
@@ -131,7 +131,7 @@ def objective(
         module = self.node_info.modules_available[module_name].from_context(context, **config)
         config.update(module.get_implicit_initialization_params())
 
-        context.callback_handler.start_module(module_name=module_name, num=self._counter, module_kwargs=config)
+        context.callback_handler.start_module(module_name=module.trial_name, num=self._counter, module_kwargs=config)
 
         self._logger.debug("Scoring %s module...", module_name)
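Net effect: the optimizer's callbacks now receive `module.trial_name` instead of the registry key `module_name`, so W&B and TensorBoard entries for the sklearn scorer identify the concrete estimator behind each trial, while every other module falls back to `name` and logs exactly as before.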