Skip to content

Commit 931f09c

Browse files
chore: Removed dead code and improved type annotations
1 parent 6a0ddf6 commit 931f09c

File tree

1 file changed

+5
-68
lines changed

1 file changed

+5
-68
lines changed

src/lightning/pytorch/loggers/neptune.py

Lines changed: 5 additions & 68 deletions
Original file line numberDiff line numberDiff line change
@@ -980,7 +980,7 @@ def flatten(d: dict, prefix: str = "") -> dict[str, Any]:
980980

981981
@override
982982
@rank_zero_only
983-
def log_metrics(self, metrics: dict[str, Union[Tensor, float]], step: int) -> None:
983+
def log_metrics(self, metrics: dict[str, Union[Tensor, float]], step: Optional[int] = None) -> None:
984984
"""Log metrics (numeric values) in Neptune runs.
985985
986986
Args:
@@ -1025,88 +1025,25 @@ def save_dir(self) -> Optional[str]:
10251025

10261026
@rank_zero_only
10271027
def log_model_summary(self, model: "pl.LightningModule", max_depth: int = -1) -> None:
1028+
"""Not implemented for Neptune Scale."""
10281029
log.warning("Neptune Scale does not support logging model summaries.")
10291030
return
1030-
from neptune.types import File
1031-
1032-
model_str = str(ModelSummary(model=model, max_depth=max_depth))
1033-
self.run[self._construct_path_with_prefix("model/summary")] = File.from_content(
1034-
content=model_str, extension="txt"
1035-
)
10361031

10371032
@override
10381033
@rank_zero_only
10391034
def after_save_checkpoint(self, checkpoint_callback: Checkpoint) -> None:
1040-
"""Automatically log checkpointed model. Called after model checkpoint callback saves a new checkpoint.
1041-
1042-
Args:
1043-
checkpoint_callback: the model checkpoint callback instance
1044-
1045-
"""
1046-
if not self._log_model_checkpoints:
1047-
return
1048-
1049-
file_names = set()
1050-
checkpoints_namespace = self._construct_path_with_prefix("model/checkpoints")
1051-
1052-
# save last model
1053-
if hasattr(checkpoint_callback, "last_model_path") and checkpoint_callback.last_model_path:
1054-
model_last_name = self._get_full_model_name(checkpoint_callback.last_model_path, checkpoint_callback)
1055-
file_names.add(model_last_name)
1056-
self.run[f"{checkpoints_namespace}/{model_last_name}"].upload(checkpoint_callback.last_model_path)
1057-
1058-
# save best k models
1059-
if hasattr(checkpoint_callback, "best_k_models"):
1060-
for key in checkpoint_callback.best_k_models:
1061-
model_name = self._get_full_model_name(key, checkpoint_callback)
1062-
file_names.add(model_name)
1063-
self.run[f"{checkpoints_namespace}/{model_name}"].upload(key)
1064-
1065-
# log best model path and checkpoint
1066-
if hasattr(checkpoint_callback, "best_model_path") and checkpoint_callback.best_model_path:
1067-
self.run[self._construct_path_with_prefix("model/best_model_path")] = checkpoint_callback.best_model_path
1068-
1069-
model_name = self._get_full_model_name(checkpoint_callback.best_model_path, checkpoint_callback)
1070-
file_names.add(model_name)
1071-
self.run[f"{checkpoints_namespace}/{model_name}"].upload(checkpoint_callback.best_model_path)
1072-
1073-
# remove old models logged to experiment if they are not part of best k models at this point
1074-
if self.run.exists(checkpoints_namespace):
1075-
exp_structure = self.run.get_structure()
1076-
uploaded_model_names = self._get_full_model_names_from_exp_structure(exp_structure, checkpoints_namespace)
1077-
1078-
for file_to_drop in list(uploaded_model_names - file_names):
1079-
del self.run[f"{checkpoints_namespace}/{file_to_drop}"]
1080-
1081-
# log best model score
1082-
if hasattr(checkpoint_callback, "best_model_score") and checkpoint_callback.best_model_score:
1083-
self.run[self._construct_path_with_prefix("model/best_model_score")] = (
1084-
checkpoint_callback.best_model_score.cpu().detach().numpy()
1085-
)
1035+
"""Not implemented for Neptune Scale."""
1036+
return
10861037

10871038
@staticmethod
10881039
def _get_full_model_name(model_path: str, checkpoint_callback: Checkpoint) -> None:
10891040
"""Returns model name which is string `model_path` appended to `checkpoint_callback.dirpath`."""
1090-
return None
1091-
if hasattr(checkpoint_callback, "dirpath"):
1092-
model_path = os.path.normpath(model_path)
1093-
expected_model_path = os.path.normpath(checkpoint_callback.dirpath)
1094-
if not model_path.startswith(expected_model_path):
1095-
raise ValueError(f"{model_path} was expected to start with {expected_model_path}.")
1096-
# Remove extension from filepath
1097-
filepath, _ = os.path.splitext(model_path[len(expected_model_path) + 1 :])
1098-
return filepath.replace(os.sep, "/")
1099-
return model_path.replace(os.sep, "/")
1041+
return
11001042

11011043
@classmethod
11021044
def _get_full_model_names_from_exp_structure(cls, exp_structure: dict[str, Any], namespace: str) -> set[None]:
11031045
"""Returns all paths to properties which were already logged in `namespace`"""
11041046
return set()
1105-
structure_keys: list[str] = namespace.split(cls.LOGGER_JOIN_CHAR)
1106-
for key in structure_keys:
1107-
exp_structure = exp_structure[key]
1108-
uploaded_models_dict = exp_structure
1109-
return set(cls._dict_paths(uploaded_models_dict))
11101047

11111048
@classmethod
11121049
def _dict_paths(cls, d: dict[str, Any], path_in_build: Optional[str] = None) -> Generator:

0 commit comments

Comments (0)