Skip to content

Commit 6711052

Browse files
authored
Merge branch 'master' into fix/init_parsing
2 parents b768149 + 02f92a9 commit 6711052

File tree

3 files changed

+16
-7
lines changed

3 files changed

+16
-7
lines changed

Makefile

Lines changed: 4 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -7,7 +7,11 @@ export SPHINX_MOCK_REQUIREMENTS=1
77
# install only Lightning Trainer packages
88
export PACKAGE_NAME=pytorch
99

10+
11+
# In Lightning Studio, the `lightning` package comes pre-installed.
12+
# Uninstall it first to ensure the editable install works correctly.
1013
setup:
14+
uv pip uninstall lightning pytorch-lightning lightning-fabric || true
1115
uv pip install -r requirements.txt \
1216
-r requirements/pytorch/base.txt \
1317
-r requirements/pytorch/test.txt \

src/lightning/pytorch/trainer/connectors/logger_connector/result.py

Lines changed: 1 addition & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -91,8 +91,7 @@ def _generate_sync_fn(self) -> None:
9191
"""Used to compute the syncing function and cache it."""
9292
fn = self.no_op if self.fn is None or not self.should or self.rank_zero_only else self.fn
9393
# save the function as `_fn` as the meta are being re-created and the object references need to match.
94-
# ignore typing, bad support for `partial`: mypy/issues/1484
95-
self._fn: Callable = partial(fn, reduce_op=self.op, group=self.group) # type: ignore[unused-ignore]
94+
self._fn: Callable = partial(fn, reduce_op=self.op, group=self.group)
9695

9796
@property
9897
def __call__(self) -> Any:

src/lightning/pytorch/utilities/model_helpers.py

Lines changed: 11 additions & 5 deletions
Original file line number · Diff line number · Diff line change
@@ -19,7 +19,7 @@
1919

2020
from lightning_utilities.core.imports import RequirementCache
2121
from torch import nn
22-
from typing_extensions import Concatenate, ParamSpec
22+
from typing_extensions import Concatenate, ParamSpec, override
2323

2424
import lightning.pytorch as pl
2525

@@ -104,26 +104,32 @@ def _check_mixed_imports(instance: object) -> None:
104104
_R_co = TypeVar("_R_co", covariant=True) # return type of the decorated method
105105

106106

107-
class _restricted_classmethod_impl(Generic[_T, _R_co, _P]):
107+
class _restricted_classmethod_impl(classmethod, Generic[_T, _P, _R_co]):
108108
"""Drop-in replacement for @classmethod, but raises an exception when the decorated method is called on an instance
109109
instead of a class type."""
110110

111+
method: Callable[Concatenate[type[_T], _P], _R_co]
112+
111113
def __init__(self, method: Callable[Concatenate[type[_T], _P], _R_co]) -> None:
114+
super().__init__(method)
112115
self.method = method
113116

114-
def __get__(self, instance: Optional[_T], cls: type[_T]) -> Callable[_P, _R_co]:
117+
@override
118+
def __get__(self, instance: _T, cls: Optional[type[_T]] = None) -> Callable[_P, _R_co]: # type: ignore[override]
115119
# The wrapper ensures that the method can be inspected, but not called on an instance
116120
@functools.wraps(self.method)
117121
def wrapper(*args: Any, **kwargs: Any) -> _R_co:
118122
# Workaround for https://github.com/pytorch/pytorch/issues/67146
119123
is_scripting = any(os.path.join("torch", "jit") in frameinfo.filename for frameinfo in inspect.stack())
124+
cls_type = cls if cls is not None else type(instance)
120125
if instance is not None and not is_scripting:
121126
raise TypeError(
122-
f"The classmethod `{cls.__name__}.{self.method.__name__}` cannot be called on an instance."
127+
f"The classmethod `{cls_type.__name__}.{self.method.__name__}` cannot be called on an instance."
123128
" Please call it on the class type and make sure the return value is used."
124129
)
125-
return self.method(cls, *args, **kwargs)
130+
return self.method(cls_type, *args, **kwargs)
126131

132+
wrapper.__func__ = self.method
127133
return wrapper
128134

129135

0 commit comments

Comments (0)