Skip to content

Commit ffbabb5

Browse files
committed
make style
1 parent 1040c91 commit ffbabb5

File tree

2 files changed

+6
-4
lines changed

2 files changed

+6
-4
lines changed

src/diffusers/hooks/hooks.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -134,7 +134,9 @@ def new_forward(module, *args, **kwargs):
134134
output = old_forward(*args, **kwargs)
135135
return hook.post_forward(module, output)
136136

137-
self._module_ref.forward = functools.update_wrapper(functools.partial(new_forward, self._module_ref), old_forward)
137+
self._module_ref.forward = functools.update_wrapper(
138+
functools.partial(new_forward, self._module_ref), old_forward
139+
)
138140

139141
self.hooks[name] = hook
140142
self._hook_order.append(name)
@@ -156,7 +158,7 @@ def reset_stateful_hooks(self, recurse: bool = True) -> None:
156158
hook = self.hooks[hook_name]
157159
if hook._is_stateful:
158160
hook.reset_state(self._module_ref)
159-
161+
160162
if recurse:
161163
for module in self._module_ref.modules():
162164
if hasattr(module, "_diffusers_hook"):

src/diffusers/hooks/pyramid_attention_broadcast.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -106,7 +106,7 @@ def __init__(self) -> None:
106106
def reset(self):
107107
self.iteration = 0
108108
self.cache = None
109-
109+
110110
def __repr__(self):
111111
cache_repr = ""
112112
if self.cache is None:
@@ -251,7 +251,7 @@ def _apply_pyramid_attention_broadcast_on_attention_class(
251251
def skip_callback(module: torch.nn.Module) -> bool:
252252
hook: PyramidAttentionBroadcastHook = module._diffusers_hook.get_hook("pyramid_attention_broadcast")
253253
pab_state: PyramidAttentionBroadcastState = hook.state
254-
254+
255255
if pab_state.cache is None:
256256
return False
257257

0 commit comments

Comments (0)