We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent cb0f3b4 commit 305f2b4 — Copy full SHA for 305f2b4
src/diffusers/loaders/lora.py
@@ -1294,7 +1294,7 @@ def set_lora_device(self, adapter_names: List[str], device: Union[torch.device,
1294
text_encoder_module.lora_B[adapter_name].to(device)
1295
# this is a param, not a module, so device placement is not in-place -> re-assign
1296
if (
1297
- hasattr(text_encoder, "lora_magnitude_vector")
+ hasattr(text_encoder_module, "lora_magnitude_vector")
1298
and text_encoder_module.lora_magnitude_vector is not None
1299
):
1300
text_encoder_module.lora_magnitude_vector[
tests/lora/test_lora_layers_sdxl.py
@@ -194,7 +194,7 @@ def test_sdxl_1_0_lora(self):
194
).images
195
196
images = images[0, -3:, -3:, -1].flatten()
197
- expected = np.array([0.4468, 0.4087, 0.4134, 0.366, 0.3202, 0.3505, 0.3786, 0.387, 0.3535])
+ expected = np.array([0.4468, 0.4061, 0.4134, 0.3637, 0.3202, 0.365, 0.3786, 0.3725, 0.3535])
198
199
max_diff = numpy_cosine_similarity_distance(expected, images)
200
assert max_diff < 1e-4
@@ -283,7 +283,7 @@ def test_sdxl_1_0_lora_fusion(self):
283
284
285
# This way we also test equivalence between LoRA fusion and the non-fusion behaviour.
286
287
288
289
0 commit comments