6 changes: 4 additions & 2 deletions src/diffusers/loaders/single_file_utils.py
@@ -14,6 +14,7 @@
# limitations under the License.
"""Conversion script for the Stable Diffusion checkpoints."""

import copy
import os
import re
from contextlib import nullcontext
@@ -91,11 +92,11 @@
"xl_inpaint": {"pretrained_model_name_or_path": "diffusers/stable-diffusion-xl-1.0-inpainting-0.1"},
"playground-v2-5": {"pretrained_model_name_or_path": "playgroundai/playground-v2.5-1024px-aesthetic"},
"upscale": {"pretrained_model_name_or_path": "stabilityai/stable-diffusion-x4-upscaler"},
"inpainting": {"pretrained_model_name_or_path": "Lykon/dreamshaper-8-inpainting"},
"inpainting": {"pretrained_model_name_or_path": "stable-diffusion-v1-5/stable-diffusion-inpainting"},
"inpainting_v2": {"pretrained_model_name_or_path": "stabilityai/stable-diffusion-2-inpainting"},
"controlnet": {"pretrained_model_name_or_path": "lllyasviel/control_v11p_sd15_canny"},
"v2": {"pretrained_model_name_or_path": "stabilityai/stable-diffusion-2-1"},
"v1": {"pretrained_model_name_or_path": "Lykon/dreamshaper-8"},
"v1": {"pretrained_model_name_or_path": "stable-diffusion-v1-5/stable-diffusion-v1-5"},
"stable_cascade_stage_b": {"pretrained_model_name_or_path": "stabilityai/stable-cascade", "subfolder": "decoder"},
"stable_cascade_stage_b_lite": {
"pretrained_model_name_or_path": "stabilityai/stable-cascade",
@@ -541,6 +542,7 @@ def infer_diffusers_model_type(checkpoint):
def fetch_diffusers_config(checkpoint):
model_type = infer_diffusers_model_type(checkpoint)
model_path = DIFFUSERS_DEFAULT_PIPELINE_PATHS[model_type]
model_path = copy.deepcopy(model_path)
@yiyixuxu (Collaborator, Author) commented on Sep 24, 2024:

Making a copy here so that we don't accidentally modify DIFFUSERS_DEFAULT_PIPELINE_PATHS.

Here is an example where we modified the output of fetch_diffusers_config in place:

subfolder = subfolder or config.pop(

This is likely to happen again if it hasn't already, so we should go through the single file code and check whether there are more places that could cause this type of error.
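
To make the failure mode concrete, here is a minimal, self-contained sketch (the function names and the "example" entry are hypothetical, not the actual diffusers code) showing how returning the shared dict lets a caller's in-place `pop` corrupt the module-level defaults, and how returning a deep copy, as this change does, keeps the mutation local:

```python
import copy

# Stand-in for DIFFUSERS_DEFAULT_PIPELINE_PATHS; the key and values are made up
# purely to illustrate the shared-state issue.
DEFAULT_PIPELINE_PATHS = {
    "example": {"pretrained_model_name_or_path": "org/example-model", "subfolder": "decoder"},
}


def fetch_config_shared(model_type):
    # Returns a reference to the shared dict: any caller mutation leaks into the defaults.
    return DEFAULT_PIPELINE_PATHS[model_type]


def fetch_config_copied(model_type):
    # Returns an independent copy, so caller-side mutation stays local.
    return copy.deepcopy(DEFAULT_PIPELINE_PATHS[model_type])


# A caller that pops "subfolder" in place (the `subfolder = subfolder or config.pop(...)`
# pattern) silently removes the key from the shared defaults when handed the raw reference:
config = fetch_config_shared("example")
config.pop("subfolder", None)
assert "subfolder" not in DEFAULT_PIPELINE_PATHS["example"]  # shared state was corrupted

# With the deep copy, the same caller pattern leaves the defaults untouched.
DEFAULT_PIPELINE_PATHS["example"]["subfolder"] = "decoder"  # restore for the demo
config = fetch_config_copied("example")
config.pop("subfolder", None)
assert DEFAULT_PIPELINE_PATHS["example"]["subfolder"] == "decoder"  # default preserved
```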


return model_path

54 changes: 29 additions & 25 deletions tests/lora/test_lora_layers_sd.py
@@ -102,7 +102,7 @@ def tearDown(self):
@slow
@require_torch_gpu
def test_integration_move_lora_cpu(self):
path = "Jiali/stable-diffusion-1.5"
path = "stable-diffusion-v1-5/stable-diffusion-v1-5"
lora_id = "takuma104/lora-test-text-encoder-lora-target"

pipe = StableDiffusionPipeline.from_pretrained(path, torch_dtype=torch.float16)
@@ -161,7 +161,7 @@ def test_integration_move_lora_cpu(self):
def test_integration_move_lora_dora_cpu(self):
from peft import LoraConfig

path = "Jiali/stable-diffusion-1.5"
path = "stable-diffusion-v1-5/stable-diffusion-v1-5"
unet_lora_config = LoraConfig(
init_lora_weights="gaussian",
target_modules=["to_k", "to_q", "to_v", "to_out.0"],
@@ -221,7 +221,7 @@ def tearDown(self):
torch.cuda.empty_cache()

def test_integration_logits_with_scale(self):
path = "Jiali/stable-diffusion-1.5"
path = "stable-diffusion-v1-5/stable-diffusion-v1-5"
lora_id = "takuma104/lora-test-text-encoder-lora-target"

pipe = StableDiffusionPipeline.from_pretrained(path, torch_dtype=torch.float32)
@@ -253,7 +253,7 @@ def test_integration_logits_with_scale(self):
release_memory(pipe)

def test_integration_logits_no_scale(self):
path = "Jiali/stable-diffusion-1.5"
path = "stable-diffusion-v1-5/stable-diffusion-v1-5"
lora_id = "takuma104/lora-test-text-encoder-lora-target"

pipe = StableDiffusionPipeline.from_pretrained(path, torch_dtype=torch.float32)
@@ -284,7 +284,7 @@ def test_dreambooth_old_format(self):

lora_model_id = "hf-internal-testing/lora_dreambooth_dog_example"

base_model_id = "Jiali/stable-diffusion-1.5"
base_model_id = "stable-diffusion-v1-5/stable-diffusion-v1-5"

pipe = StableDiffusionPipeline.from_pretrained(base_model_id, safety_checker=None)
pipe = pipe.to(torch_device)
@@ -308,7 +308,7 @@ def test_dreambooth_text_encoder_new_format(self):

lora_model_id = "hf-internal-testing/lora-trained"

base_model_id = "Jiali/stable-diffusion-1.5"
base_model_id = "stable-diffusion-v1-5/stable-diffusion-v1-5"

pipe = StableDiffusionPipeline.from_pretrained(base_model_id, safety_checker=None)
pipe = pipe.to(torch_device)
@@ -419,9 +419,9 @@ def test_a1111_with_sequential_cpu_offload(self):
def test_kohya_sd_v15_with_higher_dimensions(self):
generator = torch.Generator().manual_seed(0)

pipe = StableDiffusionPipeline.from_pretrained("Jiali/stable-diffusion-1.5", safety_checker=None).to(
torch_device
)
pipe = StableDiffusionPipeline.from_pretrained(
"stable-diffusion-v1-5/stable-diffusion-v1-5", safety_checker=None
).to(torch_device)
lora_model_id = "hf-internal-testing/urushisato-lora"
lora_filename = "urushisato_v15.safetensors"
pipe.load_lora_weights(lora_model_id, weight_name=lora_filename)
@@ -444,7 +444,7 @@ def test_vanilla_funetuning(self):

lora_model_id = "hf-internal-testing/sd-model-finetuned-lora-t4"

base_model_id = "Jiali/stable-diffusion-1.5"
base_model_id = "stable-diffusion-v1-5/stable-diffusion-v1-5"

pipe = StableDiffusionPipeline.from_pretrained(base_model_id, safety_checker=None)
pipe = pipe.to(torch_device)
@@ -467,9 +467,9 @@ def test_unload_kohya_lora(self):
prompt = "masterpiece, best quality, mountain"
num_inference_steps = 2

pipe = StableDiffusionPipeline.from_pretrained("Jiali/stable-diffusion-1.5", safety_checker=None).to(
torch_device
)
pipe = StableDiffusionPipeline.from_pretrained(
"stable-diffusion-v1-5/stable-diffusion-v1-5", safety_checker=None
).to(torch_device)
initial_images = pipe(
prompt, output_type="np", generator=generator, num_inference_steps=num_inference_steps
).images
@@ -505,9 +505,9 @@ def test_load_unload_load_kohya_lora(self):
prompt = "masterpiece, best quality, mountain"
num_inference_steps = 2

pipe = StableDiffusionPipeline.from_pretrained("Jiali/stable-diffusion-1.5", safety_checker=None).to(
torch_device
)
pipe = StableDiffusionPipeline.from_pretrained(
"stable-diffusion-v1-5/stable-diffusion-v1-5", safety_checker=None
).to(torch_device)
initial_images = pipe(
prompt, output_type="np", generator=generator, num_inference_steps=num_inference_steps
).images
@@ -547,9 +547,9 @@ def test_load_unload_load_kohya_lora(self):

def test_not_empty_state_dict(self):
# Makes sure https://github.com/huggingface/diffusers/issues/7054 does not happen again
pipe = AutoPipelineForText2Image.from_pretrained("Jiali/stable-diffusion-1.5", torch_dtype=torch.float16).to(
torch_device
)
pipe = AutoPipelineForText2Image.from_pretrained(
"stable-diffusion-v1-5/stable-diffusion-v1-5", torch_dtype=torch.float16
).to(torch_device)
pipe.scheduler = LCMScheduler.from_config(pipe.scheduler.config)

cached_file = hf_hub_download("hf-internal-testing/lcm-lora-test-sd-v1-5", "test_lora.safetensors")
@@ -561,9 +561,9 @@ def test_not_empty_state_dict(self):

def test_load_unload_load_state_dict(self):
# Makes sure https://github.com/huggingface/diffusers/issues/7054 does not happen again
pipe = AutoPipelineForText2Image.from_pretrained("Jiali/stable-diffusion-1.5", torch_dtype=torch.float16).to(
torch_device
)
pipe = AutoPipelineForText2Image.from_pretrained(
"stable-diffusion-v1-5/stable-diffusion-v1-5", torch_dtype=torch.float16
).to(torch_device)
pipe.scheduler = LCMScheduler.from_config(pipe.scheduler.config)

cached_file = hf_hub_download("hf-internal-testing/lcm-lora-test-sd-v1-5", "test_lora.safetensors")
@@ -580,7 +580,9 @@ def test_load_unload_load_state_dict(self):
release_memory(pipe)

def test_sdv1_5_lcm_lora(self):
pipe = DiffusionPipeline.from_pretrained("Jiali/stable-diffusion-1.5", torch_dtype=torch.float16)
pipe = DiffusionPipeline.from_pretrained(
"stable-diffusion-v1-5/stable-diffusion-v1-5", torch_dtype=torch.float16
)
pipe.to(torch_device)
pipe.scheduler = LCMScheduler.from_config(pipe.scheduler.config)

@@ -608,7 +610,9 @@ def test_sdv1_5_lcm_lora(self):
release_memory(pipe)

def test_sdv1_5_lcm_lora_img2img(self):
pipe = AutoPipelineForImage2Image.from_pretrained("Jiali/stable-diffusion-1.5", torch_dtype=torch.float16)
pipe = AutoPipelineForImage2Image.from_pretrained(
"stable-diffusion-v1-5/stable-diffusion-v1-5", torch_dtype=torch.float16
)
pipe.to(torch_device)
pipe.scheduler = LCMScheduler.from_config(pipe.scheduler.config)

@@ -649,7 +653,7 @@ def test_sd_load_civitai_empty_network_alpha(self):
This test simply checks that loading a LoRA with an empty network alpha works fine
See: https://github.com/huggingface/diffusers/issues/5606
"""
pipeline = StableDiffusionPipeline.from_pretrained("Jiali/stable-diffusion-1.5")
pipeline = StableDiffusionPipeline.from_pretrained("stable-diffusion-v1-5/stable-diffusion-v1-5")
pipeline.enable_sequential_cpu_offload()
civitai_path = hf_hub_download("ybelkada/test-ahi-civitai", "ahi_lora_weights.safetensors")
pipeline.load_lora_weights(civitai_path, adapter_name="ahri")
8 changes: 5 additions & 3 deletions tests/models/autoencoders/test_models_vae.py
@@ -1051,7 +1051,9 @@ def test_encode_decode(self):

def test_sd(self):
vae = ConsistencyDecoderVAE.from_pretrained("openai/consistency-decoder") # TODO - update
pipe = StableDiffusionPipeline.from_pretrained("Jiali/stable-diffusion-1.5", vae=vae, safety_checker=None)
pipe = StableDiffusionPipeline.from_pretrained(
"stable-diffusion-v1-5/stable-diffusion-v1-5", vae=vae, safety_checker=None
)
pipe.to(torch_device)

out = pipe(
@@ -1099,7 +1101,7 @@ def test_sd_f16(self):
"openai/consistency-decoder", torch_dtype=torch.float16
) # TODO - update
pipe = StableDiffusionPipeline.from_pretrained(
"Jiali/stable-diffusion-1.5",
"stable-diffusion-v1-5/stable-diffusion-v1-5",
torch_dtype=torch.float16,
vae=vae,
safety_checker=None,
@@ -1124,7 +1126,7 @@ def test_sd_f16(self):
def test_vae_tiling(self):
vae = ConsistencyDecoderVAE.from_pretrained("openai/consistency-decoder", torch_dtype=torch.float16)
pipe = StableDiffusionPipeline.from_pretrained(
"Jiali/stable-diffusion-1.5", vae=vae, safety_checker=None, torch_dtype=torch.float16
"stable-diffusion-v1-5/stable-diffusion-v1-5", vae=vae, safety_checker=None, torch_dtype=torch.float16
)
pipe.to(torch_device)
pipe.set_progress_bar_config(disable=None)
30 changes: 16 additions & 14 deletions tests/pipelines/controlnet/test_controlnet.py
@@ -73,7 +73,7 @@ def _test_stable_diffusion_compile(in_queue, out_queue, timeout):
controlnet = ControlNetModel.from_pretrained("lllyasviel/sd-controlnet-canny")

pipe = StableDiffusionControlNetPipeline.from_pretrained(
"Jiali/stable-diffusion-1.5", safety_checker=None, controlnet=controlnet
"stable-diffusion-v1-5/stable-diffusion-v1-5", safety_checker=None, controlnet=controlnet
)
pipe.to("cuda")
pipe.set_progress_bar_config(disable=None)
@@ -715,7 +715,7 @@ def test_canny(self):
controlnet = ControlNetModel.from_pretrained("lllyasviel/sd-controlnet-canny")

pipe = StableDiffusionControlNetPipeline.from_pretrained(
"Jiali/stable-diffusion-1.5", safety_checker=None, controlnet=controlnet
"stable-diffusion-v1-5/stable-diffusion-v1-5", safety_checker=None, controlnet=controlnet
)
pipe.enable_model_cpu_offload()
pipe.set_progress_bar_config(disable=None)
@@ -742,7 +742,7 @@ def test_depth(self):
controlnet = ControlNetModel.from_pretrained("lllyasviel/sd-controlnet-depth")

pipe = StableDiffusionControlNetPipeline.from_pretrained(
"Jiali/stable-diffusion-1.5", safety_checker=None, controlnet=controlnet
"stable-diffusion-v1-5/stable-diffusion-v1-5", safety_checker=None, controlnet=controlnet
)
pipe.enable_model_cpu_offload()
pipe.set_progress_bar_config(disable=None)
@@ -769,7 +769,7 @@ def test_hed(self):
controlnet = ControlNetModel.from_pretrained("lllyasviel/sd-controlnet-hed")

pipe = StableDiffusionControlNetPipeline.from_pretrained(
"Jiali/stable-diffusion-1.5", safety_checker=None, controlnet=controlnet
"stable-diffusion-v1-5/stable-diffusion-v1-5", safety_checker=None, controlnet=controlnet
)
pipe.enable_model_cpu_offload()
pipe.set_progress_bar_config(disable=None)
@@ -796,7 +796,7 @@ def test_mlsd(self):
controlnet = ControlNetModel.from_pretrained("lllyasviel/sd-controlnet-mlsd")

pipe = StableDiffusionControlNetPipeline.from_pretrained(
"Jiali/stable-diffusion-1.5", safety_checker=None, controlnet=controlnet
"stable-diffusion-v1-5/stable-diffusion-v1-5", safety_checker=None, controlnet=controlnet
)
pipe.enable_model_cpu_offload()
pipe.set_progress_bar_config(disable=None)
@@ -823,7 +823,7 @@ def test_normal(self):
controlnet = ControlNetModel.from_pretrained("lllyasviel/sd-controlnet-normal")

pipe = StableDiffusionControlNetPipeline.from_pretrained(
"Jiali/stable-diffusion-1.5", safety_checker=None, controlnet=controlnet
"stable-diffusion-v1-5/stable-diffusion-v1-5", safety_checker=None, controlnet=controlnet
)
pipe.enable_model_cpu_offload()
pipe.set_progress_bar_config(disable=None)
@@ -850,7 +850,7 @@ def test_openpose(self):
controlnet = ControlNetModel.from_pretrained("lllyasviel/sd-controlnet-openpose")

pipe = StableDiffusionControlNetPipeline.from_pretrained(
"Jiali/stable-diffusion-1.5", safety_checker=None, controlnet=controlnet
"stable-diffusion-v1-5/stable-diffusion-v1-5", safety_checker=None, controlnet=controlnet
)
pipe.enable_model_cpu_offload()
pipe.set_progress_bar_config(disable=None)
@@ -877,7 +877,7 @@ def test_scribble(self):
controlnet = ControlNetModel.from_pretrained("lllyasviel/sd-controlnet-scribble")

pipe = StableDiffusionControlNetPipeline.from_pretrained(
"Jiali/stable-diffusion-1.5", safety_checker=None, controlnet=controlnet
"stable-diffusion-v1-5/stable-diffusion-v1-5", safety_checker=None, controlnet=controlnet
)
pipe.enable_model_cpu_offload()
pipe.set_progress_bar_config(disable=None)
@@ -904,7 +904,7 @@ def test_seg(self):
controlnet = ControlNetModel.from_pretrained("lllyasviel/sd-controlnet-seg")

pipe = StableDiffusionControlNetPipeline.from_pretrained(
"Jiali/stable-diffusion-1.5", safety_checker=None, controlnet=controlnet
"stable-diffusion-v1-5/stable-diffusion-v1-5", safety_checker=None, controlnet=controlnet
)
pipe.enable_model_cpu_offload()
pipe.set_progress_bar_config(disable=None)
@@ -935,7 +935,7 @@ def test_sequential_cpu_offloading(self):
controlnet = ControlNetModel.from_pretrained("lllyasviel/sd-controlnet-seg")

pipe = StableDiffusionControlNetPipeline.from_pretrained(
"Jiali/stable-diffusion-1.5", safety_checker=None, controlnet=controlnet
"stable-diffusion-v1-5/stable-diffusion-v1-5", safety_checker=None, controlnet=controlnet
)
pipe.set_progress_bar_config(disable=None)
pipe.enable_attention_slicing()
@@ -961,7 +961,7 @@ def test_canny_guess_mode(self):
controlnet = ControlNetModel.from_pretrained("lllyasviel/sd-controlnet-canny")

pipe = StableDiffusionControlNetPipeline.from_pretrained(
"Jiali/stable-diffusion-1.5", safety_checker=None, controlnet=controlnet
"stable-diffusion-v1-5/stable-diffusion-v1-5", safety_checker=None, controlnet=controlnet
)
pipe.enable_model_cpu_offload()
pipe.set_progress_bar_config(disable=None)
@@ -993,7 +993,7 @@ def test_canny_guess_mode_euler(self):
controlnet = ControlNetModel.from_pretrained("lllyasviel/sd-controlnet-canny")

pipe = StableDiffusionControlNetPipeline.from_pretrained(
"Jiali/stable-diffusion-1.5", safety_checker=None, controlnet=controlnet
"stable-diffusion-v1-5/stable-diffusion-v1-5", safety_checker=None, controlnet=controlnet
)
pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config)
pipe.enable_model_cpu_offload()
@@ -1035,7 +1035,7 @@ def test_v11_shuffle_global_pool_conditions(self):
controlnet = ControlNetModel.from_pretrained("lllyasviel/control_v11e_sd15_shuffle")

pipe = StableDiffusionControlNetPipeline.from_pretrained(
"Jiali/stable-diffusion-1.5", safety_checker=None, controlnet=controlnet
"stable-diffusion-v1-5/stable-diffusion-v1-5", safety_checker=None, controlnet=controlnet
)
pipe.enable_model_cpu_offload()
pipe.set_progress_bar_config(disable=None)
@@ -1081,7 +1081,9 @@ def test_pose_and_canny(self):
controlnet_pose = ControlNetModel.from_pretrained("lllyasviel/sd-controlnet-openpose")

pipe = StableDiffusionControlNetPipeline.from_pretrained(
"Jiali/stable-diffusion-1.5", safety_checker=None, controlnet=[controlnet_pose, controlnet_canny]
"stable-diffusion-v1-5/stable-diffusion-v1-5",
safety_checker=None,
controlnet=[controlnet_pose, controlnet_canny],
)
pipe.enable_model_cpu_offload()
pipe.set_progress_bar_config(disable=None)
2 changes: 1 addition & 1 deletion tests/pipelines/controlnet/test_controlnet_img2img.py
@@ -407,7 +407,7 @@ def test_canny(self):
controlnet = ControlNetModel.from_pretrained("lllyasviel/sd-controlnet-canny")

pipe = StableDiffusionControlNetImg2ImgPipeline.from_pretrained(
"Jiali/stable-diffusion-1.5", safety_checker=None, controlnet=controlnet
"stable-diffusion-v1-5/stable-diffusion-v1-5", safety_checker=None, controlnet=controlnet
)
pipe.enable_model_cpu_offload()
pipe.set_progress_bar_config(disable=None)
2 changes: 1 addition & 1 deletion tests/pipelines/controlnet/test_controlnet_inpaint.py
@@ -504,7 +504,7 @@ def test_inpaint(self):
controlnet = ControlNetModel.from_pretrained("lllyasviel/control_v11p_sd15_inpaint")

pipe = StableDiffusionControlNetInpaintPipeline.from_pretrained(
"Jiali/stable-diffusion-1.5", safety_checker=None, controlnet=controlnet
"stable-diffusion-v1-5/stable-diffusion-v1-5", safety_checker=None, controlnet=controlnet
)
pipe.scheduler = DDIMScheduler.from_config(pipe.scheduler.config)
pipe.enable_model_cpu_offload()
4 changes: 2 additions & 2 deletions tests/pipelines/controlnet/test_flax_controlnet.py
@@ -41,7 +41,7 @@ def test_canny(self):
"lllyasviel/sd-controlnet-canny", from_pt=True, dtype=jnp.bfloat16
)
pipe, params = FlaxStableDiffusionControlNetPipeline.from_pretrained(
"Jiali/stable-diffusion-1.5", controlnet=controlnet, from_pt=True, dtype=jnp.bfloat16
"stable-diffusion-v1-5/stable-diffusion-v1-5", controlnet=controlnet, from_pt=True, dtype=jnp.bfloat16
)
params["controlnet"] = controlnet_params

@@ -86,7 +86,7 @@ def test_pose(self):
"lllyasviel/sd-controlnet-openpose", from_pt=True, dtype=jnp.bfloat16
)
pipe, params = FlaxStableDiffusionControlNetPipeline.from_pretrained(
"Jiali/stable-diffusion-1.5", controlnet=controlnet, from_pt=True, dtype=jnp.bfloat16
"stable-diffusion-v1-5/stable-diffusion-v1-5", controlnet=controlnet, from_pt=True, dtype=jnp.bfloat16
)
params["controlnet"] = controlnet_params
