
Commit a0369b4

Merge pull request #185 from intel/update_sdengine_init
Update StableDiffusionEngine initialization
2 parents 9991174 + 52af45a commit a0369b4

File tree

5 files changed (+51, -24 lines)


Docs/user_guide_for_windows_users.md

Lines changed: 3 additions & 3 deletions

@@ -21,7 +21,7 @@
 - python 3.9-3.12
 - Note: This document will use python 3.9.13 as an example.
 - VC runtime
-- [GIMP 3.0.2](https://download.gimp.org/gimp/v3.0/windows/gimp-3.0.2-setup-1.exe)
+- [GIMP 3.0.4](https://download.gimp.org/gimp/v3.0/windows/gimp-3.0.4-setup.exe)
 - [GIMP AI plugins with OpenVINO™ Backend](https://github.com/intel/openvino-ai-plugins-gimp) from Github.
 
 

@@ -72,9 +72,9 @@ Please download the latest Visual C++ Redistributable package from MSFT [site](h
 ![](figs/VC_runtime_close.png)
 
 
-### Install GIMP 3.0.2
+### Install GIMP 3.0.4
 
-Please download [GIMP 3.0.2](https://download.gimp.org/gimp/v3.0/windows/gimp-3.0.2-setup-1.exe) and follow below steps to install GIMP.
+Please download [GIMP 3.0.4](https://download.gimp.org/gimp/v3.0/windows/gimp-3.0.4-setup.exe) and follow below steps to install GIMP.
 
 - Click "Install for all users (recommended)"

gimpopenvino/plugins/openvino_utils/tools/model_manager.py

Lines changed: 0 additions & 2 deletions

@@ -515,8 +515,6 @@ def is_model_installed(self, model_id):
             "best performance" : ["GPU","GPU","GPU"]
         }
 
-
-
         npu_is_available = self._npu_is_available
         npu_arch = self._npu_arch

gimpopenvino/plugins/openvino_utils/tools/openvino_common/models_ov/stable_diffusion_engine.py

Lines changed: 12 additions & 9 deletions

@@ -120,7 +120,7 @@ def __init__(self, model="runwayml/stable-diffusion-v1-5",
        if "NPU" in device:
            try_enable_npu_turbo(device, self.core)
 
-        print("Loading models... ")
+        print("Loading models ... int8 ")
 
 
 

@@ -455,15 +455,15 @@ def __init__(
        self,
        model="bes-dev/stable-diffusion-v1-4-openvino",
        tokenizer="openai/clip-vit-large-patch14",
-        device=["CPU","CPU","CPU","CPU"]):
+        device=["CPU","CPU","CPU","CPU"], model_name="fp16"):
 
        self.core = Core()
        self.core.set_property({'CACHE_DIR': os.path.join(model, 'cache')})
 
        batch_size = 2 if device[1] == device[2] and device[1] == "GPU" else 1
 
        # if 'int8' is in model, then we are using unet_int8a16 model, and for this we will always use batch size 1.
-        if "int8" in model:
+        if "int8" in model_name:
            batch_size = 1
 
        self.batch_size = batch_size

@@ -477,21 +477,24 @@ def __init__(
        self.tokenizer = CLIPTokenizer.from_pretrained(tokenizer)
        self.tokenizer.save_pretrained(model)
 
-        print("Loading models... ")
+
 
        with concurrent.futures.ThreadPoolExecutor(max_workers=8) as executor:
            text_future = executor.submit(self.load_model, model, "text_encoder", device[0])
            vae_de_future = executor.submit(self.load_model, model, "vae_decoder", device[3])
            vae_en_future = executor.submit(self.load_model, model, "vae_encoder", device[3])
 
            if self.batch_size == 1:
-                if "int8" not in model:
-                    unet_future = executor.submit(self.load_model, model, "unet_bs1", device[1])
-                    unet_neg_future = executor.submit(self.load_model, model, "unet_bs1", device[2]) if device[1] != device[2] else None
-                else:
-                    unet_future = executor.submit(self.load_model, model, "unet_int8a16", device[1])
+                if "int8a16" in model_name:
+                    print("Loading models ... int8a16")
+                    unet_future = executor.submit(self.load_model, model, "unet_int8a16", device[1])
                    unet_neg_future = executor.submit(self.load_model, model, "unet_int8a16", device[2]) if device[1] != device[2] else None
+                else:
+                    print("Loading models ... fp16 bs1")
+                    unet_future = executor.submit(self.load_model, model, "unet_bs1", device[1])
+                    unet_neg_future = executor.submit(self.load_model, model, "unet_bs1", device[2]) if device[1] != device[2] else None
            else:
+                print("Loading models ... fp16")
                unet_future = executor.submit(self.load_model, model, "unet", device[1])
                unet_neg_future = None
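For orientation, the second and third hunks above move the UNet-variant selection from the weights path to the new model_name argument (default "fp16"): an "int8" model_name forces batch size 1, and an "int8a16" model_name loads unet_int8a16 instead of the fp16 unet / unet_bs1 models. A minimal call-site sketch, assuming the module is imported the same way stable_diffusion_ov_server.py uses it; the model path, device list, and model name below are illustrative placeholders, not values taken from this commit:

```python
import stable_diffusion_engine  # assumed importable as in stable_diffusion_ov_server.py

# Hypothetical values for illustration only.
engine = stable_diffusion_engine.StableDiffusionEngine(
    model=r"C:\stable-diffusion\model-weights\sd-1.5-square-int8",  # placeholder weights dir
    device=["CPU", "GPU", "GPU", "GPU"],   # text encoder, unet, unet_neg, vae
    model_name="sd_1.5_square_int8a16",    # "int8a16" -> unet_int8a16 model, batch size 1
)
# With the default model_name="fp16", the fp16 "unet" (or "unet_bs1") variant is loaded instead.
```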

gimpopenvino/plugins/openvino_utils/tools/stable_diffusion_ov_server.py

Lines changed: 1 addition & 1 deletion

@@ -217,7 +217,7 @@ def initialize_engine(model_name, model_path, device_list):
        return controlnet_openpose.ControlNetOpenPose(model=model_path, device=device_list)
    if model_name == "controlnet_referenceonly":
        return stable_diffusion_engine.StableDiffusionEngineReferenceOnly(model=model_path, device=device_list)
-    return stable_diffusion_engine.StableDiffusionEngine(model=model_path, device=device_list)
+    return stable_diffusion_engine.StableDiffusionEngine(model=model_path, device=device_list, model_name=model_name)
 
 def handle_client_data(data, conn, engine, model_name, model_path, scheduler):
    if data.decode() == "kill":
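The server-side change is the counterpart: initialize_engine already receives model_name, and its fallthrough case now forwards it so the engine constructor above can pick the matching UNet. A sketch of what the fallthrough resolves to, with hypothetical argument values:

```python
# Hypothetical call for a model name not matched by the earlier branches.
engine = initialize_engine(
    model_name="sd_1.5_square",                                       # placeholder name
    model_path=r"C:\stable-diffusion\model-weights\sd-1.5-square",    # placeholder path
    device_list=["CPU", "GPU", "GPU", "GPU"],
)
# ...which now ends in:
# stable_diffusion_engine.StableDiffusionEngine(model=model_path, device=device_list, model_name=model_name)
```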

testscases/StableDiffusion/stable_diffusion_engine_tc.py

Lines changed: 35 additions & 9 deletions

@@ -123,27 +123,21 @@ def print_system_info():
 
 def initialize_engine(model_name, model_path, device_list):
    if model_name == "sd_1.5_square_int8":
-        log.info('Device list: %s', device_list)
        return stable_diffusion_engine.StableDiffusionEngineAdvanced(model=model_path, device=device_list)
    if model_name == "sd_3.0_square":
        device_list = ["GPU"]
-        log.info('Device list: %s', device_list)
        return stable_diffusion_3.StableDiffusionThreeEngine(model=model_path, device=device_list)
    if model_name == "sd_1.5_inpainting":
        return stable_diffusion_engine_inpainting_genai.StableDiffusionEngineInpaintingGenai(model=model_path, device=device_list[0])
    if model_name in ("sd_1.5_square_lcm","sdxl_base_1.0_square","sdxl_turbo_square","sd_3.0_med_diffuser_square","sd_3.5_med_turbo_square"):
        return stable_diffusion_engine_genai.StableDiffusionEngineGenai(model=model_path,model_name=model_name,device=device_list)
    if model_name == "sd_1.5_inpainting_int8":
-        log.info('Advanced Inpainting Device list: %s', device_list)
        return stable_diffusion_engine_inpainting_advanced.StableDiffusionEngineInpaintingAdvanced(model=model_path, device=device_list)
    if model_name == "controlnet_openpose_int8":
-        log.info('Device list: %s', device_list)
        return controlnet_openpose_advanced.ControlNetOpenPoseAdvanced(model=model_path, device=device_list)
    if model_name == "controlnet_canny_int8":
-        log.info('Device list: %s', device_list)
        return controlnet_cannyedge_advanced.ControlNetCannyEdgeAdvanced(model=model_path, device=device_list)
    if model_name == "controlnet_scribble_int8":
-        log.info('Device list: %s', device_list)
        return controlnet_scribble.ControlNetScribbleAdvanced(model=model_path, device=device_list)
    if model_name == "controlnet_canny":
        return controlnet_canny_edge.ControlNetCannyEdge(model=model_path, device=device_list)

@@ -153,14 +147,16 @@ def initialize_engine(model_name, model_path, device_list):
        return controlnet_openpose.ControlNetOpenPose(model=model_path, device=device_list)
    if model_name == "controlnet_referenceonly":
        return stable_diffusion_engine.StableDiffusionEngineReferenceOnly(model=model_path, device=device_list)
-    return stable_diffusion_engine.StableDiffusionEngine(model=model_path, device=device_list)
+    return stable_diffusion_engine.StableDiffusionEngine(model=model_path, device=device_list, model_name=model_name)
 
 def parse_args() -> argparse.Namespace:
    """Parse and return command line arguments."""
    parser = argparse.ArgumentParser(add_help=False, formatter_class=argparse.RawTextHelpFormatter)
    args = parser.add_argument_group('Options')
    args.add_argument('-h', '--help', action = 'help',
                      help='Show this help message and exit.')
+    args.add_argument('-l', '--list', action = 'store_true',
+                      help='Show list of models currently installed.')
    # base path to models
    args.add_argument('-bp','--model_base_path',type = str, default = None, required = False,
                      help='Optional. Specify the absolute base path to model weights. \nUsage example: -bp \\stable-diffusion\\model-weights\\')

@@ -194,8 +190,6 @@ def parse_args() -> argparse.Namespace:
    # guidance scale
    args.add_argument('-g','--guidance_scale',type = float, default = 7.5, required = False,
                      help='Optional. Affects how closely the image prompt is followed.')
-
-
    # power mode
    args.add_argument('-pm','--power_mode',type = str, default = "best performance", required = False,
                      help='Optional. Specify the power mode. Default is best performance')

@@ -209,6 +203,32 @@ def parse_args() -> argparse.Namespace:
 
    return parser.parse_args()
 
+def validate_model_paths(base_path: str, model_paths: dict) -> dict:
+    """
+    Check if model directories exist based on base_path and model_paths structure.
+
+    Args:
+        base_path (str): Root directory where models are stored.
+        model_paths (dict): Dictionary with model keys and relative path parts.
+
+    Returns:
+        dict: Dictionary with model names and a boolean indicating existence.
+    """
+    results = {}
+    for model_name, relative_parts in model_paths.items():
+        full_path = os.path.join(base_path, *relative_parts)
+        if os.path.isdir(full_path):
+            if "int8a16" in model_name:
+                if os.path.isfile(os.path.join(full_path, "unet_int8a16.xml")):
+                    results[model_name] = full_path
+            elif "fp8" in model_name:
+                if os.path.isfile(os.path.join(full_path, "unet_fp8.xml")):
+                    results[model_name] = full_path
+            else:
+                results[model_name] = full_path
+    return results
+
+
 def main():
    args = parse_args()
    results = []

@@ -253,6 +273,12 @@ def main():
        "controlnet_scribble_int8": ["stable-diffusion-ov", "controlnet-scribble-int8"],
    }
 
+    if args.list:
+        print(f"\nInstalled models: ")
+        for key in validate_model_paths(weight_path, model_paths).keys():
+            print(f"{key}")
+        exit()
+
    model_name = args.model_name
    model_path = os.path.join(weight_path, *model_paths.get(model_name))
    model_config_file_name = os.path.join(model_path, "config.json")
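Taken together, the test-case changes add a -l/--list flag backed by the new validate_model_paths helper: it reports only the models whose directories exist under the weights path, and for int8a16/fp8 variants it additionally requires the matching quantized UNet .xml file. A minimal sketch of the listing path, assuming validate_model_paths from the diff above is in scope; the weights path and the first model_paths entry are illustrative placeholders:

```python
import os

# Equivalent CLI entry point: python stable_diffusion_engine_tc.py -l
weight_path = os.path.join(os.path.expanduser("~"), "model-weights")  # placeholder root
model_paths = {
    "sd_1.5_square": ["stable-diffusion-ov", "stable-diffusion-1.5", "square"],       # placeholder entry
    "controlnet_scribble_int8": ["stable-diffusion-ov", "controlnet-scribble-int8"],  # entry from the diff
}

# Only models whose directories (and, for int8a16/fp8 names, quantized UNet files) exist are returned.
for name in validate_model_paths(weight_path, model_paths):
    print(name)
```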
