Skip to content

Commit 6ba2b76

Browse files
committed
fix
Signed-off-by: Raphael Glon <[email protected]>
1 parent 72787a5 commit 6ba2b76

File tree

1 file changed

+4
-4
lines changed

1 file changed

+4
-4
lines changed

src/huggingface_inference_toolkit/diffusers_utils.py

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -70,7 +70,7 @@ def load_optimum_diffusion_pipeline(task, model_dir):
7070
pipeline_class_name = config['_class_name']
7171

7272
logger.debug("Repository pipeline class name %s", pipeline_class_name)
73-
if pipeline_class_name.contains("Diffusion") and pipeline_class_name.contains("XL"):
73+
if "Diffusion" in pipeline_class_name and "XL" in pipeline_class_name:
7474
if task == "image-to-image":
7575
pipeline_class = neuron.NeuronStableDiffusionXLImg2ImgPipeline
7676
else:
@@ -84,7 +84,7 @@ def load_optimum_diffusion_pipeline(task, model_dir):
8484
logger.debug("Pipeline class %s", pipeline_class.__class__)
8585

8686
# if is neuron model, no need for additional kwargs
87-
if pipeline_class_name.contains("Neuron"):
87+
if "Neuron" in pipeline_class_name:
8888
kwargs = {}
8989
else:
9090
# Model will be compiled and exported on the flight as the cached models cause a performance drop
@@ -99,8 +99,8 @@ def load_optimum_diffusion_pipeline(task, model_dir):
9999
"data_parallel_mode": os.environ.get("DATA_PARALLEL_MODE", "unet")
100100
}
101101
input_shapes = {"batch_size": 1,
102-
"height": int(os.environ("IMAGE_HEIGHT", 512)),
103-
"width": int(os.environ("IMAGE_WIDTH", 512))}
102+
"height": int(os.environ.get("IMAGE_HEIGHT", 512)),
103+
"width": int(os.environ.get("IMAGE_WIDTH", 512))}
104104
kwargs = {**compiler_args, **input_shapes, "export": True}
105105

106106
# In the second case, exporting can take a huge amount of time, which makes endpoints not a really suited solution

0 commit comments

Comments (0)