from typing import Optional, Union

from huggingface_hub import HfApi, login, snapshot_download
from transformers import WhisperForConditionalGeneration, pipeline
from transformers.file_utils import is_tf_available, is_torch_available
from transformers.pipelines import Pipeline
    is_diffusers_available,
)
from huggingface_inference_toolkit.logging import logger
from huggingface_inference_toolkit.optimum_utils import (
    get_optimum_neuron_pipeline,
    is_optimum_neuron_available,
)
from huggingface_inference_toolkit.sentence_transformers_utils import (
    get_sentence_transformers_pipeline,
    is_sentence_transformers_available,
)
from huggingface_inference_toolkit.utils import create_artifact_filter


def load_repository_from_hf(
    # NOTE(review): intervening lines (bodies of load_repository_from_hf and most of
    # get_pipeline) are elided in this excerpt — only the diff hunk tails were captured.
        hf_pipeline.model.config.forced_decoder_ids = hf_pipeline.tokenizer.get_decoder_prompt_ids(
            language="english", task="transcribe"
        )
    return hf_pipeline  # type: ignore