pydantic_ai_slim/pydantic_ai/models: 2 files changed, +7 -3 lines changed

First changed file:

@@ -1,6 +1,8 @@
 # There are linting and coverage escapes for MLXLM and VLLMOffline as the CI would not contain the right
 # environment to be able to run the associated tests

+# pyright: reportUnnecessaryTypeIgnoreComment = false
+
 from __future__ import annotations

 import io
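
The new "# pyright: reportUnnecessaryTypeIgnoreComment = false" line is a file-level pyright directive. The type-ignore comments in these modules are needed in CI, where the optional dependencies do not resolve, but pyright would flag them as unnecessary in an environment where the dependencies are installed; disabling the rule per file keeps both environments quiet. A minimal illustrative sketch follows; optional_backend is a hypothetical package, not part of pydantic_ai:

# Sketch only: the file-level directive disables pyright's check that flags
# type-ignore comments which are redundant in the current environment.
# pyright: reportUnnecessaryTypeIgnoreComment = false

try:
    import optional_backend  # type: ignore[reportMissingImports]  # hypothetical optional dependency
except ImportError:
    optional_backend = None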

Second changed file:

@@ -1,6 +1,8 @@
 # There are linting escapes for vllm offline as the CI would not contain the right
 # environment to load the associated dependencies

+# pyright: reportUnnecessaryTypeIgnoreComment = false
+
 from __future__ import annotations as _annotations

 import json
@@ -50,13 +52,13 @@
 with try_import() as llama_cpp_imports_successful:
     import llama_cpp

-with try_import() as vllm_imports_successful:  # pragma: no cover
+with try_import() as vllm_imports_successful:
     import vllm  # type: ignore[reportMissingImports]

     # We try to load the vllm model to ensure it is available
-    try:
+    try:  # pragma: no cover
         vllm.LLM('microsoft/Phi-3-mini-4k-instruct')  # type: ignore
-    except RuntimeError as e:
+    except RuntimeError as e:  # pragma: no cover
         if 'Found no NVIDIA driver' in str(e):
             # Treat as import failure
             raise ImportError('CUDA/NVIDIA driver not available') from e
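
The pragma placement change keeps coverage honest: the with try_import() line itself executes in CI, so it no longer needs a no-cover escape, while the try/except that loads a vllm model only runs when vllm actually imports, so those lines keep the pragma. Below is a minimal sketch of a try_import-style helper; this is an assumption for illustration, not pydantic_ai's actual implementation, showing how a body that raises ImportError (including the re-raised NVIDIA-driver case above) can be treated as a failed optional import.

# Sketch only, not pydantic_ai's real try_import: a context manager that
# swallows ImportError raised in its body and yields a callable which later
# reports whether the optional imports succeeded.
from collections.abc import Callable, Iterator
from contextlib import contextmanager


@contextmanager
def try_import() -> Iterator[Callable[[], bool]]:
    success = False

    def imports_successful() -> bool:
        return success

    try:
        yield imports_successful
    except ImportError:
        # Optional dependency unavailable; leave imports_successful() at False.
        pass
    else:
        success = True


with try_import() as vllm_imports_successful:
    raise ImportError('simulating a missing optional dependency')

print(vllm_imports_successful())  # False: the ImportError was swallowed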