
Commit 01b6b2c

Remove unneeded pragma: lax no cover (#2177)
1 parent 2b7899b

14 files changed: +23 −23 lines changed

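For context on the change itself: `# pragma: no cover` is coverage.py's stock line-exclusion marker, matched by an `exclude_lines` regex in the coverage configuration, while `pragma: lax no cover` is a project-specific variant. The sketch below shows how such a custom marker is typically wired up; the exact regexes and the lax/strict split are assumptions about this repo's setup, not taken from the commit.

# Hypothetical coverage.py wiring (assumed, not from this commit).
# A pyproject.toml would carry something like:
#
#   [tool.coverage.report]
#   exclude_lines = [
#       'pragma: no cover',      # stock marker, always excluded
#       'pragma: lax no cover',  # custom marker, excluded only in lax runs
#   ]
#
# With that config, coverage.py ignores any line carrying a matching marker:

def wrap_or_reraise(e: Exception, status_code: int) -> None:
    if status_code >= 400:
        raise RuntimeError(f'HTTP {status_code}') from e
    raise e  # pragma: no cover  (believed unreachable under tests)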

pydantic_ai_slim/pydantic_ai/_output.py

Lines changed: 5 additions & 5 deletions
@@ -664,7 +664,7 @@ async def process(
                 )
                 raise ToolRetryError(m) from e
             else:
-                raise  # pragma: lax no cover
+                raise
 
         if k := self.outer_typed_dict_key:
             output = output[k]
@@ -679,7 +679,7 @@ async def process(
                 )
                 raise ToolRetryError(m) from r
             else:
-                raise  # pragma: lax no cover
+                raise
 
         return output
@@ -849,7 +849,7 @@ async def process(
                 )
                 raise ToolRetryError(m) from r
             else:
-                raise  # pragma: lax no cover
+                raise  # pragma: no cover
 
         return cast(OutputDataT, output)
@@ -908,7 +908,7 @@ async def process(
                 )
                 raise ToolRetryError(m) from e
             else:
-                raise  # pragma: lax no cover
+                raise  # pragma: no cover
         except ModelRetry as r:
             if wrap_validation_errors:
                 m = _messages.RetryPromptPart(
@@ -918,7 +918,7 @@ async def process(
                 )
                 raise ToolRetryError(m) from r
             else:
-                raise  # pragma: lax no cover
+                raise  # pragma: no cover
         else:
             return output

pydantic_ai_slim/pydantic_ai/exceptions.py

Lines changed: 2 additions & 2 deletions
@@ -4,9 +4,9 @@
 import sys
 
 if sys.version_info < (3, 11):
-    from exceptiongroup import ExceptionGroup  # pragma: lax no cover
+    from exceptiongroup import ExceptionGroup
 else:
-    ExceptionGroup = ExceptionGroup  # pragma: lax no cover
+    ExceptionGroup = ExceptionGroup
 
 __all__ = (
     'ModelRetry',
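The hunk above keeps `ExceptionGroup` importable from this module on every supported Python: the `exceptiongroup` backport provides it before 3.11, and the self-assignment re-exports the 3.11+ builtin. A minimal usage sketch of that version-agnostic name (illustrative, not from the diff):

import sys

if sys.version_info < (3, 11):
    from exceptiongroup import ExceptionGroup  # PyPI backport package
else:
    ExceptionGroup = ExceptionGroup  # re-export the builtin at module scope

try:
    raise ExceptionGroup('two failures', [ValueError('a'), KeyError('b')])
except ExceptionGroup as eg:
    for exc in eg.exceptions:
        print(type(exc).__name__)  # ValueError, KeyError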

pydantic_ai_slim/pydantic_ai/models/anthropic.py

Lines changed: 1 addition & 1 deletion
@@ -256,7 +256,7 @@ async def _messages_create(
         except APIStatusError as e:
             if (status_code := e.status_code) >= 400:
                 raise ModelHTTPError(status_code=status_code, model_name=self.model_name, body=e.body) from e
-            raise  # pragma: lax no cover
+            raise  # pragma: no cover
 
     def _process_response(self, response: BetaMessage) -> ModelResponse:
         """Process a non-streamed response, and prepare a message to return."""

pydantic_ai_slim/pydantic_ai/models/bedrock.py

Lines changed: 1 addition & 1 deletion
@@ -663,4 +663,4 @@ async def __anext__(self) -> T:
         if type(e.__cause__) is StopIteration:
             raise StopAsyncIteration
         else:
-            raise e  # pragma: lax no cover
+            raise e  # pragma: no cover
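Background for the `e.__cause__` check above: Bedrock's streaming responses are synchronous iterators, and when `next()` is driven from async code the `StopIteration` signalling exhaustion cannot propagate through a coroutine frame, so it surfaces as a `RuntimeError` whose `__cause__` is the original `StopIteration`. A minimal sketch of that wrapper shape, not the pydantic-ai implementation (the thread offloading and names are assumptions):

from collections.abc import AsyncIterator, Iterator
from typing import TypeVar

import anyio.to_thread

T = TypeVar('T')


async def aiter_sync(it: Iterator[T]) -> AsyncIterator[T]:
    while True:
        try:
            # next() raising StopIteration in the worker thread reaches us
            # re-wrapped, because StopIteration cannot cross an await.
            yield await anyio.to_thread.run_sync(next, it)
        except RuntimeError as e:
            if type(e.__cause__) is StopIteration:
                return  # normal exhaustion of the underlying iterator
            raise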

pydantic_ai_slim/pydantic_ai/models/cohere.py

Lines changed: 1 addition & 1 deletion
@@ -183,7 +183,7 @@ async def _chat(
         except ApiError as e:
             if (status_code := e.status_code) and status_code >= 400:
                 raise ModelHTTPError(status_code=status_code, model_name=self.model_name, body=e.body) from e
-            raise  # pragma: lax no cover
+            raise  # pragma: no cover
 
     def _process_response(self, response: ChatResponse) -> ModelResponse:
         """Process a non-streamed response, and prepare a message to return."""

pydantic_ai_slim/pydantic_ai/models/gemini.py

Lines changed: 2 additions & 2 deletions
@@ -253,7 +253,7 @@ async def _make_request(
 
         if gemini_labels := model_settings.get('gemini_labels'):
             if self._system == 'google-vertex':
-                request_data['labels'] = gemini_labels  # pragma: lax no cover
+                request_data['labels'] = gemini_labels
 
         headers = {'Content-Type': 'application/json', 'User-Agent': get_user_agent()}
         url = f'/{self._model_name}:{"streamGenerateContent" if streamed else "generateContent"}'
@@ -415,7 +415,7 @@ def _settings_to_generation_config(model_settings: GeminiModelSettings) -> _Gemi
     if (frequency_penalty := model_settings.get('frequency_penalty')) is not None:
         config['frequency_penalty'] = frequency_penalty
     if (thinkingConfig := model_settings.get('gemini_thinking_config')) is not None:
-        config['thinking_config'] = thinkingConfig  # pragma: lax no cover
+        config['thinking_config'] = thinkingConfig
     return config

pydantic_ai_slim/pydantic_ai/models/google.py

Lines changed: 1 addition & 1 deletion
@@ -166,7 +166,7 @@ def __init__(
         self._model_name = model_name
 
         if isinstance(provider, str):
-            provider = GoogleProvider(vertexai=provider == 'google-vertex')  # pragma: lax no cover
+            provider = GoogleProvider(vertexai=provider == 'google-vertex')
 
         self._provider = provider
         self._system = provider.name

pydantic_ai_slim/pydantic_ai/models/groq.py

Lines changed: 1 addition & 1 deletion
@@ -248,7 +248,7 @@ async def _completions_create(
         except APIStatusError as e:
             if (status_code := e.status_code) >= 400:
                 raise ModelHTTPError(status_code=status_code, model_name=self.model_name, body=e.body) from e
-            raise  # pragma: lax no cover
+            raise  # pragma: no cover
 
     def _process_response(self, response: chat.ChatCompletion) -> ModelResponse:
         """Process a non-streamed response, and prepare a message to return."""

pydantic_ai_slim/pydantic_ai/models/instrumented.py

Lines changed: 1 addition & 1 deletion
@@ -138,7 +138,7 @@ def __init__(
                 **tokens_histogram_kwargs,
                 explicit_bucket_boundaries_advisory=TOKEN_HISTOGRAM_BOUNDARIES,
             )
-        except TypeError:  # pragma: lax no cover
+        except TypeError:
            # Older OTel/logfire versions don't support explicit_bucket_boundaries_advisory
             self.tokens_histogram = self.meter.create_histogram(
                 **tokens_histogram_kwargs,  # pyright: ignore
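The surrounding code is feature detection by `TypeError`: call `create_histogram` with the newer `explicit_bucket_boundaries_advisory` keyword first, and retry without it when an older OpenTelemetry SDK rejects the unknown argument. A generic sketch of the idiom (names are illustrative):

def make_token_histogram(meter, boundaries: list[float]):
    kwargs = {'name': 'gen_ai.client.token.usage', 'unit': '{token}'}
    try:
        # Newer OTel SDKs accept bucket-boundary advice at creation time.
        return meter.create_histogram(
            **kwargs, explicit_bucket_boundaries_advisory=boundaries
        )
    except TypeError:
        # Older SDKs reject the unknown keyword; create it without advice.
        return meter.create_histogram(**kwargs)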

pydantic_ai_slim/pydantic_ai/models/mistral.py

Lines changed: 2 additions & 2 deletions
@@ -75,7 +75,7 @@
     from mistralai.models.usermessage import UserMessage as MistralUserMessage
     from mistralai.types.basemodel import Unset as MistralUnset
     from mistralai.utils.eventstreaming import EventStreamAsync as MistralEventStreamAsync
-except ImportError as e:  # pragma: lax no cover
+except ImportError as e:  # pragma: no cover
     raise ImportError(
         'Please install `mistral` to use the Mistral model, '
         'you can use the `mistral` optional group — `pip install "pydantic-ai-slim[mistral]"`'
@@ -217,7 +217,7 @@ async def _completions_create(
         except SDKError as e:
             if (status_code := e.status_code) >= 400:
                 raise ModelHTTPError(status_code=status_code, model_name=self.model_name, body=e.body) from e
-            raise  # pragma: lax no cover
+            raise  # pragma: no cover
 
         assert response, 'A unexpected empty response from Mistral.'
         return response
