
Commit d940e66

Author: meorphis

chore: format

1 parent 1a66126 · commit d940e66

File tree: 4 files changed (+36, -101 lines)


README.md

Lines changed: 3 additions & 2 deletions

@@ -32,7 +32,9 @@ api_client = GradientAI(
     api_key=os.environ.get("GRADIENTAI_API_KEY"),  # This is the default and can be omitted
 )
 inference_client = GradientAI(
-    inference_key=os.environ.get("GRADIENTAI_INFERENCE_KEY"),  # This is the default and can be omitted
+    inference_key=os.environ.get(
+        "GRADIENTAI_INFERENCE_KEY"
+    ),  # This is the default and can be omitted
 )
 agent_client = GradientAI(
     agent_key=os.environ.get("GRADIENTAI_AGENT_KEY"),  # This is the default and can be omitted
@@ -51,7 +53,6 @@ completion = inference_client.chat.completions.create(
 )
 
 print(completion.choices[0].message)
-
 ```
 
 While you can provide an `api_key`, `inference_key` keyword argument,
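As a reading aid, here is the README quickstart assembled from the hunks above, as it would read after this commit. The `import` lines are assumptions (the diff shows only the changed region); the client constructors and environment-variable names are taken verbatim from the diff.

```python
import os

from gradientai import GradientAI  # assumed import path, based on the src/gradientai package name

api_client = GradientAI(
    api_key=os.environ.get("GRADIENTAI_API_KEY"),  # This is the default and can be omitted
)
inference_client = GradientAI(
    inference_key=os.environ.get(
        "GRADIENTAI_INFERENCE_KEY"
    ),  # This is the default and can be omitted
)
agent_client = GradientAI(
    agent_key=os.environ.get("GRADIENTAI_AGENT_KEY"),  # This is the default and can be omitted
)
```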

src/gradientai/_client.py

Lines changed: 16 additions & 48 deletions

@@ -294,9 +294,7 @@ def default_headers(self) -> dict[str, str | Omit]:
 
     @override
     def _validate_headers(self, headers: Headers, custom_headers: Headers) -> None:
-        if (self.api_key or self.agent_key or self.inference_key) and headers.get(
-            "Authorization"
-        ):
+        if (self.api_key or self.agent_key or self.inference_key) and headers.get("Authorization"):
             return
         if isinstance(custom_headers.get("Authorization"), Omit):
             return
@@ -326,14 +324,10 @@ def copy(
         Create a new client instance re-using the same options given to the current client with optional overriding.
         """
         if default_headers is not None and set_default_headers is not None:
-            raise ValueError(
-                "The `default_headers` and `set_default_headers` arguments are mutually exclusive"
-            )
+            raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive")
 
         if default_query is not None and set_default_query is not None:
-            raise ValueError(
-                "The `default_query` and `set_default_query` arguments are mutually exclusive"
-            )
+            raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive")
 
         headers = self._custom_headers
         if default_headers is not None:
@@ -380,14 +374,10 @@ def _make_status_error(
             return _exceptions.BadRequestError(err_msg, response=response, body=body)
 
         if response.status_code == 401:
-            return _exceptions.AuthenticationError(
-                err_msg, response=response, body=body
-            )
+            return _exceptions.AuthenticationError(err_msg, response=response, body=body)
 
         if response.status_code == 403:
-            return _exceptions.PermissionDeniedError(
-                err_msg, response=response, body=body
-            )
+            return _exceptions.PermissionDeniedError(err_msg, response=response, body=body)
 
         if response.status_code == 404:
             return _exceptions.NotFoundError(err_msg, response=response, body=body)
@@ -396,17 +386,13 @@ def _make_status_error(
             return _exceptions.ConflictError(err_msg, response=response, body=body)
 
         if response.status_code == 422:
-            return _exceptions.UnprocessableEntityError(
-                err_msg, response=response, body=body
-            )
+            return _exceptions.UnprocessableEntityError(err_msg, response=response, body=body)
 
         if response.status_code == 429:
             return _exceptions.RateLimitError(err_msg, response=response, body=body)
 
         if response.status_code >= 500:
-            return _exceptions.InternalServerError(
-                err_msg, response=response, body=body
-            )
+            return _exceptions.InternalServerError(err_msg, response=response, body=body)
         return APIStatusError(err_msg, response=response, body=body)
 
 
@@ -618,9 +604,7 @@ def default_headers(self) -> dict[str, str | Omit]:
 
     @override
     def _validate_headers(self, headers: Headers, custom_headers: Headers) -> None:
-        if (self.api_key or self.agent_key or self.inference_key) and headers.get(
-            "Authorization"
-        ):
+        if (self.api_key or self.agent_key or self.inference_key) and headers.get("Authorization"):
             return
         if isinstance(custom_headers.get("Authorization"), Omit):
             return
@@ -650,14 +634,10 @@ def copy(
         Create a new client instance re-using the same options given to the current client with optional overriding.
         """
         if default_headers is not None and set_default_headers is not None:
-            raise ValueError(
-                "The `default_headers` and `set_default_headers` arguments are mutually exclusive"
-            )
+            raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive")
 
         if default_query is not None and set_default_query is not None:
-            raise ValueError(
-                "The `default_query` and `set_default_query` arguments are mutually exclusive"
-            )
+            raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive")
 
         headers = self._custom_headers
         if default_headers is not None:
@@ -704,14 +684,10 @@ def _make_status_error(
             return _exceptions.BadRequestError(err_msg, response=response, body=body)
 
         if response.status_code == 401:
-            return _exceptions.AuthenticationError(
-                err_msg, response=response, body=body
-            )
+            return _exceptions.AuthenticationError(err_msg, response=response, body=body)
 
         if response.status_code == 403:
-            return _exceptions.PermissionDeniedError(
-                err_msg, response=response, body=body
-            )
+            return _exceptions.PermissionDeniedError(err_msg, response=response, body=body)
 
         if response.status_code == 404:
             return _exceptions.NotFoundError(err_msg, response=response, body=body)
@@ -720,17 +696,13 @@ def _make_status_error(
             return _exceptions.ConflictError(err_msg, response=response, body=body)
 
         if response.status_code == 422:
-            return _exceptions.UnprocessableEntityError(
-                err_msg, response=response, body=body
-            )
+            return _exceptions.UnprocessableEntityError(err_msg, response=response, body=body)
 
         if response.status_code == 429:
             return _exceptions.RateLimitError(err_msg, response=response, body=body)
 
         if response.status_code >= 500:
-            return _exceptions.InternalServerError(
-                err_msg, response=response, body=body
-            )
+            return _exceptions.InternalServerError(err_msg, response=response, body=body)
         return APIStatusError(err_msg, response=response, body=body)
 
 
@@ -1069,9 +1041,7 @@ def knowledge_bases(
             AsyncKnowledgeBasesResourceWithStreamingResponse,
         )
 
-        return AsyncKnowledgeBasesResourceWithStreamingResponse(
-            self._client.knowledge_bases
-        )
+        return AsyncKnowledgeBasesResourceWithStreamingResponse(self._client.knowledge_bases)
 
     @cached_property
     def inference(self) -> inference.AsyncInferenceResourceWithStreamingResponse:
@@ -1121,9 +1091,7 @@ def load_balancers(
             AsyncLoadBalancersResourceWithStreamingResponse,
         )
 
-        return AsyncLoadBalancersResourceWithStreamingResponse(
-            self._client.load_balancers
-        )
+        return AsyncLoadBalancersResourceWithStreamingResponse(self._client.load_balancers)
 
     @cached_property
     def sizes(self) -> sizes.AsyncSizesResourceWithStreamingResponse:
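For context, a minimal, self-contained sketch of the status-code dispatch that these hunks reflow onto single lines. The exception names and the `err_msg, response=response, body=body` call shape mirror the diff; the class bodies and the module-level function below are stand-ins, not the SDK's actual `_exceptions` module or `_make_status_error` method.

```python
# Stand-in exception hierarchy mirroring the names that appear in the diff.
class APIStatusError(Exception):
    def __init__(self, message: str, *, response, body) -> None:
        super().__init__(message)
        self.response = response
        self.body = body


class AuthenticationError(APIStatusError): ...
class PermissionDeniedError(APIStatusError): ...
class RateLimitError(APIStatusError): ...
class InternalServerError(APIStatusError): ...


def make_status_error(err_msg: str, *, response, body) -> APIStatusError:
    # Map an HTTP status code to a typed error; unknown codes fall through
    # to the generic APIStatusError, as in the reflowed method above.
    if response.status_code == 401:
        return AuthenticationError(err_msg, response=response, body=body)
    if response.status_code == 403:
        return PermissionDeniedError(err_msg, response=response, body=body)
    if response.status_code == 429:
        return RateLimitError(err_msg, response=response, body=body)
    if response.status_code >= 500:
        return InternalServerError(err_msg, response=response, body=body)
    return APIStatusError(err_msg, response=response, body=body)


if __name__ == "__main__":
    class _FakeResponse:
        status_code = 429

    err = make_status_error("rate limited", response=_FakeResponse(), body=None)
    print(type(err).__name__)  # RateLimitError
```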

src/gradientai/resources/agents/chat/completions.py

Lines changed: 8 additions & 24 deletions

@@ -62,9 +62,7 @@ def create(
         presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
         stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN,
-        stream_options: (
-            Optional[completion_create_params.StreamOptions] | NotGiven
-        ) = NOT_GIVEN,
+        stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN,
         tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN,
@@ -192,9 +190,7 @@ def create(
         n: Optional[int] | NotGiven = NOT_GIVEN,
         presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
-        stream_options: (
-            Optional[completion_create_params.StreamOptions] | NotGiven
-        ) = NOT_GIVEN,
+        stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN,
         tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN,
@@ -322,9 +318,7 @@ def create(
         n: Optional[int] | NotGiven = NOT_GIVEN,
         presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
-        stream_options: (
-            Optional[completion_create_params.StreamOptions] | NotGiven
-        ) = NOT_GIVEN,
+        stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN,
         tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN,
@@ -455,9 +449,7 @@ def create(
         presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
         stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
-        stream_options: (
-            Optional[completion_create_params.StreamOptions] | NotGiven
-        ) = NOT_GIVEN,
+        stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN,
         tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN,
@@ -561,9 +553,7 @@ async def create(
         presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
         stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN,
-        stream_options: (
-            Optional[completion_create_params.StreamOptions] | NotGiven
-        ) = NOT_GIVEN,
+        stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN,
         tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN,
@@ -691,9 +681,7 @@ async def create(
         n: Optional[int] | NotGiven = NOT_GIVEN,
         presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
-        stream_options: (
-            Optional[completion_create_params.StreamOptions] | NotGiven
-        ) = NOT_GIVEN,
+        stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN,
         tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN,
@@ -821,9 +809,7 @@ async def create(
         n: Optional[int] | NotGiven = NOT_GIVEN,
         presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
-        stream_options: (
-            Optional[completion_create_params.StreamOptions] | NotGiven
-        ) = NOT_GIVEN,
+        stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN,
         tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN,
@@ -951,9 +937,7 @@ async def create(
         presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
         stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
         stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
-        stream_options: (
-            Optional[completion_create_params.StreamOptions] | NotGiven
-        ) = NOT_GIVEN,
+        stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN,
         temperature: Optional[float] | NotGiven = NOT_GIVEN,
         tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN,
         tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN,
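These hunks only reflow the `stream_options` annotation, but the `NOT_GIVEN` default they touch is the key idea in the signatures. Below is a stand-alone sketch of that sentinel pattern, assuming a simplified `NotGiven` class; the SDK's real `NotGiven`/`NOT_GIVEN` live in its internal types module and are not shown in this diff.

```python
from __future__ import annotations

from typing import Optional


class NotGiven:
    """Sentinel type used to distinguish 'argument omitted' from an explicit None."""

    def __bool__(self) -> bool:
        return False

    def __repr__(self) -> str:
        return "NOT_GIVEN"


NOT_GIVEN = NotGiven()


def create(temperature: Optional[float] | NotGiven = NOT_GIVEN) -> str:
    # Only forward the parameter to the request body if the caller actually passed it.
    if isinstance(temperature, NotGiven):
        return "temperature omitted; server default applies"
    return f"temperature explicitly set to {temperature!r}"


print(create())      # omitted
print(create(0.2))   # explicit value
print(create(None))  # explicit None, distinct from omitted
```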

tests/api_resources/agents/chat/test_completions.py

Lines changed: 9 additions & 27 deletions

@@ -15,9 +15,7 @@
 
 
 class TestCompletions:
-    parametrize = pytest.mark.parametrize(
-        "client", [False, True], indirect=True, ids=["loose", "strict"]
-    )
+    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
 
     @pytest.mark.skip()
     @parametrize
@@ -216,9 +214,7 @@ class TestAsyncCompletions:
 
     @pytest.mark.skip()
     @parametrize
-    async def test_method_create_overload_1(
-        self, async_client: AsyncGradientAI
-    ) -> None:
+    async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> None:
         completion = await async_client.agents.chat.completions.create(
             messages=[
                 {
@@ -232,9 +228,7 @@ async def test_method_create_overload_1(
 
     @pytest.mark.skip()
     @parametrize
-    async def test_method_create_with_all_params_overload_1(
-        self, async_client: AsyncGradientAI
-    ) -> None:
+    async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradientAI) -> None:
         completion = await async_client.agents.chat.completions.create(
             messages=[
                 {
@@ -274,9 +268,7 @@ async def test_method_create_with_all_params_overload_1(
 
     @pytest.mark.skip()
     @parametrize
-    async def test_raw_response_create_overload_1(
-        self, async_client: AsyncGradientAI
-    ) -> None:
+    async def test_raw_response_create_overload_1(self, async_client: AsyncGradientAI) -> None:
         response = await async_client.agents.chat.completions.with_raw_response.create(
             messages=[
                 {
@@ -294,9 +286,7 @@ async def test_raw_response_create_overload_1(
 
     @pytest.mark.skip()
     @parametrize
-    async def test_streaming_response_create_overload_1(
-        self, async_client: AsyncGradientAI
-    ) -> None:
+    async def test_streaming_response_create_overload_1(self, async_client: AsyncGradientAI) -> None:
         async with async_client.agents.chat.completions.with_streaming_response.create(
             messages=[
                 {
@@ -316,9 +306,7 @@ async def test_streaming_response_create_overload_1(
 
     @pytest.mark.skip()
     @parametrize
-    async def test_method_create_overload_2(
-        self, async_client: AsyncGradientAI
-    ) -> None:
+    async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> None:
         completion_stream = await async_client.agents.chat.completions.create(
             messages=[
                 {
@@ -333,9 +321,7 @@ async def test_method_create_overload_2(
 
     @pytest.mark.skip()
    @parametrize
-    async def test_method_create_with_all_params_overload_2(
-        self, async_client: AsyncGradientAI
-    ) -> None:
+    async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradientAI) -> None:
         completion_stream = await async_client.agents.chat.completions.create(
             messages=[
                 {
@@ -375,9 +361,7 @@ async def test_method_create_with_all_params_overload_2(
 
     @pytest.mark.skip()
     @parametrize
-    async def test_raw_response_create_overload_2(
-        self, async_client: AsyncGradientAI
-    ) -> None:
+    async def test_raw_response_create_overload_2(self, async_client: AsyncGradientAI) -> None:
         response = await async_client.agents.chat.completions.with_raw_response.create(
             messages=[
                 {
@@ -395,9 +379,7 @@ async def test_raw_response_create_overload_2(
 
     @pytest.mark.skip()
     @parametrize
-    async def test_streaming_response_create_overload_2(
-        self, async_client: AsyncGradientAI
-    ) -> None:
+    async def test_streaming_response_create_overload_2(self, async_client: AsyncGradientAI) -> None:
         async with async_client.agents.chat.completions.with_streaming_response.create(
             messages=[
                 {
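For reference, a minimal runnable sketch of the `pytest.mark.parametrize(..., indirect=True)` pattern these tests use. The `client` fixture below is a stand-in: the SDK's real fixture (presumably defined in its conftest and constructing loose/strict clients) is not part of this diff.

```python
import pytest


@pytest.fixture
def client(request):
    # Stand-in for the SDK's conftest fixture: with indirect=True, request.param
    # is False ("loose") or True ("strict") and would normally select how the
    # client validates responses.
    return {"strict": request.param}


class TestCompletionsSketch:
    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])

    @parametrize
    def test_client_mode(self, client) -> None:
        # Each test runs twice, once per client mode.
        assert client["strict"] in (False, True)
```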
