diff --git a/portkey_ai/api_resources/apis/chat_complete.py b/portkey_ai/api_resources/apis/chat_complete.py
index fc87b78..15a25d9 100644
--- a/portkey_ai/api_resources/apis/chat_complete.py
+++ b/portkey_ai/api_resources/apis/chat_complete.py
@@ -74,7 +74,11 @@ def stream_create( # type: ignore[return]
         store,
         **kwargs,
     ) -> Union[ChatCompletions, Iterator[ChatCompletionChunk]]:
-        extra_headers = kwargs.get("extra_headers", {})
+        extra_headers = kwargs.pop("extra_headers", None)
+        extra_query = kwargs.pop("extra_query", None)
+        timeout = kwargs.pop("timeout", None)
+        user_extra_body = kwargs.pop("extra_body", None) or {}
+        merged_extra_body = {**user_extra_body, **kwargs}
         return self.openai_client.chat.completions.create(
             model=model,
             messages=messages,
@@ -90,7 +94,9 @@ def stream_create( # type: ignore[return]
             reasoning_effort=reasoning_effort,
             store=store,
             extra_headers=extra_headers,
-            extra_body=kwargs,
+            extra_query=extra_query,
+            extra_body=merged_extra_body,
+            timeout=timeout,
         )
 
     def normal_create(
@@ -110,7 +116,11 @@ def normal_create(
         store,
         **kwargs,
     ) -> ChatCompletions:
-        extra_headers = kwargs.get("extra_headers", {})
+        extra_headers = kwargs.pop("extra_headers", None)
+        extra_query = kwargs.pop("extra_query", None)
+        timeout = kwargs.pop("timeout", None)
+        user_extra_body = kwargs.pop("extra_body", None) or {}
+        merged_extra_body = {**user_extra_body, **kwargs}
         response = self.openai_client.with_raw_response.chat.completions.create(
             model=model,
             messages=messages,
@@ -126,7 +136,9 @@ def normal_create(
             reasoning_effort=reasoning_effort,
             store=store,
             extra_headers=extra_headers,
-            extra_body=kwargs,
+            extra_query=extra_query,
+            extra_body=merged_extra_body,
+            timeout=timeout,
         )
         data = ChatCompletions(**json.loads(response.text))
         data._headers = response.headers
@@ -472,7 +484,11 @@ async def stream_create(
         store,
         **kwargs,
     ) -> Union[ChatCompletions, AsyncIterator[ChatCompletionChunk]]:
-        extra_headers = kwargs.get("extra_headers", {})
+        extra_headers = kwargs.pop("extra_headers", None)
+        extra_query = kwargs.pop("extra_query", None)
+        timeout = kwargs.pop("timeout", None)
+        user_extra_body = kwargs.pop("extra_body", None) or {}
+        merged_extra_body = {**user_extra_body, **kwargs}
         return await self.openai_client.chat.completions.create(
             model=model,
             messages=messages,
@@ -488,7 +504,9 @@ async def stream_create(
             reasoning_effort=reasoning_effort,
             store=store,
             extra_headers=extra_headers,
-            extra_body=kwargs,
+            extra_query=extra_query,
+            extra_body=merged_extra_body,
+            timeout=timeout,
         )
 
     async def normal_create(
@@ -508,7 +526,11 @@ async def normal_create(
         store,
         **kwargs,
     ) -> ChatCompletions:
-        extra_headers = kwargs.get("extra_headers", {})
+        extra_headers = kwargs.pop("extra_headers", None)
+        extra_query = kwargs.pop("extra_query", None)
+        timeout = kwargs.pop("timeout", None)
+        user_extra_body = kwargs.pop("extra_body", None) or {}
+        merged_extra_body = {**user_extra_body, **kwargs}
         response = await self.openai_client.with_raw_response.chat.completions.create(
             model=model,
             messages=messages,
@@ -524,7 +546,9 @@ async def normal_create(
             reasoning_effort=reasoning_effort,
             store=store,
             extra_headers=extra_headers,
-            extra_body=kwargs,
+            extra_query=extra_query,
+            extra_body=merged_extra_body,
+            timeout=timeout,
         )
         data = ChatCompletions(**json.loads(response.text))
         data._headers = response.headers
diff --git a/portkey_ai/api_resources/apis/complete.py b/portkey_ai/api_resources/apis/complete.py
index 0299c4f..24fa0ef 100644
--- a/portkey_ai/api_resources/apis/complete.py
+++ b/portkey_ai/api_resources/apis/complete.py
@@ -39,6 +39,11 @@ def stream_create( # type: ignore[return]
         stream_options,
         **kwargs,
     ) -> Union[TextCompletion, Iterator[TextCompletionChunk]]:
+        extra_headers = kwargs.pop("extra_headers", None)
+        extra_query = kwargs.pop("extra_query", None)
+        timeout = kwargs.pop("timeout", None)
+        user_extra_body = kwargs.pop("extra_body", None) or {}
+        merged_extra_body = {**user_extra_body, **kwargs}
         return self.openai_client.completions.create(
             model=model,
             prompt=prompt,
@@ -58,7 +63,10 @@ def stream_create( # type: ignore[return]
             suffix=suffix,
             user=user,
             stream_options=stream_options,
-            extra_body=kwargs,
+            extra_headers=extra_headers,
+            extra_query=extra_query,
+            extra_body=merged_extra_body,
+            timeout=timeout,
         )
 
     def normal_create(
@@ -83,6 +91,11 @@ def normal_create(
         stream_options,
         **kwargs,
     ) -> TextCompletion:
+        extra_headers = kwargs.pop("extra_headers", None)
+        extra_query = kwargs.pop("extra_query", None)
+        timeout = kwargs.pop("timeout", None)
+        user_extra_body = kwargs.pop("extra_body", None) or {}
+        merged_extra_body = {**user_extra_body, **kwargs}
         response = self.openai_client.with_raw_response.completions.create(
             model=model,
             prompt=prompt,
@@ -102,7 +115,10 @@ def normal_create(
             suffix=suffix,
             user=user,
             stream_options=stream_options,
-            extra_body=kwargs,
+            extra_headers=extra_headers,
+            extra_query=extra_query,
+            extra_body=merged_extra_body,
+            timeout=timeout,
         )
         data = TextCompletion(**json.loads(response.text))
         data._headers = response.headers
@@ -206,6 +222,11 @@ async def stream_create(
         stream_options,
         **kwargs,
     ) -> Union[TextCompletion, AsyncIterator[TextCompletionChunk]]:
+        extra_headers = kwargs.pop("extra_headers", None)
+        extra_query = kwargs.pop("extra_query", None)
+        timeout = kwargs.pop("timeout", None)
+        user_extra_body = kwargs.pop("extra_body", None) or {}
+        merged_extra_body = {**user_extra_body, **kwargs}
         return await self.openai_client.completions.create(
             model=model,
             prompt=prompt,
@@ -225,7 +246,10 @@ async def stream_create(
             suffix=suffix,
             user=user,
             stream_options=stream_options,
-            extra_body=kwargs,
+            extra_headers=extra_headers,
+            extra_query=extra_query,
+            extra_body=merged_extra_body,
+            timeout=timeout,
         )
 
     async def normal_create(
@@ -250,6 +274,11 @@ async def normal_create(
         stream_options,
         **kwargs,
     ) -> TextCompletion:
+        extra_headers = kwargs.pop("extra_headers", None)
+        extra_query = kwargs.pop("extra_query", None)
+        timeout = kwargs.pop("timeout", None)
+        user_extra_body = kwargs.pop("extra_body", None) or {}
+        merged_extra_body = {**user_extra_body, **kwargs}
         response = await self.openai_client.with_raw_response.completions.create(
             model=model,
             prompt=prompt,
@@ -269,7 +298,10 @@ async def normal_create(
             suffix=suffix,
             user=user,
             stream_options=stream_options,
-            extra_body=kwargs,
+            extra_headers=extra_headers,
+            extra_query=extra_query,
+            extra_body=merged_extra_body,
+            timeout=timeout,
        )
         data = TextCompletion(**json.loads(response.text))
         data._headers = response.headers