Skip to content

Commit 05e2340

Browse files
authored
Merge pull request #411 from Portkey-AI/fix/remove-custom-chat-completions-stream-handling
chore: refactor chat and text completion methods to remove custom stream handling
2 parents 1141e31 + d03ca62 commit 05e2340

File tree

2 files changed

+10
-66
lines changed

2 files changed

+10
-66
lines changed

portkey_ai/api_resources/apis/chat_complete.py

Lines changed: 5 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ def stream_create( # type: ignore[return]
7575
**kwargs,
7676
) -> Union[ChatCompletions, Iterator[ChatCompletionChunk]]:
7777
extra_headers = kwargs.get("extra_headers", {})
78-
with self.openai_client.with_streaming_response.chat.completions.create(
78+
return self.openai_client.chat.completions.create(
7979
model=model,
8080
messages=messages,
8181
stream=stream,
@@ -91,22 +91,7 @@ def stream_create( # type: ignore[return]
9191
store=store,
9292
extra_headers=extra_headers,
9393
extra_body=kwargs,
94-
) as response:
95-
for line in response.iter_lines():
96-
json_string = line.replace("data: ", "")
97-
json_string = json_string.strip().rstrip("\n")
98-
if json_string == "":
99-
continue
100-
if json_string.startswith(":"):
101-
continue
102-
elif json_string == "[DONE]":
103-
break
104-
elif json_string != "":
105-
json_data = json.loads(json_string)
106-
json_data = ChatCompletionChunk(**json_data)
107-
yield json_data
108-
else:
109-
return ""
94+
)
11095

11196
def normal_create(
11297
self,
@@ -488,7 +473,7 @@ async def stream_create(
488473
**kwargs,
489474
) -> Union[ChatCompletions, AsyncIterator[ChatCompletionChunk]]:
490475
extra_headers = kwargs.get("extra_headers", {})
491-
async with self.openai_client.with_streaming_response.chat.completions.create(
476+
return await self.openai_client.chat.completions.create(
492477
model=model,
493478
messages=messages,
494479
stream=stream,
@@ -504,22 +489,7 @@ async def stream_create(
504489
store=store,
505490
extra_headers=extra_headers,
506491
extra_body=kwargs,
507-
) as response:
508-
async for line in response.iter_lines():
509-
json_string = line.replace("data: ", "")
510-
json_string = json_string.strip().rstrip("\n")
511-
if json_string == "":
512-
continue
513-
if json_string.startswith(":"):
514-
continue
515-
elif json_string == "[DONE]":
516-
break
517-
elif json_string != "":
518-
json_data = json.loads(json_string)
519-
json_data = ChatCompletionChunk(**json_data)
520-
yield json_data
521-
else:
522-
pass
492+
)
523493

524494
async def normal_create(
525495
self,
@@ -579,7 +549,7 @@ async def create(
579549
**kwargs,
580550
) -> Union[ChatCompletions, AsyncIterator[ChatCompletionChunk]]:
581551
if stream is True:
582-
return self.stream_create(
552+
return await self.stream_create(
583553
model=model,
584554
messages=messages,
585555
stream=stream,

portkey_ai/api_resources/apis/complete.py

Lines changed: 5 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@ def stream_create( # type: ignore[return]
3939
stream_options,
4040
**kwargs,
4141
) -> Union[TextCompletion, Iterator[TextCompletionChunk]]:
42-
with self.openai_client.with_streaming_response.completions.create(
42+
return self.openai_client.completions.create(
4343
model=model,
4444
prompt=prompt,
4545
stream=stream,
@@ -59,20 +59,7 @@ def stream_create( # type: ignore[return]
5959
user=user,
6060
stream_options=stream_options,
6161
extra_body=kwargs,
62-
) as response:
63-
for line in response.iter_lines():
64-
json_string = line.replace("data: ", "")
65-
json_string = json_string.strip().rstrip("\n")
66-
if json_string == "":
67-
continue
68-
elif json_string == "[DONE]":
69-
break
70-
elif json_string != "":
71-
json_data = json.loads(json_string)
72-
json_data = TextCompletionChunk(**json_data)
73-
yield json_data
74-
else:
75-
return ""
62+
)
7663

7764
def normal_create(
7865
self,
@@ -219,7 +206,7 @@ async def stream_create(
219206
stream_options,
220207
**kwargs,
221208
) -> Union[TextCompletion, AsyncIterator[TextCompletionChunk]]:
222-
async with self.openai_client.with_streaming_response.completions.create(
209+
return await self.openai_client.completions.create(
223210
model=model,
224211
prompt=prompt,
225212
stream=stream,
@@ -239,20 +226,7 @@ async def stream_create(
239226
user=user,
240227
stream_options=stream_options,
241228
extra_body=kwargs,
242-
) as response:
243-
async for line in response.iter_lines():
244-
json_string = line.replace("data: ", "")
245-
json_string = json_string.strip().rstrip("\n")
246-
if json_string == "":
247-
continue
248-
elif json_string == "[DONE]":
249-
break
250-
elif json_string != "":
251-
json_data = json.loads(json_string)
252-
json_data = TextCompletionChunk(**json_data)
253-
yield json_data
254-
else:
255-
pass
229+
)
256230

257231
async def normal_create(
258232
self,
@@ -327,7 +301,7 @@ async def create(
327301
**kwargs,
328302
) -> Union[TextCompletion, AsyncIterator[TextCompletionChunk]]:
329303
if stream is True:
330-
return self.stream_create(
304+
return await self.stream_create(
331305
model=model,
332306
prompt=prompt,
333307
stream=stream,

0 commit comments

Comments
 (0)