
Commit 7b81b3f

Stop streaming
1 parent 21977cf commit 7b81b3f

3 files changed: +5 −11 lines changed


src/common/tools/review_pull_request.py

Lines changed: 2 additions & 2 deletions
```diff
@@ -26,7 +26,7 @@ async def arun(
         cls: type[ReviewPullRequest],
         *args: Any,  # noqa: ARG003
         **kwargs: Any,
-    ) -> AsyncIterator[str]:
+    ) -> str:
         """Use the tool asynchronously.
 
         :return: Tool output.
@@ -47,7 +47,7 @@ async def arun(
             patch=request.patch,
             file_content=request.file_content,
         )
-        return ask_llm(
+        return await ask_llm(
             system_prompt=system_prompt,
             user_prompt=user_prompt,
         )
```
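Both hunks follow from dropping streaming: once `ask_llm` stops yielding chunks it is a plain coroutine rather than an async generator, so the return annotation narrows to `str` and the call needs an explicit `await`. A minimal, self-contained sketch of that distinction; all names here (`streamed`, `non_streamed`, `old_arun`, `new_arun`) are stand-ins for illustration, not code from this repository:

```python
import asyncio
from collections.abc import AsyncIterator


async def streamed() -> AsyncIterator[str]:
    """Old shape: an async generator; calling it returns an iterator with no await."""
    for chunk in ("Looks ", "good ", "to me."):
        yield chunk


async def non_streamed() -> str:
    """New shape: a plain coroutine; its result exists only after it is awaited."""
    return "Looks good to me."


async def old_arun() -> AsyncIterator[str]:
    # Calling an async generator function only builds the iterator object,
    # which is why `return ask_llm(...)` needed no await before this commit.
    return streamed()


async def new_arun() -> str:
    # A coroutine must be awaited, hence the diff's `return await ask_llm(...)`.
    return await non_streamed()


async def main() -> None:
    print("".join([chunk async for chunk in await old_arun()]))  # Looks good to me.
    print(await new_arun())  # Looks good to me.


asyncio.run(main())
```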

src/reviews/services/pull_requests_service.py

Lines changed: 1 addition & 5 deletions
```diff
@@ -66,11 +66,7 @@ async def _review_file_diff(
     :param semaphore: The semaphore.
     :return: A mapping from the file name to the review content.
     """
-    answer = ""
     async with semaphore:
-        review_content_iterator = await ReviewPullRequest.arun(request=file_diff)
-
-        async for review_content in review_content_iterator:
-            answer += review_content
+        answer = await ReviewPullRequest.arun(request=file_diff)
 
     return {file_diff.filename: answer}
```
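The service now receives the whole review in one awaited call inside the semaphore-guarded block instead of accumulating streamed chunks. A runnable sketch of how per-file reviews might be fanned out under such a semaphore; the `asyncio.gather` fan-out, the concurrency limit of 3, and the `fake_review` stub are assumptions for illustration, and only the `async with semaphore:` plus single-`await` shape comes from the diff:

```python
import asyncio


async def fake_review(filename: str) -> str:
    """Stand-in for ReviewPullRequest.arun (assumption, kept local so the demo runs)."""
    await asyncio.sleep(0.1)  # simulate one full LLM round-trip
    return f"review for {filename}"


async def _review_file_diff(filename: str, semaphore: asyncio.Semaphore) -> dict[str, str]:
    # Mirrors the new shape: one awaited call per file, bounded by the semaphore.
    async with semaphore:
        answer = await fake_review(filename)
    return {filename: answer}


async def main() -> None:
    semaphore = asyncio.Semaphore(3)  # assumed concurrency limit
    filenames = ["a.py", "b.py", "c.py"]
    results = await asyncio.gather(
        *(_review_file_diff(name, semaphore) for name in filenames)
    )
    # Merge the per-file dicts into a single filename -> review mapping.
    reviews = {name: review for result in results for name, review in result.items()}
    print(reviews)


asyncio.run(main())
```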

src/utils/llm.py

Lines changed: 2 additions & 4 deletions
```diff
@@ -39,7 +39,7 @@ async def ask_llm(
     user_prompt: str,
     memory: list[BaseMessage] | None = None,
     llm_model: str = config.provider_to_llm[config.llm_provider],
-) -> AsyncIterator[str]:
+) -> str:
     """Ask the LLM for a response.
 
     :param system_prompt: The system prompt.
@@ -54,6 +54,4 @@
     ]
     chat_model = get_chat_model(llm_model=llm_model)
 
-    answer_iterator = chat_model.astream(input=messages)
-    async for answer in answer_iterator:
-        yield answer.content
+    return (await chat_model.ainvoke(input=messages)).content
```
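For context, a sketch of what `ask_llm` might look like after this change, assuming a LangChain-style chat model. The message construction, the injected `chat_model` parameter standing in for the project's `get_chat_model` factory and `config` object, and the docstring are assumptions; the diff only confirms the `-> str` annotation and the single `ainvoke` call:

```python
from langchain_core.language_models import BaseChatModel
from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage


async def ask_llm(
    chat_model: BaseChatModel,  # injected here; the real code resolves it via get_chat_model()
    system_prompt: str,
    user_prompt: str,
    memory: list[BaseMessage] | None = None,
) -> str:
    """Ask the LLM for a response and return the full answer as one string.

    :param chat_model: Any LangChain chat model.
    :param system_prompt: The system prompt.
    :param user_prompt: The user prompt.
    :param memory: Optional prior messages to include as context.
    :return: The model's reply text.
    """
    messages: list[BaseMessage] = [
        SystemMessage(content=system_prompt),
        *(memory or []),
        HumanMessage(content=user_prompt),
    ]
    # A single awaited round-trip replaces iterating over astream() chunks;
    # the caller only sees the answer once the full completion is available.
    return (await chat_model.ainvoke(input=messages)).content
```

The trade-off is time to first token: nothing is available until the whole completion arrives, which presumably suits this service since each file's review is collected into a dict before being returned anyway.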
