Skip to content

Commit 18591b7

Browse files
committed
fix some more tests
1 parent e684739 commit 18591b7

File tree

4 files changed

+32
-28
lines changed

4 files changed

+32
-28
lines changed

guardrails/async_guard.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -408,7 +408,7 @@ async def __call__(
408408

409409
# Determine the final value for messages
410410
messages = messages or messages_from_kwargs or messages_from_exec_opts or []
411-
print("===== messages is", messages)
411+
412412
if messages is not None and not len(messages):
413413
raise RuntimeError(
414414
"You must provide a prompt if messages is empty. "

guardrails/run/async_runner.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -300,7 +300,7 @@ async def async_prepare(
300300
prompt_params = prompt_params or {}
301301
if api is None:
302302
raise UserFacingException(ValueError("API must be provided."))
303-
print("===== runner messages are", messages)
303+
304304
if messages:
305305
# Runner.prepare_messages
306306
messages = await self.prepare_messages(

guardrails/run/async_stream_runner.py

Lines changed: 15 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -15,9 +15,7 @@
1515
from guardrails.classes.output_type import OutputTypes
1616
from guardrails.constants import pass_status
1717
from guardrails.llm_providers import (
18-
AsyncLiteLLMCallable,
1918
AsyncPromptCallableBase,
20-
LiteLLMCallable,
2119
PromptCallableBase,
2220
)
2321
from guardrails.logger import set_scope
@@ -202,23 +200,24 @@ async def async_step(
202200
def get_chunk_text(self, chunk: Any, api: Union[PromptCallableBase, None]) -> str:
203201
"""Get the text from a chunk."""
204202
chunk_text = ""
205-
if isinstance(api, LiteLLMCallable):
203+
try:
206204
finished = chunk.choices[0].finish_reason
207-
content = chunk.choices[0].delta.content
205+
content = chunk.choices[0].text
208206
if not finished and content:
209207
chunk_text = content
210-
elif isinstance(api, AsyncLiteLLMCallable):
211-
finished = chunk.choices[0].finish_reason
212-
content = chunk.choices[0].delta.content
213-
if not finished and content:
214-
chunk_text = content
215-
else:
208+
except Exception as e:
216209
try:
217-
chunk_text = chunk
210+
finished = chunk.choices[0].finish_reason
211+
content = chunk.choices[0].delta.content
212+
if not finished and content:
213+
chunk_text = content
218214
except Exception as e:
219-
raise ValueError(
220-
f"Error getting chunk from stream: {e}. "
221-
"Non-OpenAI API callables expected to return "
222-
"a generator of strings."
223-
) from e
215+
try:
216+
chunk_text = chunk
217+
except Exception as e:
218+
raise ValueError(
219+
f"Error getting chunk from stream: {e}. "
220+
"Non-OpenAI API callables expected to return "
221+
"a generator of strings."
222+
) from e
224223
return chunk_text

guardrails/run/stream_runner.py

Lines changed: 15 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,6 @@
55
from guardrails.classes.output_type import OT, OutputTypes
66
from guardrails.classes.validation_outcome import ValidationOutcome
77
from guardrails.llm_providers import (
8-
LiteLLMCallable,
98
PromptCallableBase,
109
)
1110
from guardrails.run.runner import Runner
@@ -249,20 +248,26 @@ def is_last_chunk(self, chunk: Any, api: Union[PromptCallableBase, None]) -> boo
249248
def get_chunk_text(self, chunk: Any, api: Union[PromptCallableBase, None]) -> str:
250249
"""Get the text from a chunk."""
251250
chunk_text = ""
252-
if isinstance(api, LiteLLMCallable):
251+
try:
253252
finished = chunk.choices[0].finish_reason
254-
content = chunk.choices[0].delta.content
253+
content = chunk.choices[0].text
255254
if not finished and content:
256255
chunk_text = content
257-
else:
256+
except Exception as e:
258257
try:
259-
chunk_text = chunk
258+
finished = chunk.choices[0].finish_reason
259+
content = chunk.choices[0].delta.content
260+
if not finished and content:
261+
chunk_text = content
260262
except Exception as e:
261-
raise ValueError(
262-
f"Error getting chunk from stream: {e}. "
263-
"Non-OpenAI API callables expected to return "
264-
"a generator of strings."
265-
) from e
263+
try:
264+
chunk_text = chunk
265+
except Exception as e:
266+
raise ValueError(
267+
f"Error getting chunk from stream: {e}. "
268+
"Non-OpenAI API callables expected to return "
269+
"a generator of strings."
270+
) from e
266271
return chunk_text
267272

268273
def parse(

0 commit comments

Comments (0)