Skip to content

Commit 1abea9e

Browse files
author
Lloyd Hamilton
committed
Removed stream parser, favouring return of raw stream from generator
1 parent 3329ccc commit 1abea9e

File tree

1 file changed

+7
-13
lines changed

1 file changed

+7
-13
lines changed

adalflow/adalflow/components/model_client/bedrock_client.py

Lines changed: 7 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313

1414
from adalflow.core.model_client import ModelClient
1515
from adalflow.core.types import ModelType, CompletionUsage, GeneratorOutput
16+
from adalflow.utils import printc
1617

1718
from adalflow.utils.lazy_import import safe_import, OptionalPackages
1819

@@ -165,27 +166,20 @@ def init_sync_client(self):
165166
def init_async_client(self):
166167
raise NotImplementedError("Async call not implemented yet.")
167168

168-
@staticmethod
169-
def parse_stream_response(completion: dict) -> str:
170-
if "contentBlockDelta" in completion:
171-
if delta_chunk := completion["contentBlockDelta"]["delta"]:
172-
return delta_chunk["text"]
173-
return ''
174-
175169
def handle_stream_response(self, stream: dict) -> GeneratorType:
176170
try:
177-
for chunk in stream["stream"]:
171+
stream: GeneratorType = stream["stream"]
172+
for chunk in stream:
178173
log.debug(f"Raw chunk: {chunk}")
179-
parsed_content = self.parse_stream_response(chunk)
180-
yield parsed_content
174+
yield chunk
181175
except Exception as e:
182176
print(f"Error in handle_stream_response: {e}") # Debug print
183177
raise
184178

185179
def parse_chat_completion(self, completion: dict) -> "GeneratorOutput":
186180
"""Parse the completion, and put it into the raw_response."""
187181
try:
188-
data = self.handle_stream_response(completion)
182+
data = self.chat_completion_parser(completion)
189183
return GeneratorOutput(
190184
data=None, error=None, raw_response=data
191185
)
@@ -254,19 +248,19 @@ def call(
254248
self,
255249
api_kwargs: Dict = {},
256250
model_type: ModelType = ModelType.UNDEFINED,
257-
stream: bool = False
258251
) -> dict:
259252
"""
260253
kwargs is the combined input and model_kwargs
261254
"""
262255
if model_type == ModelType.LLM:
263256
if "stream" in api_kwargs and api_kwargs.get("stream", False):
264257
log.debug("Streaming call")
258+
printc("Streaming")
265259
api_kwargs.pop("stream") # stream is not a valid parameter for bedrock
266260
self.chat_completion_parser = self.handle_stream_response
267261
return self.sync_client.converse_stream(**api_kwargs)
268262
else:
269-
api_kwargs.pop("stream")
263+
api_kwargs.pop("stream", None)
270264
return self.sync_client.converse(**api_kwargs)
271265
else:
272266
raise ValueError(f"model_type {model_type} is not supported")

0 commit comments

Comments (0)