Commit 5895cfc

test: update Bedrock tests with ComponentInfo (#343)
* test: update Bedrock tests with ComponentInfo
* update transformers
1 parent 6666e83 commit 5895cfc

2 files changed, +22 -5 lines changed

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -52,7 +52,7 @@ fmt-check = "ruff check {args} && ruff format --check {args}"
 [tool.hatch.envs.test]
 extra-dependencies = [
   "colorama", # Pipeline checkpoints experiment
-  "transformers[torch,sentencepiece]>=4.51.1,<4.52", # Pipeline checkpoints experiment
+  "transformers[torch,sentencepiece]>=4.52.4,<4.53", # Pipeline checkpoints experiment
   "arrow>=1.3.0", # Multimodal experiment - ChatPromptBuilder
   "pypdfium2", # Multimodal experiment - PDFToImageContent
   "pillow", # Multimodal experiment - ImageFileToImageContent, PDFToImageContent

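As a quick sanity check on the bumped pin, an assertion like the following (hypothetical, not part of this commit) confirms that the installed transformers build falls inside the new range; packaging.version is used here purely for illustration.

# Hypothetical sanity check, not part of this commit: verify that the installed
# transformers version satisfies the updated pin ">=4.52.4,<4.53" shown above.
import transformers
from packaging.version import Version

installed = Version(transformers.__version__)
assert Version("4.52.4") <= installed < Version("4.53"), (
    f"transformers {installed} is outside the pinned range >=4.52.4,<4.53"
)
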
test/components/generators/chat/amazon_bedrock/test_chat_generator_utils.py

Lines changed: 21 additions & 4 deletions
@@ -1,7 +1,7 @@
 import pytest
 import base64
 
-from haystack.dataclasses import StreamingChunk
+from haystack.dataclasses import ComponentInfo, StreamingChunk
 from haystack.tools import Tool
 from haystack_integrations.components.generators.amazon_bedrock.chat.utils import (
     _format_tools,
@@ -15,7 +15,6 @@
 )
 from haystack_experimental.dataclasses.chat_message import ChatMessage, ChatRole, ToolCall, ImageContent
 
-
 # NOTE: original module and tests
 def weather(city: str):
     """Get weather for a given city."""
@@ -346,6 +345,9 @@ def test_process_streaming_response_one_tool_call(self, mock_boto3_session):
         Test that process_streaming_response correctly handles streaming events and accumulates responses
         """
         model = "anthropic.claude-3-5-sonnet-20240620-v1:0"
+        type_ = (
+            "haystack_integrations.components.generators.amazon_bedrock.chat.chat_generator.AmazonBedrockChatGenerator"
+        )
         streaming_chunks = []
 
         def test_callback(chunk: StreamingChunk):
@@ -386,7 +388,11 @@ def test_callback(chunk: StreamingChunk):
             },
         ]
 
-        replies = _parse_streaming_response(events, test_callback, model)
+        component_info = ComponentInfo(
+            type=type_,
+        )
+
+        replies = _parse_streaming_response(events, test_callback, model, component_info)
         # Pop completion_start_time since it will always change
         replies[0].meta.pop("completion_start_time")
         expected_messages = [
@@ -421,12 +427,19 @@ def test_callback(chunk: StreamingChunk):
             }
         ]
 
+        for chunk in streaming_chunks:
+            assert chunk.component_info.type == type_
+            assert chunk.component_info.name is None  # not in a pipeline
+
         # Verify final replies
         assert len(replies) == 1
         assert replies == expected_messages
 
     def test_parse_streaming_response_with_two_tool_calls(self, mock_boto3_session):
         model = "anthropic.claude-3-5-sonnet-20240620-v1:0"
+        type_ = (
+            "haystack_integrations.components.generators.amazon_bedrock.chat.chat_generator.AmazonBedrockChatGenerator"
+        )
         streaming_chunks = []
 
         def test_callback(chunk: StreamingChunk):
@@ -475,7 +488,11 @@ def test_callback(chunk: StreamingChunk):
             },
         ]
 
-        replies = _parse_streaming_response(events, test_callback, model)
+        component_info = ComponentInfo(
+            type=type_,
+        )
+
+        replies = _parse_streaming_response(events, test_callback, model, component_info)
         # Pop completion_start_time since it will always change
         replies[0].meta.pop("completion_start_time")
         expected_messages = [

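For readers skimming the diff, here is a minimal, self-contained sketch of the pattern the updated tests exercise: the caller builds a ComponentInfo, the streaming path attaches it to every StreamingChunk handed to the callback, and the test then asserts on the chunk's component_info.type and component_info.name. This is an assumption-based illustration (in particular, the StreamingChunk constructor arguments are inferred from the assertions above); it is not the integration's actual streaming code.

# Illustrative sketch only, not part of the commit. Assumes StreamingChunk accepts
# content and component_info keyword arguments, as suggested by the test assertions.
from haystack.dataclasses import ComponentInfo, StreamingChunk

type_ = (
    "haystack_integrations.components.generators.amazon_bedrock.chat.chat_generator.AmazonBedrockChatGenerator"
)

streaming_chunks = []

def test_callback(chunk: StreamingChunk):
    # Collect every chunk the generator streams back.
    streaming_chunks.append(chunk)

# The generator attaches the same ComponentInfo to each chunk it emits.
component_info = ComponentInfo(type=type_)
test_callback(StreamingChunk(content="partial text", component_info=component_info))

for chunk in streaming_chunks:
    assert chunk.component_info.type == type_
    assert chunk.component_info.name is None  # name is only set when running inside a pipeline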