
Commit 31b5be2
Upgrade to version v1.0.1
1 parent (7959f86)

Some content is hidden: large commits have some file contents collapsed by default, so not every changed file appears below.

44 files changed (+4605, -1 lines)

CHANGELOG.md

Lines changed: 1 addition & 1 deletion

@@ -5,7 +5,7 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

-## [1.0.1] - 2023-10-25
+## [1.0.1] - 2023-10-26

 ### Updated

Lines changed: 13 additions & 0 deletions

@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+######################################################################################################################
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. #
+# #
+# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance #
+# with the License. A copy of the License is located at #
+# #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+# #
+# or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES #
+# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions #
+# and limitations under the License. #
+######################################################################################################################

Lines changed: 13 additions & 0 deletions

@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+######################################################################################################################
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. #
+# #
+# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance #
+# with the License. A copy of the License is located at #
+# #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+# #
+# or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES #
+# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions #
+# and limitations under the License. #
+######################################################################################################################

Lines changed: 13 additions & 0 deletions

@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+######################################################################################################################
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. #
+# #
+# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance #
+# with the License. A copy of the License is located at #
+# #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+# #
+# or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES #
+# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions #
+# and limitations under the License. #
+######################################################################################################################

Lines changed: 257 additions & 0 deletions

@@ -0,0 +1,257 @@
+#!/usr/bin/env python
+######################################################################################################################
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. #
+# #
+# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance #
+# with the License. A copy of the License is located at #
+# #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+# #
+# or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES #
+# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions #
+# and limitations under the License. #
+######################################################################################################################
+
+import json
+import os
+from copy import deepcopy
+from unittest.mock import patch
+
+import pytest
+from clients.builders.anthropic_builder import AnthropicBuilder
+from langchain.callbacks.streaming_aiter import AsyncIteratorCallbackHandler
+from llm_models.anthropic import AnthropicLLM
+from llm_models.rag.anthropic_retrieval import AnthropicRetrievalLLM
+from shared.memory.ddb_chat_memory import DynamoDBChatMemory
+from utils.constants import (
+    CONVERSATION_ID_EVENT_KEY,
+    DEFAULT_ANTHROPIC_PLACEHOLDERS,
+    DEFAULT_ANTHROPIC_PROMPT,
+    DEFAULT_ANTHROPIC_RAG_PLACEHOLDERS,
+    DEFAULT_ANTHROPIC_RAG_PROMPT,
+    KENDRA_INDEX_ID_ENV_VAR,
+    MEMORY_CONFIG,
+    RAG_KEY,
+    USER_ID_EVENT_KEY,
+)
+from utils.enum_types import ConversationMemoryTypes, LLMProviderTypes
+
+
+@pytest.mark.parametrize(
+    "is_streaming, rag_enabled, llm_type, prompt, placeholders, rag_key",
+    [
+        (False, False, AnthropicLLM, DEFAULT_ANTHROPIC_PROMPT, DEFAULT_ANTHROPIC_PLACEHOLDERS, ""),
+        (True, False, AnthropicLLM, DEFAULT_ANTHROPIC_PROMPT, DEFAULT_ANTHROPIC_PLACEHOLDERS, ""),
+        (
+            False,
+            True,
+            AnthropicRetrievalLLM,
+            DEFAULT_ANTHROPIC_RAG_PROMPT,
+            DEFAULT_ANTHROPIC_RAG_PLACEHOLDERS,
+            RAG_KEY,
+        ),
+        (True, True, AnthropicRetrievalLLM, DEFAULT_ANTHROPIC_RAG_PROMPT, DEFAULT_ANTHROPIC_RAG_PLACEHOLDERS, RAG_KEY),
+    ],
+)
+def test_set_llm_model(
+    is_streaming,
+    rag_enabled,
+    llm_type,
+    prompt,
+    placeholders,
+    rag_key,
+    chat_event,
+    llm_config,
+    setup_environment,
+    setup_secret,
+):
+    config = json.loads(llm_config["Parameter"]["Value"])
+    chat_event_body = json.loads(chat_event["body"])
+    builder = AnthropicBuilder(
+        llm_config=config,
+        rag_enabled=rag_enabled,
+        connection_id="fake-connection-id",
+        conversation_id="fake-conversation-id",
+    )
+    user_id = chat_event.get("requestContext", {}).get("authorizer", {}).get(USER_ID_EVENT_KEY, {})
+
+    # Assign all the values to the builder attributes required to construct the LLMChat object
+    builder.set_knowledge_base()
+    builder.set_memory_constants(LLMProviderTypes.ANTHROPIC.value)
+    builder.set_conversation_memory(user_id, chat_event_body[CONVERSATION_ID_EVENT_KEY])
+    builder.set_api_key()
+
+    if is_streaming:
+        with patch(
+            "clients.builders.llm_builder.WebsocketStreamingCallbackHandler",
+            return_value=AsyncIteratorCallbackHandler(),
+        ):
+            builder.set_llm_model()
+    else:
+        builder.set_llm_model()
+
+    assert type(builder.llm_model) == llm_type
+    assert builder.llm_model.model == config["LlmParams"]["ModelId"]
+    assert builder.llm_model.prompt_template.template == prompt
+    assert set(builder.llm_model.prompt_template.input_variables) == set(placeholders)
+    assert builder.llm_model.model_params == {
+        "max_length": 100,
+        "temperature": 0.2,
+        "top_p": 0.2,
+    }
+    assert builder.llm_model.api_token == "fake-secret-value"
+    assert builder.llm_model.streaming == config["LlmParams"]["Streaming"]
+    assert builder.llm_model.verbose == config["LlmParams"]["Verbose"]
+    if rag_enabled:
+        assert builder.llm_model.knowledge_base.kendra_index_id == os.getenv(KENDRA_INDEX_ID_ENV_VAR)
+    else:
+        assert builder.llm_model.knowledge_base == None
+    assert builder.llm_model.conversation_memory.memory_type == ConversationMemoryTypes.DynamoDB.value
+    assert type(builder.llm_model.conversation_memory) == DynamoDBChatMemory
+    assert (
+        builder.llm_model.conversation_memory.memory_key
+        == MEMORY_CONFIG[LLMProviderTypes.ANTHROPIC.value + rag_key]["history"]
+    )
+    assert (
+        builder.llm_model.conversation_memory.input_key
+        == MEMORY_CONFIG[LLMProviderTypes.ANTHROPIC.value + rag_key]["input"]
+    )
+    assert (
+        builder.llm_model.conversation_memory.output_key
+        == MEMORY_CONFIG[LLMProviderTypes.ANTHROPIC.value + rag_key]["output"]
+    )
+    assert (
+        builder.llm_model.conversation_memory.human_prefix
+        == MEMORY_CONFIG[LLMProviderTypes.ANTHROPIC.value + rag_key]["human_prefix"]
+    )
+    assert (
+        builder.llm_model.conversation_memory.ai_prefix
+        == MEMORY_CONFIG[LLMProviderTypes.ANTHROPIC.value + rag_key]["ai_prefix"]
+    )
+
+    if is_streaming:
+        assert builder.callbacks
+    else:
+        assert builder.callbacks is None
+
+
+@pytest.mark.parametrize(
+    "prompt, is_streaming, rag_enabled",
+    [
+        (DEFAULT_ANTHROPIC_PROMPT, True, False),
+        (DEFAULT_ANTHROPIC_PROMPT, False, False),
+        (DEFAULT_ANTHROPIC_RAG_PROMPT, True, True),
+        (DEFAULT_ANTHROPIC_RAG_PROMPT, False, True),
+    ],
+)
+def test_set_llm_model_throws_error_missing_memory(llm_config, chat_event, setup_environment, setup_secret):
+    config = json.loads(llm_config["Parameter"]["Value"])
+    builder = AnthropicBuilder(
+        llm_config=config,
+        rag_enabled=False,
+        connection_id="fake-connection-id",
+        conversation_id="fake-conversation-id",
+    )
+
+    builder.set_knowledge_base()
+    builder.set_api_key()
+    with patch(
+        "clients.builders.llm_builder.WebsocketStreamingCallbackHandler",
+        return_value=AsyncIteratorCallbackHandler(),
+    ):
+        with pytest.raises(ValueError) as error:
+            builder.set_llm_model()
+
+    assert error.value.args[0] == "Conversation Memory was set to null."
+
+
+@pytest.mark.parametrize(
+    "prompt, is_streaming, rag_enabled",
+    [
+        (DEFAULT_ANTHROPIC_PROMPT, True, False),
+        (DEFAULT_ANTHROPIC_PROMPT, False, False),
+        (DEFAULT_ANTHROPIC_RAG_PROMPT, True, True),
+        (DEFAULT_ANTHROPIC_RAG_PROMPT, False, True),
+    ],
+)
+def test_set_llm_model_with_errors(llm_config):
+    parsed_config = json.loads(llm_config["Parameter"]["Value"])
+    builder = AnthropicBuilder(
+        llm_config=parsed_config,
+        rag_enabled=False,
+        connection_id="fake-connection-id",
+        conversation_id="fake-conversation-id",
+    )
+    builder.errors = ["some-error-1", "some-error-2"]
+    builder.conversation_memory = ""
+
+    with patch(
+        "clients.builders.llm_builder.WebsocketStreamingCallbackHandler",
+        return_value=AsyncIteratorCallbackHandler(),
+    ):
+        with pytest.raises(ValueError) as error:
+            builder.set_llm_model()
+
+    assert (
+        error.value.args[0] == "There are errors in the following configuration parameters:\nsome-error-1\nsome-error-2"
+    )
+
+
+@pytest.mark.parametrize(
+    "prompt, is_streaming, rag_enabled",
+    [
+        (DEFAULT_ANTHROPIC_PROMPT, True, False),
+        (DEFAULT_ANTHROPIC_PROMPT, False, False),
+        (DEFAULT_ANTHROPIC_RAG_PROMPT, True, True),
+        (DEFAULT_ANTHROPIC_RAG_PROMPT, False, True),
+    ],
+)
+def test_set_llm_model_with_missing_config_fields(llm_config):
+    parsed_config = deepcopy(json.loads(llm_config["Parameter"]["Value"]))
+    del parsed_config["LlmParams"]
+    builder = AnthropicBuilder(
+        llm_config=parsed_config,
+        rag_enabled=False,
+        connection_id="fake-connection-id",
+        conversation_id="fake-conversation-id",
+    )
+
+    with pytest.raises(ValueError) as error:
+        builder.set_llm_model()
+
+    assert (
+        error.value.args[0]
+        == "There are errors in the following configuration parameters:\nMissing required field (LlmParams) containing LLM configuration in the config which is required to construct the LLM."
+    )
+
+
+@pytest.mark.parametrize(
+    "prompt, is_streaming, rag_enabled, model",
+    [
+        (DEFAULT_ANTHROPIC_PROMPT, False, False, AnthropicLLM),
+        (DEFAULT_ANTHROPIC_PROMPT, True, False, AnthropicLLM),
+        (DEFAULT_ANTHROPIC_RAG_PROMPT, False, True, AnthropicRetrievalLLM),
+        (DEFAULT_ANTHROPIC_RAG_PROMPT, True, True, AnthropicRetrievalLLM),
+    ],
+)
+def test_returned_anthropic_model(llm_config, chat_event, rag_enabled, model, setup_environment, setup_secret):
+    config = json.loads(llm_config["Parameter"]["Value"])
+    chat_event_body = json.loads(chat_event["body"])
+    builder = AnthropicBuilder(
+        connection_id="fake-connection-id",
+        conversation_id="fake-conversation-id",
+        llm_config=config,
+        rag_enabled=rag_enabled,
+    )
+    user_id = chat_event.get("requestContext", {}).get("authorizer", {}).get(USER_ID_EVENT_KEY, {})
+
+    builder.set_knowledge_base()
+    builder.set_memory_constants(LLMProviderTypes.ANTHROPIC.value)
+    builder.set_conversation_memory(user_id, chat_event_body[CONVERSATION_ID_EVENT_KEY])
+    builder.set_api_key()
+    with patch(
+        "clients.builders.llm_builder.WebsocketStreamingCallbackHandler",
+        return_value=AsyncIteratorCallbackHandler(),
+    ):
+        builder.set_llm_model()
+    assert type(builder.llm_model) == model
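
For readers skimming the diff, the builder call sequence that these tests exercise is summarized below as a minimal sketch. It mirrors test_set_llm_model and test_returned_anthropic_model above: the event and config shapes come from the fixtures used in those tests, while the helper name build_anthropic_llm and its parameters are hypothetical, added here only for illustration and not part of this commit.

import json

from clients.builders.anthropic_builder import AnthropicBuilder
from utils.constants import CONVERSATION_ID_EVENT_KEY, USER_ID_EVENT_KEY
from utils.enum_types import LLMProviderTypes


def build_anthropic_llm(chat_event, llm_config_value, connection_id, conversation_id, rag_enabled=False):
    """Hypothetical helper: reproduces the call order verified by the tests in this file."""
    config = json.loads(llm_config_value)  # JSON string, e.g. the parameter value returned by the llm_config fixture
    event_body = json.loads(chat_event["body"])
    user_id = chat_event.get("requestContext", {}).get("authorizer", {}).get(USER_ID_EVENT_KEY, {})

    builder = AnthropicBuilder(
        llm_config=config,
        rag_enabled=rag_enabled,
        connection_id=connection_id,
        conversation_id=conversation_id,
    )

    # Same order the tests assert on: knowledge base, memory constants, conversation memory, API key, model.
    builder.set_knowledge_base()
    builder.set_memory_constants(LLMProviderTypes.ANTHROPIC.value)
    builder.set_conversation_memory(user_id, event_body[CONVERSATION_ID_EVENT_KEY])
    builder.set_api_key()
    builder.set_llm_model()  # per the tests, raises ValueError if memory is missing or config validation failed
    return builder.llm_model

Note that the streaming test cases patch clients.builders.llm_builder.WebsocketStreamingCallbackHandler before calling set_llm_model, since no real WebSocket connection exists under test.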
