Skip to content

Commit e3982c4

Browse files
authored
Merge pull request #82 from alkem-io/update-response-prompt-for-mistral-medium-2505
Update response prompt to work with mistral-medium-2505
2 parents 796a7df + 0fc46c7 commit e3982c4

File tree

5 files changed

+1139
-1073
lines changed

5 files changed

+1139
-1073
lines changed

ai_adapter.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ async def query_chain(input: Input) -> Response:
6767

6868
messages = [
6969
SystemMessage(content=bok_system_prompt.format(knowledge=context)),
70-
SystemMessage(content=response_system_prompt.format(context=context)),
70+
SystemMessage(content=response_system_prompt),
7171
UserMessage(content=message),
7272
]
7373

models.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,9 @@
88

99
from azure.ai.inference import ChatCompletionsClient
1010
from azure.core.credentials import AzureKeyCredential
11+
from logger import setup_logger
12+
13+
logger = setup_logger(__name__)
1114

1215
llm = ChatCompletionsClient(
1316
endpoint=env.mistral_endpoint,
@@ -18,13 +21,17 @@
1821
def invoke_model(messages, temperature=None):
    """Send *messages* to the configured chat-completions client and return the reply text.

    When *temperature* is omitted, the environment-configured default
    (``env.model_temperature``) is used. The full reply is logged at
    debug level before being returned.
    """
    if temperature is None:
        temperature = env.model_temperature

    # Non-streaming completion; top_p pinned to 1 so sampling is
    # governed by temperature alone.
    response = llm.complete(
        messages=messages,
        temperature=temperature,
        top_p=1,
        stream=False,
    )
    reply = str(response["choices"][0]["message"]["content"])

    logger.debug(reply)

    return reply
2936

3037

0 commit comments

Comments (0)