Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 22 additions & 0 deletions scenarios/inference/chat-app/chat-simple.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
from azure.ai.projects import AIProjectClient
from azure.identity import DefaultAzureCredential

# Connection string that identifies the Azure AI Foundry project.
# Replace the placeholder before running.
project_connection_string = "<your-connection-string-goes-here>"

# Authenticate via the default Azure credential chain (env vars, CLI login,
# managed identity, ...) and open the project.
project = AIProjectClient.from_connection_string(
    conn_str=project_connection_string,
    credential=DefaultAzureCredential(),
)

# Grab a chat-completions client from the project's inference endpoint.
chat = project.inference.get_chat_completions_client()

# One-shot conversation: a personality-setting system prompt plus a user turn.
conversation = [
    {
        "role": "system",
        "content": (
            "You are an AI assistant that speaks like a techno punk rocker "
            "from 2350. Be cool but not too cool. Ya dig?"
        ),
    },
    {
        "role": "user",
        "content": "Hey, can you help me with my taxes? I'm a freelancer.",
    },
]

response = chat.complete(model="gpt-4o-mini", messages=conversation)

# Print only the assistant's reply text.
print(response.choices[0].message.content)
41 changes: 41 additions & 0 deletions scenarios/inference/chat-app/chat-template.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
# contents of this file meant to be appended to the end of chat-simple.py
# <chat_function>
from azure.ai.inference.prompts import PromptTemplate


def get_chat_response(messages, context, *, model="gpt-4o-mini"):
    """Complete a chat turn using a system prompt rendered from *context*.

    A mustache-style prompt template is filled with the values in *context*
    to produce the system message, which is prepended to the caller-supplied
    conversation before requesting a completion.

    Args:
        messages: Conversation so far — a list of ``{"role", "content"}``
            dicts (e.g. the user's turns).
        context: Mapping supplying the template variables; this template
            expects ``first_name`` and ``last_name`` keys.
        model: Model/deployment name to complete with. Keyword-only; the
            default preserves the original hard-coded ``gpt-4o-mini``.

    Returns:
        The raw chat-completions response object from ``chat.complete``;
        the reply text is at ``response.choices[0].message.content``.

    NOTE(review): relies on a module-level ``chat`` client being in scope
    (this file is meant to be appended to chat-simple.py, which creates it).
    """
    # create a prompt template from an inline string (using mustache syntax)
    prompt_template = PromptTemplate.from_string(
        prompt_template="""
        system:
        You are an AI assistant that speaks like a techno punk rocker from 2350. Be cool but not too cool. Ya dig? Refer to the user by their first name, try to work their last name into a pun.

        The user's first name is {{first_name}} and their last name is {{last_name}}.
        """
    )

    # generate system message from the template, passing in the context as variables
    system_message = prompt_template.create_messages(data=context)

    # add the prompt messages to the user messages; sampling knobs kept from
    # the original sample (slightly creative, mildly repetition-averse)
    response = chat.complete(
        model=model,
        messages=system_message + messages,
        temperature=1,
        frequency_penalty=0.5,
        presence_penalty=0.5,
    )

    return response


# </chat_function>

# <create_response>
if __name__ == "__main__":
    # Demo driver: one user turn, plus the context the template needs.
    user_messages = [
        {"role": "user", "content": "what city has the best food in the world?"}
    ]
    user_context = {"first_name": "Jessie", "last_name": "Irwin"}

    reply = get_chat_response(messages=user_messages, context=user_context)
    print(reply.choices[0].message.content)
# </create_response>
Loading