Skip to content

Commit 1d2c585

Browse files
committed
add client using OpenAI library
1 parent e5e3cc3 commit 1d2c585

File tree

3 files changed: +113 −40 lines changed

backend/internals/azure_openai.py

Lines changed: 31 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -2,9 +2,7 @@
22
from collections.abc import AsyncIterable
33
from logging import getLogger
44

5-
from langchain.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
6-
from langchain_core.messages import HumanMessage, SystemMessage
7-
from langchain_openai import AzureChatOpenAI
5+
from openai import AzureOpenAI
86

97
from backend.settings.azure_openai import Settings
108

@@ -15,40 +13,39 @@ class Client:
1513
def __init__(self, settings: Settings) -> None:
    """Store the Azure OpenAI settings; a client is built on demand per call."""
    self.settings = settings
1715

def get_client(self) -> AzureOpenAI:
    """Build an AzureOpenAI client from the configured credentials.

    A fresh client is constructed on every call; the deployment (model)
    name is supplied per-request by the callers, not here.
    """
    cfg = self.settings
    return AzureOpenAI(
        api_key=cfg.azure_openai_api_key,
        api_version=cfg.azure_openai_api_version,
        azure_endpoint=cfg.azure_openai_endpoint,
    )
2522

2623
def create_chat_completions(
    self,
    content: str,
) -> str:
    """Send *content* as a single user message and return the reply text.

    Args:
        content: Plain-text prompt forwarded as one ``user`` message.

    Returns:
        The assistant's reply text from the first choice.
    """
    completion = self.get_client().chat.completions.create(
        model=self.settings.azure_openai_gpt_model,
        messages=[{"role": "user", "content": content}],
        stream=False,
    )
    # Logs the whole completion object (ids, usage, choices).
    logger.info(completion)
    return completion.choices[0].message.content
3936

4037
async def create_chat_completions_stream(
    self,
    content: str,
) -> AsyncIterable[str]:
    """Stream chat-completion chunks for *content*.

    Not implemented yet: the object returned by
    ``chat.completions.create(stream=True)`` is a synchronous stream and
    still has to be adapted to an async iterable.

    Raises:
        NotImplementedError: always, until streaming is ported.
    """
    # NOTE(review): the previous version issued a real (billable) request
    # with stream=True, discarded the result, and then hit
    # ``assert False`` — which is stripped under ``python -O``, letting
    # callers continue silently. Fail fast and unconditionally instead,
    # without making the wasted API call.
    raise NotImplementedError("Yet to be implemented.")
5249

5350
def create_chat_completions_with_vision(
    self,
    system_prompt: str,
    user_prompt: str,
    image: bytes,
) -> str:
    """Ask the deployment about *image*, guided by the two prompts.

    Args:
        system_prompt: System message steering the model.
        user_prompt: Question/instruction about the image.
        image: Raw image bytes, sent inline as a JPEG data URL.

    Returns:
        The assistant's reply text from the first choice.
    """
    image_b64 = b64encode(image).decode("ascii")
    user_parts = [
        {"type": "text", "text": user_prompt},
        {
            "type": "image_url",
            "image_url": {"url": f"data:image/jpeg;base64,{image_b64}"},
        },
    ]
    completion = self.get_client().chat.completions.create(
        model=self.settings.azure_openai_gpt_model,
        messages=[
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": user_parts},
        ],
        stream=False,
    )
    logger.info(completion)
    return completion.choices[0].message.content
Lines changed: 81 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,81 @@
1+
from base64 import b64encode
2+
from collections.abc import AsyncIterable
3+
from logging import getLogger
4+
5+
from langchain.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
6+
from langchain_core.messages import HumanMessage, SystemMessage
7+
from langchain_openai import AzureChatOpenAI
8+
9+
from backend.settings.azure_openai import Settings
10+
11+
logger = getLogger(__name__)
12+
13+
14+
class Client:
    """LangChain-based Azure OpenAI chat client.

    Wraps ``AzureChatOpenAI`` for plain, streamed, and vision chat
    completions, configured from the project ``Settings``.
    """

    def __init__(self, settings: Settings) -> None:
        # Settings carry the endpoint, API key, API version and deployment.
        self.settings = settings

    def get_client(self) -> AzureChatOpenAI:
        """Construct an AzureChatOpenAI bound to the configured deployment."""
        return AzureChatOpenAI(
            api_key=self.settings.azure_openai_api_key,
            api_version=self.settings.azure_openai_api_version,
            azure_endpoint=self.settings.azure_openai_endpoint,
            azure_deployment=self.settings.azure_openai_gpt_model,
        )

    def create_chat_completions(
        self,
        content: str,
    ) -> str:
        """Send *content* as one human message and return the reply text."""
        response = self.get_client().invoke(
            [
                HumanMessage(
                    content=content,
                ),
            ]
        )
        # Logs the full response object, including any usage metadata.
        logger.info(response)
        return response.content

    async def create_chat_completions_stream(
        self,
        content: str,
    ) -> AsyncIterable[str]:
        """Yield reply chunks for *content* as they stream from the model."""
        llm = self.get_client()
        # Single-slot prompt template; the user text fills "{message}".
        messages = [HumanMessagePromptTemplate.from_template(template="{message}")]
        prompt = ChatPromptTemplate.from_messages(messages)
        chain = prompt | llm
        res = chain.astream({"message": content})
        async for msg in res:
            logger.info(msg)
            yield msg.content

    def create_chat_completions_with_vision(
        self,
        system_prompt: str,
        user_prompt: str,
        image: bytes,
    ) -> str:
        """Ask the model about *image* (sent inline as a JPEG data URL).

        *system_prompt* steers the model; *user_prompt* is the question
        attached alongside the image in a single human message.
        """
        encoded_image = b64encode(image).decode("ascii")

        response = self.get_client().invoke(
            [
                SystemMessage(
                    content=system_prompt,
                ),
                HumanMessage(
                    content=[
                        {
                            "type": "text",
                            "text": user_prompt,
                        },
                        {
                            "type": "image_url",
                            "image_url": {"url": f"data:image/jpeg;base64,{encoded_image}"},
                        },
                    ],
                ),
            ]
        )
        logger.info(response)
        return response.content

backend/routers/azure_openai.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
from fastapi import APIRouter, UploadFile
44
from fastapi.responses import StreamingResponse
55

6-
from backend.internals.azure_openai import Client
6+
from backend.internals.azure_openai_langchain import Client
77
from backend.schemas import azure_openai as azure_openai_schemas
88
from backend.settings.azure_openai import Settings
99

0 commit comments

Comments
 (0)