Commit 8d719ef

feat: enhance support for Azure OpenAI
1 parent: c6f991a · commit: 8d719ef

File tree

1 file changed: +44 -21 lines


gpt_code_ui/webapp/main.py

Lines changed: 44 additions & 21 deletions
@@ -17,8 +17,12 @@
 
 load_dotenv('.env')
 
+OPENAI_API_TYPE = os.environ.get("OPENAI_API_TYPE", "openai")
 OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "")
 OPENAI_BASE_URL = os.environ.get("OPENAI_BASE_URL", "https://api.openai.com")
+OPENAI_API_VERSION = os.environ.get("OPENAI_API_VERSION", "2023-03-15-preview")
+AZURE_OPENAI_DEPLOYMENT = os.environ.get("AZURE_OPENAI_DEPLOYMENT", "")
+
 
 UPLOAD_FOLDER = 'workspace/'
 os.makedirs(UPLOAD_FOLDER, exist_ok=True)
@@ -55,32 +59,51 @@ def allowed_file(filename):
 async def get_code(user_prompt, user_openai_key=None, model="gpt-3.5-turbo"):
 
     prompt = f"First, here is a history of what I asked you to do earlier. The actual prompt follows after ENDOFHISTORY. History:\n\n{message_buffer.get_string()}ENDOFHISTORY.\n\nWrite Python code that does the following: \n\n{user_prompt}\n\nNote, the code is going to be executed in a Jupyter Python kernel.\n\nLast instruction, and this is the most important, just return code. No other outputs, as your full response will directly be executed in the kernel. \n\nTeacher mode: if you want to give a download link, just print it as <a href='/download?file=INSERT_FILENAME_HERE'>Download file</a>. Replace INSERT_FILENAME_HERE with the actual filename. So just print that HTML to stdout. No actual downloading of files!"
-
-    data = {
-        "model": model,
-        "messages": [
-            {
-                "role": "user",
-                "content": prompt,
-            },
-        ],
-        "temperature": 0.7,
-    }
+    temperature = 0.7
+    message_array = [
+        {
+            "role": "user",
+            "content": prompt,
+        },
+    ]
 
     final_openai_key = OPENAI_API_KEY
     if user_openai_key:
         final_openai_key = user_openai_key
 
-    headers = {
-        "Content-Type": "application/json",
-        "Authorization": f"Bearer {final_openai_key}",
-    }
-
-    response = requests.post(
-        f"{OPENAI_BASE_URL}/v1/chat/completions",
-        data=json.dumps(data),
-        headers=headers,
-    )
+    if OPENAI_API_TYPE == "openai":
+        data = {
+            "model": model,
+            "messages": message_array,
+            "temperature": temperature,
+        }
+        headers = {
+            "Content-Type": "application/json",
+            "Authorization": f"Bearer {final_openai_key}",
+        }
+
+        response = requests.post(
+            f"{OPENAI_BASE_URL}/v1/chat/completions",
+            data=json.dumps(data),
+            headers=headers,
+        )
+    elif OPENAI_API_TYPE == "azure":
+        data = {
+            "messages": message_array,
+            "temperature": temperature,
+        }
+        headers = {
+            "Content-Type": "application/json",
+            "api-key": f"{final_openai_key}",
+        }
+
+        response = requests.post(
+            f"{OPENAI_BASE_URL}/openai/deployments/{AZURE_OPENAI_DEPLOYMENT}/chat/completions?api-version={OPENAI_API_VERSION}",
+            data=json.dumps(data),
+            headers=headers,
+        )
+    else:
+        return "Error: Invalid OPENAI_PROVIDER", 500
 
 def extract_code(text):
     # Match triple backtick blocks first
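
For readers trying out the new Azure path, a hypothetical .env is sketched below. Every value is a placeholder; of the variables shown, only OPENAI_API_TYPE, OPENAI_API_VERSION, and AZURE_OPENAI_DEPLOYMENT are introduced by this commit, and the sketch assumes OPENAI_BASE_URL is repointed from the default https://api.openai.com to the Azure resource endpoint that the deployment URL is built from.

# Hypothetical .env sketch (placeholder values, not part of this commit)
OPENAI_API_TYPE=azure
OPENAI_API_KEY=<azure-openai-api-key>
OPENAI_BASE_URL=https://<your-resource>.openai.azure.com
OPENAI_API_VERSION=2023-03-15-preview
AZURE_OPENAI_DEPLOYMENT=<chat-deployment-name>

With OPENAI_API_TYPE left at its default of "openai", behavior is unchanged: the request is a Bearer-authenticated POST to {OPENAI_BASE_URL}/v1/chat/completions, as in the first branch above.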
