|
1 |
| -import json |
2 |
| -import logging |
3 |
| -from os import path |
4 |
| -import requests |
5 |
| -from openai import AzureOpenAI |
6 |
| -import mimetypes |
7 |
| -from flask import Flask, Response, request, jsonify |
8 |
| -from dotenv import load_dotenv |
9 |
| -import sys |
10 |
| -from backend.batch.utilities.helpers.EnvHelper import EnvHelper |
11 | 1 | from azure.monitor.opentelemetry import configure_azure_monitor
|
| 2 | +from create_app import create_app |
12 | 3 |
|
13 |
# Fixing MIME types for static files under Windows
mimetypes.add_type("application/javascript", ".js")
mimetypes.add_type("text/css", ".css")

# Make the parent package importable when this module is run directly.
sys.path.append(path.join(path.dirname(__file__), ".."))

load_dotenv(
    path.join(path.dirname(__file__), "..", "..", ".env")
)  # Load environment variables from .env file

# Flask app serving both the static frontend and the conversation API.
app = Flask(__name__)
# Central accessor for all Azure configuration read from the environment.
env_helper: EnvHelper = EnvHelper()
25 |
| - |
26 |
| - |
27 |
@app.route("/", defaults={"path": "index.html"})
@app.route("/<path:path>")
def static_file(path):
    # Serve frontend assets from Flask's static folder; "/" falls back to
    # index.html so the SPA loads on a bare request.
    return app.send_static_file(path)
31 |
| - |
32 |
| - |
33 |
@app.route("/api/config", methods=["GET"])
def get_config():
    """Expose client-side configuration (speech service + OpenAI endpoint) as JSON."""
    config = {
        "azureSpeechKey": env_helper.AZURE_SPEECH_KEY,
        "azureSpeechRegion": env_helper.AZURE_SPEECH_SERVICE_REGION,
        "AZURE_OPENAI_ENDPOINT": env_helper.AZURE_OPENAI_ENDPOINT,
    }
    return jsonify(config)
43 |
| - |
44 |
| - |
45 |
def prepare_body_headers_with_data(request):
    """Build the body and headers for an Azure OpenAI "on your data" request.

    Takes the chat messages from the incoming Flask request and combines them
    with the Azure AI Search data-source settings read from ``env_helper``.
    Returns a ``(body, headers)`` tuple ready to POST to the chat completions
    endpoint.
    """
    stop_sequence = env_helper.AZURE_OPENAI_STOP_SEQUENCE
    content_columns = env_helper.AZURE_SEARCH_CONTENT_COLUMNS
    use_semantic = env_helper.AZURE_SEARCH_USE_SEMANTIC_SEARCH
    semantic_config = env_helper.AZURE_SEARCH_SEMANTIC_SEARCH_CONFIG

    # Column-to-field mapping for the search index; "|"-separated lists in the
    # environment become Python lists, empty strings become None/[].
    fields_mapping = {
        "content_fields": content_columns.split("|") if content_columns else [],
        "title_field": env_helper.AZURE_SEARCH_TITLE_COLUMN or None,
        "url_field": env_helper.AZURE_SEARCH_URL_COLUMN or None,
        "filepath_field": env_helper.AZURE_SEARCH_FILENAME_COLUMN or None,
    }

    search_parameters = {
        # authentication is attached below, based on AZURE_AUTH_TYPE
        "endpoint": env_helper.AZURE_SEARCH_SERVICE,
        "index_name": env_helper.AZURE_SEARCH_INDEX,
        "fields_mapping": fields_mapping,
        "in_scope": env_helper.AZURE_SEARCH_ENABLE_IN_DOMAIN,
        "top_n_documents": env_helper.AZURE_SEARCH_TOP_K,
        "query_type": "semantic" if use_semantic else "simple",
        "semantic_configuration": (
            semantic_config if use_semantic and semantic_config else ""
        ),
        "role_information": env_helper.AZURE_OPENAI_SYSTEM_MESSAGE,
    }

    body = {
        "messages": request.json["messages"],
        "temperature": float(env_helper.AZURE_OPENAI_TEMPERATURE),
        "max_tokens": int(env_helper.AZURE_OPENAI_MAX_TOKENS),
        "top_p": float(env_helper.AZURE_OPENAI_TOP_P),
        "stop": stop_sequence.split("|") if stop_sequence else None,
        "stream": env_helper.SHOULD_STREAM,
        "data_sources": [{"type": "azure_search", "parameters": search_parameters}],
    }

    headers = {
        "Content-Type": "application/json",
        "x-ms-useragent": "GitHubSampleWebApp/PublicAPI/1.0.0",
    }

    if env_helper.AZURE_AUTH_TYPE == "rbac":
        # Managed identity: bearer token for OpenAI, system identity for Search.
        search_parameters["authentication"] = {
            "type": "system_assigned_managed_identity"
        }
        headers["Authorization"] = f"Bearer {env_helper.AZURE_TOKEN_PROVIDER()}"
    else:
        # Key-based auth for both services.
        search_parameters["authentication"] = {
            "type": "api_key",
            "key": env_helper.AZURE_SEARCH_KEY,
        }
        headers["api-key"] = env_helper.AZURE_OPENAI_API_KEY

    return body, headers
115 |
| - |
116 |
| - |
117 |
def stream_with_data(body, headers, endpoint):
    """Proxy a streaming "on your data" chat completion, yielding JSON lines.

    Accumulates assistant deltas into one response envelope holding two
    messages (tool context + assistant text) and re-yields the full envelope
    after each chunk. On error, a final ``{"error": ...}`` line is emitted.
    """
    s = requests.Session()
    response = {
        "id": "",
        "model": "",
        "created": 0,
        "object": "",
        "choices": [
            {
                "messages": [
                    {
                        "content": "",
                        "end_turn": False,
                        "role": "tool",
                    },
                    {
                        "content": "",
                        "end_turn": False,
                        "role": "assistant",
                    },
                ]
            }
        ],
    }
    try:
        with s.post(endpoint, json=body, headers=headers, stream=True) as r:
            for line in r.iter_lines(chunk_size=10):
                if line:
                    # Strip the SSE "data: " prefix explicitly. The previous
                    # lstrip(b"data: ") removed any of the characters
                    # {d, a, t, :, space} from the left — a character-set
                    # strip, not a prefix strip — which would corrupt a
                    # payload starting with one of those characters.
                    if line.startswith(b"data: "):
                        line = line[len(b"data: "):]
                    lineJson = json.loads(line.decode("utf-8"))
                    if "error" in lineJson:
                        yield json.dumps(lineJson, ensure_ascii=False) + "\n"
                        return

                    if lineJson["choices"][0]["end_turn"]:
                        response["choices"][0]["messages"][1]["end_turn"] = True
                        yield json.dumps(response, ensure_ascii=False) + "\n"
                        return

                    response["id"] = lineJson["id"]
                    response["model"] = lineJson["model"]
                    response["created"] = lineJson["created"]
                    response["object"] = lineJson["object"]

                    delta = lineJson["choices"][0]["delta"]
                    role = delta.get("role")

                    if role == "assistant":
                        # NOTE(review): the delta flagged role "assistant"
                        # carries the retrieval "context" here and is written
                        # into the *tool* slot — presumably the citations
                        # payload; confirm against the service's chunk shape.
                        response["choices"][0]["messages"][0]["content"] = json.dumps(
                            delta["context"],
                            ensure_ascii=False,
                        )
                    else:
                        response["choices"][0]["messages"][1]["content"] += delta[
                            "content"
                        ]

                    yield json.dumps(response, ensure_ascii=False) + "\n"
    except Exception as e:
        # Surface the failure to the client as a final JSON-line error record.
        yield json.dumps({"error": str(e)}, ensure_ascii=False) + "\n"
176 |
| - |
177 |
| - |
178 |
def conversation_with_data(request):
    """Handle one chat turn grounded on Azure AI Search data.

    Streams JSON lines when SHOULD_STREAM is set; otherwise returns a single
    JSON envelope with the tool (context) and assistant messages.
    """
    body, headers = prepare_body_headers_with_data(request)
    endpoint = f"{env_helper.AZURE_OPENAI_ENDPOINT}openai/deployments/{env_helper.AZURE_OPENAI_MODEL}/chat/completions?api-version={env_helper.AZURE_OPENAI_API_VERSION}"

    if env_helper.SHOULD_STREAM:
        return Response(
            stream_with_data(body, headers, endpoint),
            mimetype="application/json-lines",
        )

    r = requests.post(endpoint, headers=headers, json=body)
    status_code = r.status_code
    result = r.json()

    message = result["choices"][0]["message"]
    response = {
        "id": result["id"],
        "model": result["model"],
        "created": result["created"],
        "object": result["object"],
        "choices": [
            {
                "messages": [
                    {
                        # Retrieval context (citations) serialized for the UI.
                        "content": json.dumps(
                            message["context"],
                            ensure_ascii=False,
                        ),
                        "end_turn": False,
                        "role": "tool",
                    },
                    {
                        "content": message["content"],
                        "end_turn": True,
                        "role": "assistant",
                    },
                ]
            }
        ],
    }

    return jsonify(response), status_code
219 |
| - |
220 |
| - |
221 |
def stream_without_data(response):
    """Yield JSON-line envelopes while accumulating streamed assistant text.

    Each yielded line carries the full text received so far. Chunks without
    choices are skipped; the generator stops at the first chunk whose content
    delta is None.
    """
    accumulated = ""
    for chunk in response:
        if not chunk.choices:
            continue

        piece = chunk.choices[0].delta.content
        if piece is None:
            # No content delta: stop streaming.
            return
        accumulated += piece

        envelope = {
            "id": chunk.id,
            "model": chunk.model,
            "created": chunk.created,
            "object": chunk.object,
            "choices": [
                {"messages": [{"role": "assistant", "content": accumulated}]}
            ],
        }
        yield json.dumps(envelope, ensure_ascii=False) + "\n"
242 |
| - |
243 |
| - |
244 |
def conversation_without_data(request):
    """Handle one chat turn against Azure OpenAI with no data grounding."""
    client_kwargs = {
        "azure_endpoint": env_helper.AZURE_OPENAI_ENDPOINT,
        "api_version": env_helper.AZURE_OPENAI_API_VERSION,
    }
    if env_helper.AZURE_AUTH_TYPE == "rbac":
        # Managed-identity auth uses a token provider instead of a static key.
        openai_client = AzureOpenAI(
            azure_ad_token_provider=env_helper.AZURE_TOKEN_PROVIDER,
            **client_kwargs,
        )
    else:
        openai_client = AzureOpenAI(
            api_key=env_helper.AZURE_OPENAI_API_KEY,
            **client_kwargs,
        )

    # Prepend the configured system prompt to the client-supplied history.
    messages = [{"role": "system", "content": env_helper.AZURE_OPENAI_SYSTEM_MESSAGE}]
    messages.extend(
        {"role": message["role"], "content": message["content"]}
        for message in request.json["messages"]
    )

    stop_sequence = env_helper.AZURE_OPENAI_STOP_SEQUENCE
    # Azure Open AI takes the deployment name as the model name, "AZURE_OPENAI_MODEL" means deployment name.
    response = openai_client.chat.completions.create(
        model=env_helper.AZURE_OPENAI_MODEL,
        messages=messages,
        temperature=float(env_helper.AZURE_OPENAI_TEMPERATURE),
        max_tokens=int(env_helper.AZURE_OPENAI_MAX_TOKENS),
        top_p=float(env_helper.AZURE_OPENAI_TOP_P),
        stop=stop_sequence.split("|") if stop_sequence else None,
        stream=env_helper.SHOULD_STREAM,
    )

    if env_helper.SHOULD_STREAM:
        return Response(
            stream_without_data(response), mimetype="application/json-lines"
        )

    response_obj = {
        "id": response.id,
        "model": response.model,
        "created": response.created,
        "object": response.object,
        "choices": [
            {
                "messages": [
                    {
                        "role": "assistant",
                        "content": response.choices[0].message.content,
                    }
                ]
            }
        ],
    }
    return jsonify(response_obj), 200
302 |
| - |
303 |
| - |
304 |
@app.route("/api/conversation/azure_byod", methods=["POST"])
def conversation_azure_byod():
    """Route a BYOD chat request to the data-grounded or plain handler.

    Any exception is logged and reported to the client as a generic 500.
    """
    try:
        handler = (
            conversation_with_data
            if env_helper.should_use_data()
            else conversation_without_data
        )
        return handler(request)
    except Exception as e:
        errorMessage = str(e)
        logging.exception(f"Exception in /api/conversation/azure_byod | {errorMessage}")
        return (
            jsonify(
                {
                    "error": "Exception in /api/conversation/azure_byod. See log for more details."
                }
            ),
            500,
        )
322 |
| - |
323 |
| - |
324 |
def get_message_orchestrator():
    """Return a new Orchestrator instance (deferred import)."""
    from backend.batch.utilities.helpers.OrchestratorHelper import Orchestrator

    return Orchestrator()
328 |
| - |
329 |
| - |
330 |
def get_orchestrator_config():
    """Return the orchestrator settings from the active config (deferred import)."""
    from backend.batch.utilities.helpers.ConfigHelper import ConfigHelper

    return ConfigHelper.get_active_config_or_default().orchestrator
334 |
| - |
335 |
| - |
336 |
@app.route("/api/conversation/custom", methods=["POST"])
def conversation_custom():
    """Handle one chat turn through the configured message orchestrator.

    The latest message is the user prompt; everything before it, filtered to
    user/assistant turns, is passed as chat history. Errors are logged and
    returned as a generic 500.
    """
    message_orchestrator = get_message_orchestrator()

    try:
        payload = request.json
        user_message = payload["messages"][-1]["content"]
        conversation_id = payload["conversation_id"]
        user_assistant_messages = [
            m
            for m in payload["messages"][0:-1]
            if m["role"] in ("user", "assistant")
        ]

        messages = message_orchestrator.handle_message(
            user_message=user_message,
            chat_history=user_assistant_messages,
            conversation_id=conversation_id,
            orchestrator=get_orchestrator_config(),
        )

        # NOTE(review): id/created/object are placeholder string literals, not
        # real response metadata.
        response_obj = {
            "id": "response.id",
            "model": env_helper.AZURE_OPENAI_MODEL,
            "created": "response.created",
            "object": "response.object",
            "choices": [{"messages": messages}],
        }

        return jsonify(response_obj), 200

    except Exception as e:
        errorMessage = str(e)
        logging.exception(f"Exception in /api/conversation/custom | {errorMessage}")
        return (
            jsonify(
                {
                    "error": "Exception in /api/conversation/custom. See log for more details."
                }
            ),
            500,
        )
378 |
| - |
# Build the Flask app via the application factory.
app = create_app()

if __name__ == "__main__":
    # Run the development server when this module is executed directly.
    app.run()
|
|
0 commit comments