
Commit a9d386e

Add OPENAI_API_VERSION constant to routes (#1015)

1 parent 67f678b commit a9d386e

2 files changed: +6 -4 lines changed

README.md (1 addition, 1 deletion)

@@ -222,7 +222,7 @@ is plaintext and will not be formatted to the OpenAI API specification. If `stre
 [skip default]: begin
 
 ```
-curl http://127.0.0.1:5000/chat \
+curl http://127.0.0.1:5000/v1/chat \
   -H "Content-Type: application/json" \
   -d '{
     "model": "llama3.1",

server.py (5 additions, 3 deletions)

@@ -16,6 +16,8 @@
 from flask import Flask, request, Response
 from generate import GeneratorArgs
 
+OPENAI_API_VERSION = "v1"
+
 
 def create_app(args):
     """
@@ -33,7 +35,7 @@ def _del_none(d: Union[Dict, List]) -> Union[Dict, List]:
             return [_del_none(v) for v in d if v]
         return d
 
-    @app.route("/chat", methods=["POST"])
+    @app.route(f"/{OPENAI_API_VERSION}/chat", methods=["POST"])
     def chat_endpoint():
         """
         Endpoint for the Chat API. This endpoint is used to generate a response to a user prompt.
@@ -75,11 +77,11 @@ def chunk_processor(chunked_completion_generator):
 
             return json.dumps(_del_none(asdict(response)))
 
-    @app.route("/models", methods=["GET"])
+    @app.route(f"/{OPENAI_API_VERSION}/models", methods=["GET"])
     def models_endpoint():
         return json.dumps(asdict(get_model_info_list(args)))
 
-    @app.route("/models/<model_id>", methods=["GET"])
+    @app.route(f"/{OPENAI_API_VERSION}/models/<model_id>", methods=["GET"])
     def models_retrieve_endpoint(model_id):
         if response := retrieve_model_info(args, model_id):
             return json.dumps(asdict(response))
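
On the server side, the change is a single module-level constant interpolated into each route decorator via an f-string, so every endpoint moves under the `/v1` prefix by editing one line. A self-contained sketch of the same pattern, with placeholder handlers standing in for the repository's generator code, might look like this:

```python
# Illustrative sketch of the version-prefix pattern, not the repository's server.py:
# the handlers below are stand-ins so the example runs on its own with Flask installed.
from flask import Flask, jsonify

OPENAI_API_VERSION = "v1"  # bump in one place to move every route to a new prefix


def create_app():
    app = Flask(__name__)

    @app.route(f"/{OPENAI_API_VERSION}/chat", methods=["POST"])
    def chat_endpoint():
        return jsonify({"status": "ok"})  # placeholder response

    @app.route(f"/{OPENAI_API_VERSION}/models", methods=["GET"])
    def models_endpoint():
        return jsonify({"data": []})  # placeholder model list

    return app


if __name__ == "__main__":
    create_app().run(port=5000)
```

Since the f-strings are evaluated when the decorators register the routes, changing `OPENAI_API_VERSION` after `create_app()` has run has no effect on already-registered URLs.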
