[Question]: Cannot connect to local OpenAI compatible API #4757
Unanswered
tsoernes asked this question in Troubleshooting
Replies: 2 comments 2 replies
-
Try changing
-
@jmaddington Okay. Now I am able to connect, and the API responds. But LibreChat still shows an error. LibreChat log:
API log:
API code:
```python
# Imports inferred from the calls below; run_query, format_response and
# function_key are defined elsewhere in the original code.
import json
import logging
import random
import time
from string import ascii_letters

import azure.functions as func
import stackprinter

app = func.FunctionApp()  # assumed setup implied by the @app.route decorator (Azure Functions v2 Python model)

# Template response in the OpenAI chat completions format; the id, created
# timestamp and message content are overwritten per request.
default_response = {
    "choices": [
        {
            "content_filter_results": {
                "hate": {"filtered": False, "severity": "safe"},
                "self_harm": {"filtered": False, "severity": "safe"},
                "sexual": {"filtered": False, "severity": "safe"},
                "violence": {"filtered": False, "severity": "safe"},
            },
            "finish_reason": "stop",
            "index": 0,
            "logprobs": None,
            "message": {
                "audio": None,
                "content": "PLACEHOLDER",
                "function_call": None,
                "refusal": None,
                "role": "assistant",
                "tool_calls": None,
            },
        }
    ],
    "created": 1731669457,
    "id": "chatcmpl-ATocTflkPHoUliboru2hb5h9tk0ox",
    "model": "gpt-4o",
    "object": "chat.completion",
    "prompt_filter_results": [
        {
            "content_filter_results": {
                "hate": {"filtered": False, "severity": "safe"},
                "self_harm": {"filtered": False, "severity": "safe"},
                "sexual": {"filtered": False, "severity": "safe"},
                "violence": {"filtered": False, "severity": "safe"},
            },
            "prompt_index": 0,
        }
    ],
    "service_tier": None,
    "system_fingerprint": "fp_000eow_rag",
    "usage": {
        "completion_tokens": 1,
        "prompt_tokens": 1,
        "completion_tokens_details": {
            "accepted_prediction_tokens": 0,
            "audio_tokens": 0,
            "reasoning_tokens": 0,
            "rejected_prediction_tokens": 0,
        },
        "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0},
        "total_tokens": 2,
    },
}


@app.route(route="v1/chat/completions", auth_level=func.AuthLevel.ANONYMOUS)
def chat_completions(req: func.HttpRequest) -> func.HttpResponse:
    # OpenAI API compatible
    print(f"Request headers: {req.headers}")
    try:
        print(f"Request: {req.get_json()}")
    except Exception as e:
        return func.HttpResponse(str(e), status_code=400)

    # Check the bearer token against the function key.
    token = req.headers.get("Authorization")
    if not token:
        return func.HttpResponse("Unauthorized", status_code=401)
    token = token.replace("Bearer ", "")
    if token != function_key:
        return func.HttpResponse("Incorrect function key", status_code=401)

    logging.info(f"Python HTTP trigger function processed a request: {req}")
    try:
        js = req.get_json()
        messages = js["messages"]
        print(f"{messages=}")
        # The last message holds the query; its content may be a plain string
        # or a list of content parts.
        content = messages[-1]["content"]
        if isinstance(content, list):
            query = content[-1]["text"]
        else:
            query = content
        _, response = run_query(query=query, history=messages[:-1])

        # Fill the template with a fresh id, timestamp and the generated answer.
        default_response["id"] = "chatcmpl-" + "".join(
            random.choices(ascii_letters, k=29)
        )
        default_response["created"] = int(time.time())
        default_response["choices"][0]["message"]["content"] = format_response(response)
        js = json.dumps(default_response)
        print(f"Returning {js}")
        return func.HttpResponse(js, status_code=200, mimetype="application/json")
    except Exception as e:
        stackprinter.show_current_exception()
        print(f"{req=}, {req.headers=}, {req.params=}, {type(e)}, {e=}")
        logging.error(f"{req=}, {req.headers=}, {req.params=}, {type(e)}, {e=}")
        return func.HttpResponse(str(e), status_code=400)
```
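Since the handler mirrors the OpenAI chat completions schema, it can also be sanity-checked with the official openai Python client pointed at the local route. This is only a sketch: the base URL, port and key below are placeholders, not the values actually used here.
```python
from openai import OpenAI

# Placeholders: point base_url at wherever the function is listening and pass
# the function key as the API key.
client = OpenAI(base_url="http://localhost:7071/v1", api_key="<function key>")

completion = client.chat.completions.create(
    model="gpt-4o",
    messages=[{"role": "user", "content": "ping"}],
)
print(completion.choices[0].message.content)
```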
-
What is your question?
I have made an OpenAI-compatible API that runs locally. I'm trying to connect to it with LibreChat.
I have this in librechat.yaml:
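(For reference, a minimal custom-endpoint entry in librechat.yaml generally looks something like the sketch below; the name, baseURL and model are placeholders, not the exact values used here.)
```yaml
# Sketch of a custom OpenAI-compatible endpoint; all values are placeholders.
endpoints:
  custom:
    - name: "Local API"
      apiKey: "${LOCAL_API_KEY}"
      baseURL: "http://host.docker.internal:7071/v1"
      models:
        default: ["gpt-4o"]
        fetch: false
```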
And I can successfully use the API within the container:
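(A check along these lines can be done from inside the container with a plain HTTP POST; the sketch below uses placeholder values for the URL and key.)
```python
import requests

# Placeholders: same base URL and key that librechat.yaml points at.
url = "http://host.docker.internal:7071/v1/chat/completions"
headers = {"Authorization": "Bearer <function key>"}
payload = {
    "model": "gpt-4o",
    "messages": [{"role": "user", "content": "ping"}],
}

resp = requests.post(url, json=payload, headers=headers, timeout=60)
print(resp.status_code, resp.json()["choices"][0]["message"]["content"])
```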
I can also select the model in LibreChat, but not use it:
Logs say:
More Details
Let me know if you need any further details.
What is the main subject of your question?
Endpoints
Screenshots
No response
Code of Conduct