-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathlibrechat.yaml
More file actions
38 lines (35 loc) · 1.25 KB
/
librechat.yaml
File metadata and controls
38 lines (35 loc) · 1.25 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
# LibreChat custom endpoint configuration
#
# Routes LLM requests through the Stihia proxy.
# The proxy forwards them to the real provider API while applying
# Stihia threat-detection guardrails in parallel.
#
# Each endpoint sets:
# X-Upstream-Base-URL — the real provider base URL.
# X-User-ID — the LibreChat user who sent the message.
# X-Conversation-ID — the current LibreChat conversation.
# X-Message-ID — the current message ID.
#
# Note: The Stihia process key (model name or agent ID) is extracted
# directly from the request body by the proxy — no header needed.
---
version: "1.1.7"
cache: true
endpoints:
  custom:
    # ---- OpenAI via Stihia Proxy ----
    - name: "OpenAI"
      apiKey: "${OPENAI_API_KEY}"
      # All traffic goes to the local proxy; the real upstream is named
      # in the X-Upstream-Base-URL header below.
      baseURL: "http://stihia-proxy:4005/v1"
      models:
        default: ["gpt-5.4", "gpt-5.4-mini", "gpt-5.4-nano", "gpt-5.3-codex"]
        # fetch: false — do not query the endpoint for its model list;
        # only the models enumerated above are offered.
        fetch: false
      titleConvo: true
      titleModel: "gpt-5.4"
      summarize: false
      forcePrompt: false
      modelDisplayLabel: "OpenAI"
      headers:
        X-Upstream-Base-URL: "https://api.openai.com"
        # NOTE(review): {{...}} placeholders are presumably expanded by
        # LibreChat at request time — confirm against the LibreChat
        # custom-endpoint header-substitution docs for this version.
        X-User-ID: "{{LIBRECHAT_USER_ID}}"
        X-Conversation-ID: "{{LIBRECHAT_BODY_CONVERSATIONID}}"
        X-Message-ID: "{{LIBRECHAT_BODY_MESSAGEID}}"