Commit a8a082c

Allow override LLM providers payload
1 parent b4721dd commit a8a082c

7 files changed, +68 −35 lines changed

CHANGELOG.md

Lines changed: 1 addition & 0 deletions
@@ -3,6 +3,7 @@
 ## Unreleased
 
 - Fix openai skip streaming response corner cases.
+- Allow override payload of any LLM provider.
 
 ## 0.20.0

docs/configuration.md

Lines changed: 23 additions & 0 deletions
@@ -156,6 +156,25 @@ Just add to your config the `commands` pointing to `.md` files that will be sear
 }
 ```
 
+## Overriding model payloads
+
+It's possible to override the payload sent to LLMs via the `models <modelName> extraPayload` config; this way you can configure custom LLM settings like `temperature`, `reasoning_effort`, `verbosity`, etc.
+This config will be merged with the current defaults used by ECA.
+
+Example:
+
+```javascript
+{
+  "models": {
+    "gpt-5": {
+      "extraPayload": {
+        "verbosity": "high"
+      }
+    }
+  }
+}
+```
+
 ## All configs
 
 ### Schema
@@ -191,6 +210,9 @@ interface Config {
     key?: string;
     keyEnv?: string;
   }};
+  models: {[key: string]: {
+    extraPayload: {[key: string]: any}
+  }};
   ollama?: {
     host: string;
     port: string;
@@ -226,6 +248,7 @@ interface Config {
   "mcpTimeoutSeconds" : 10,
   "mcpServers" : [],
   "customProviders": {},
+  "models": {},
   "ollama" : {
     "host" : "http://localhost",
     "port" : 11434,

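The new docs section says the override is "merged with the current defaults used by ECA". In the implementation below this is a plain shallow `merge` of the user's `extraPayload` map onto the provider's default request body: overridden keys win and untouched defaults survive. A minimal sketch of that behavior (plain Clojure; the maps are illustrative, not ECA's real defaults):

```clojure
;; Illustrative default request body and user override; not ECA's actual values.
(def default-body
  {:model       "gpt-5"
   :temperature 1.0
   :stream      true})

(def extra-payload
  {:verbosity   "high"  ; new key is simply added
   :temperature 0.2})   ; existing key is replaced

(merge default-body extra-payload)
;; => {:model "gpt-5", :temperature 0.2, :stream true, :verbosity "high"}

;; A model with no extraPayload entry yields nil, and merging nil is a no-op.
(merge default-body nil)
;; => {:model "gpt-5", :temperature 1.0, :stream true}
```
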
src/eca/config.clj

Lines changed: 1 addition & 0 deletions
@@ -26,6 +26,7 @@
    :disabledTools []
    :mcpTimeoutSeconds 60
    :mcpServers {}
+   :models {}
    :ollama {:host "http://localhost"
             :port 11434
             :useTools true

src/eca/llm_api.clj

Lines changed: 6 additions & 1 deletion
@@ -110,6 +110,7 @@
         custom-models (set (mapcat (fn [[k v]]
                                      (map #(str (name k) "/" %) (:models v)))
                                    custom-providers))
+        extra-payload (get-in config [:models (keyword model) :extraPayload])
         callbacks {:on-message-received on-message-received-wrapper
                    :on-error on-error-wrapper
                    :on-prepare-tool-call on-prepare-tool-call-wrapper
@@ -133,6 +134,7 @@
        :past-messages past-messages
        :tools tools
        :web-search web-search
+       :extra-payload extra-payload
        :api-url (openai-api-url)
        :api-key (openai-api-key config)}
       callbacks)
@@ -151,6 +153,7 @@
        :past-messages past-messages
        :tools tools
        :web-search web-search
+       :extra-payload extra-payload
        :api-url (anthropic-api-url)
        :api-key (anthropic-api-key config)}
       callbacks)
@@ -164,7 +167,8 @@
        :instructions instructions
        :user-messages user-messages
        :past-messages past-messages
-       :tools tools}
+       :tools tools
+       :extra-payload extra-payload}
       callbacks)
 
     (contains? custom-models model)
@@ -186,6 +190,7 @@
        :past-messages past-messages
        :web-search web-search
        :tools tools
+       :extra-payload extra-payload
       :api-url url
       :api-key key}
      callbacks))
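The lookup added above keys the override by the model name converted to a keyword. A rough REPL sketch of that path (the config literal is a hypothetical parse of the JSON example from the docs, assuming keywordized keys, not actual ECA output):

```clojure
;; Hypothetical parsed config, as if the docs example had been read from JSON.
(def config
  {:models {:gpt-5 {:extraPayload {:verbosity "high"}}}})

;; Same expression as in llm_api.clj: model is the plain model-name string.
(let [model "gpt-5"]
  (get-in config [:models (keyword model) :extraPayload]))
;; => {:verbosity "high"}

;; Models without an entry fall through to nil, leaving the provider body untouched.
(let [model "some-other-model"]
  (get-in config [:models (keyword model) :extraPayload]))
;; => nil
```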

src/eca/llm_providers/anthropic.clj

Lines changed: 13 additions & 12 deletions
@@ -96,22 +96,23 @@
 
 (defn completion!
   [{:keys [model user-messages temperature instructions max-output-tokens
-           api-url api-key reason? reason-tokens past-messages tools web-search]
+           api-url api-key reason? reason-tokens past-messages tools web-search extra-payload]
     :or {temperature 1.0}}
    {:keys [on-message-received on-error on-reason on-prepare-tool-call on-tool-called on-usage-updated]}]
   (let [messages (concat (normalize-messages past-messages)
                          (normalize-messages user-messages))
-        body (assoc-some
-              {:model model
-               :messages (add-cache-to-last-message messages)
-               :max_tokens max-output-tokens
-               :temperature temperature
-               :stream true
-               :tools (->tools tools web-search)
-               :system [{:type "text" :text instructions :cache_control {:type "ephemeral"}}]}
-              :thinking (when (and reason? reason-tokens (> reason-tokens 0))
-                          {:type "enabled"
-                           :budget_tokens reason-tokens}))
+        body (merge (assoc-some
+                     {:model model
+                      :messages (add-cache-to-last-message messages)
+                      :max_tokens max-output-tokens
+                      :temperature temperature
+                      :stream true
+                      :tools (->tools tools web-search)
+                      :system [{:type "text" :text instructions :cache_control {:type "ephemeral"}}]}
+                     :thinking (when (and reason? reason-tokens (> reason-tokens 0))
+                                 {:type "enabled"
+                                  :budget_tokens reason-tokens}))
+                    extra-payload)
 
         on-response-fn
         (fn handle-response [event data content-block* reason-id]
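One consequence of using `merge` here is that it is shallow: an `extraPayload` key such as `temperature` cleanly replaces the default, but overriding a nested map such as `:thinking` swaps out the whole map rather than patching individual fields. A small sketch (the `body` map is a trimmed-down, made-up stand-in for the Anthropic request built above):

```clojure
;; Trimmed-down stand-in for the Anthropic body; values are illustrative.
(def body
  {:model       "claude-example"
   :max_tokens  4096
   :temperature 1.0
   :stream      true
   :thinking    {:type "enabled" :budget_tokens 2048}})

;; Top-level scalar overrides behave as expected.
(merge body {:temperature 0.3})
;; => :temperature becomes 0.3; everything else keeps its default

;; Overriding :thinking replaces the entire nested map; the old :budget_tokens
;; is gone unless it is repeated inside the override.
(merge body {:thinking {:type "enabled" :budget_tokens 8192}})
;; => :thinking {:type "enabled", :budget_tokens 8192}
```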

src/eca/llm_providers/ollama.clj

Lines changed: 7 additions & 6 deletions
@@ -98,15 +98,16 @@
 
-(defn completion! [{:keys [model user-messages reason? instructions host port past-messages tools]}
+(defn completion! [{:keys [model user-messages reason? instructions host port past-messages tools extra-payload]}
                    {:keys [on-message-received on-error on-prepare-tool-call on-tool-called
                            on-reason]}]
   (let [messages (concat
                   (normalize-messages (concat [{:role "system" :content instructions}] past-messages))
                   (normalize-messages user-messages))
-        body {:model model
-              :messages messages
-              :think reason?
-              :tools (->tools tools)
-              :stream true}
+        body (merge {:model model
+                     :messages messages
+                     :think reason?
+                     :tools (->tools tools)
+                     :stream true}
+                    extra-payload)
         url (format chat-url (base-url host port))
         tool-calls* (atom {})
         on-response-fn (fn handle-response [rid _event data reasoning?* reason-id]
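Note that the same `extraPayload` map is merged verbatim into whichever provider body ends up being built, with no translation between providers, so its keys need to match the target API's own field names. For Ollama that likely means nesting sampling settings under its `options` field rather than using a top-level `temperature`. A hedged sketch of how such an override would ride along in the body (field names assume Ollama's chat API; the model and values are illustrative):

```clojure
;; Illustrative: an extraPayload aimed at Ollama, already keywordized.
(def extra-payload
  {:options    {:temperature 0.2 :num_ctx 8192}
   :keep_alive "10m"})

;; Trimmed-down default body as built in ollama.clj; values are illustrative.
(def body
  {:model    "qwen3"
   :messages []
   :think    false
   :tools    []
   :stream   true})

(merge body extra-payload)
;; => body plus :options and :keep_alive, passed through to Ollama unchanged.
```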

src/eca/llm_providers/openai.clj

Lines changed: 17 additions & 16 deletions
@@ -64,27 +64,28 @@
                  past-messages))
 
 (defn completion! [{:keys [model user-messages instructions reason? temperature api-key api-url
-                           max-output-tokens past-messages tools web-search]}
+                           max-output-tokens past-messages tools web-search extra-payload]}
                    {:keys [on-message-received on-error on-prepare-tool-call on-tool-called on-reason on-usage-updated]}]
   (let [input (concat (normalize-messages past-messages)
                       (normalize-messages user-messages))
         tools (cond-> tools
                 web-search (conj {:type "web_search_preview"}))
-        body {:model model
-              :input input
-              :prompt_cache_key (str (System/getProperty "user.name") "@ECA")
-              ;; TODO support parallel
-              :parallel_tool_calls false
-              :instructions instructions
-              ;; TODO allow user specify custom temperature (default 1.0)
-              :temperature temperature
-              :tools tools
-              :reasoning (when reason?
-                           {:effort "medium"
-                            :summary "detailed"})
-              :stream true
-              ;; :verbosity "medium"
-              :max_output_tokens max-output-tokens}
+        body (merge {:model model
+                     :input input
+                     :prompt_cache_key (str (System/getProperty "user.name") "@ECA")
+                     ;; TODO support parallel
+                     :parallel_tool_calls false
+                     :instructions instructions
+                     ;; TODO allow user specify custom temperature (default 1.0)
+                     :temperature temperature
+                     :tools tools
+                     :reasoning (when reason?
+                                  {:effort "medium"
+                                   :summary "detailed"})
+                     :stream true
+                     ;; :verbosity "medium"
+                     :max_output_tokens max-output-tokens}
+                    extra-payload)
         tool-call-by-item-id* (atom {})
         on-response-fn
         (fn handle-response [event data]
