Skip to content

Commit 57fedd4

Browse files
committed
Add integration tests to openai-chat custom provider
1 parent fa6824c commit 57fedd4

File tree

3 files changed

+198
-0
lines changed

3 files changed

+198
-0
lines changed

integration-test/integration/chat/custom_provider_test.clj

Lines changed: 105 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -133,3 +133,108 @@
133133
{:role "assistant" :content [{:type "output_text" :text "Foo"}]}
134134
{:role "user" :content [{:type "input_text" :text "What foo?"}]}]}
135135
llm.mocks/*last-req-body*))))))
136+
137+
(deftest openai-chat-simple-text
  ;; End-to-end test of a custom provider using the "openai-chat" API shape
  ;; (Chat Completions + SSE), backed by the llm-mock server.
  (eca/start-process!)

  ;; Initialization: the custom provider's :defaultModel must win as the
  ;; chat default, and both configured models must be advertised.
  (testing "We use the default model from custom provider"
    (is (match?
         {:models (m/embeds ["myProvider/deepseek-coder"])
          :chatDefaultModel "myProvider/deepseek-coder"}
         (eca/request! (fixture/initialize-request
                        {:initializationOptions
                         (merge fixture/default-init-options
                                {:customProviders
                                 {"myProvider"
                                  {:api "openai-chat"
                                   ;; Point at the mock server's openai-chat route.
                                   :url (str "http://localhost:" llm-mock.server/port "/openai-chat")
                                   :key "foobar"
                                   :models ["deepseek-chat" "deepseek-coder"]
                                   :defaultModel "deepseek-coder"}}})
                         :capabilities {:codeAssistant {:chat {}}}})))))
  (eca/notify! (fixture/initialized-notification))
  ;; chat-id* carries the server-assigned chat id from the first prompt into
  ;; the follow-up prompts so all three turns share one conversation.
  (let [chat-id* (atom nil)]
    (testing "We send a simple hello message"
      (llm.mocks/set-case! :simple-text-0)
      (let [req-id 0
            resp (eca/request! (fixture/chat-prompt-request
                                {:request-id req-id
                                 :model "myProvider/deepseek-coder"
                                 :message "Tell me a joke!"}))
            chat-id (reset! chat-id* (:chatId resp))]

        (is (match?
             {:chatId (m/pred string?)
              :model "myProvider/deepseek-coder"
              :status "success"}
             resp))

        ;; Expected notification order: echoed user text, progress updates,
        ;; then the two streamed assistant chunks, then the finished marker.
        (match-content chat-id req-id "user" {:type "text" :text "Tell me a joke!\n"})
        (match-content chat-id req-id "system" {:type "progress" :state "running" :text "Waiting model"})
        (match-content chat-id req-id "system" {:type "progress" :state "running" :text "Generating"})
        (match-content chat-id req-id "assistant" {:type "text" :text "Knock"})
        (match-content chat-id req-id "assistant" {:type "text" :text " knock!"})
        (match-content chat-id req-id "system" {:type "progress" :state "finished"})
        ;; The mock normalizes the outgoing Chat Completions body into
        ;; :input/:instructions so we can assert on what was actually sent.
        (is (match?
             {:input [{:role "user" :content [{:type "input_text" :text "Tell me a joke!"}]}]
              :instructions (m/pred string?)}
             llm.mocks/*last-req-body*))))

    (testing "We reply"
      (llm.mocks/set-case! :simple-text-1)
      (let [req-id 1
            resp (eca/request! (fixture/chat-prompt-request
                                {:chat-id @chat-id*
                                 :request-id req-id
                                 :model "myProvider/deepseek-coder"
                                 :message "Who's there?"}))
            chat-id @chat-id*]

        (is (match?
             {:chatId (m/pred string?)
              :model "myProvider/deepseek-coder"
              :status "success"}
             resp))

        (match-content chat-id req-id "user" {:type "text" :text "Who's there?\n"})
        (match-content chat-id req-id "system" {:type "progress" :state "running" :text "Waiting model"})
        (match-content chat-id req-id "system" {:type "progress" :state "running" :text "Generating"})
        (match-content chat-id req-id "assistant" {:type "text" :text "Foo"})
        (match-content chat-id req-id "system" {:type "progress" :state "finished"})
        ;; Second turn must resend the full history: first user message plus
        ;; the assistant's first (concatenated) answer.
        (is (match?
             {:input [{:role "user" :content [{:type "input_text" :text "Tell me a joke!"}]}
                      {:role "assistant" :content [{:type "output_text" :text "Knock knock!"}]}
                      {:role "user" :content [{:type "input_text" :text "Who's there?"}]}]}
             llm.mocks/*last-req-body*))))

    (testing "model reply again keeping context"
      (llm.mocks/set-case! :simple-text-2)
      (let [req-id 2
            resp (eca/request! (fixture/chat-prompt-request
                                {:chat-id @chat-id*
                                 :request-id req-id
                                 :model "myProvider/deepseek-coder"
                                 :message "What foo?"}))
            chat-id @chat-id*]

        (is (match?
             {:chatId (m/pred string?)
              :model "myProvider/deepseek-coder"
              :status "success"}
             resp))

        (match-content chat-id req-id "user" {:type "text" :text "What foo?\n"})
        (match-content chat-id req-id "system" {:type "progress" :state "running" :text "Waiting model"})
        (match-content chat-id req-id "system" {:type "progress" :state "running" :text "Generating"})
        ;; :simple-text-2 streams four separate deltas; each arrives as its
        ;; own assistant text notification.
        (match-content chat-id req-id "assistant" {:type "text" :text "Foo"})
        (match-content chat-id req-id "assistant" {:type "text" :text " bar!"})
        (match-content chat-id req-id "assistant" {:type "text" :text "\n\n"})
        (match-content chat-id req-id "assistant" {:type "text" :text "Ha!"})
        (match-content chat-id req-id "system" {:type "progress" :state "finished"})
        ;; Full three-turn history must be present in the third request.
        (is (match?
             {:input [{:role "user" :content [{:type "input_text" :text "Tell me a joke!"}]}
                      {:role "assistant" :content [{:type "output_text" :text "Knock knock!"}]}
                      {:role "user" :content [{:type "input_text" :text "Who's there?"}]}
                      {:role "assistant" :content [{:type "output_text" :text "Foo"}]}
                      {:role "user" :content [{:type "input_text" :text "What foo?"}]}]}
             llm.mocks/*last-req-body*))))))
Lines changed: 88 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,88 @@
1+
(ns llm-mock.openai-chat
2+
(:require
3+
[cheshire.core :as json]
4+
[clojure.string :as string]
5+
[llm-mock.mocks :as llm.mocks]
6+
[org.httpkit.server :as hk]))
7+
8+
(defn ^:private send-sse!
  "Serialize `m` to JSON and push it to channel `ch` as one SSE data event
  (`data: <json>` followed by a blank line), leaving the channel open."
  [ch m]
  (let [payload (json/generate-string m)
        frame (str "data: " payload "\n\n")]
    ;; close? = false: more events may follow on this channel.
    (hk/send! ch frame false)))
12+
13+
(defn ^:private messages->normalized-input
  "Transforms OpenAI Chat messages into the canonical ECA :input + :instructions
  format used by tests for assertions. The first system message becomes
  :instructions; remaining user/assistant messages become :input entries with
  input_text (user) or output_text (assistant) content. Blank messages and
  other roles are dropped."
  [messages]
  (let [join-texts (fn [parts]
                     ;; Multi-part content: concatenate the non-nil :text parts.
                     (->> parts (map :text) (remove nil?) (string/join "\n")))
        system-text (fn [{:keys [role content]}]
                      (when (= role "system")
                        (if (string? content)
                          (string/trim content)
                          (join-texts content))))
        ->entry (fn [{:keys [role content]}]
                  (when (contains? #{"user" "assistant"} role)
                    (let [text (cond
                                 (string? content) (string/trim content)
                                 (sequential? content) (join-texts content)
                                 :else (str content))]
                      (when-not (string/blank? text)
                        {:role role
                         :content [{:type (if (= role "user") "input_text" "output_text")
                                    :text text}]}))))]
    {:instructions (some system-text messages)
     :input (into []
                  (comp (remove #(= "system" (:role %)))
                        (keep ->entry))
                  messages)}))
41+
42+
(defn ^:private simple-text-0
  "Mock stream: two content deltas, a usage chunk, a finish chunk, then close."
  [ch]
  (doseq [chunk [{:choices [{:delta {:content "Knock"}}]}
                 {:choices [{:delta {:content " knock!"}}]}
                 {:usage {:prompt_tokens 10 :completion_tokens 20}}
                 {:choices [{:delta {} :finish_reason "stop"}]}]]
    (send-sse! ch chunk))
  (hk/close ch))
49+
50+
(defn ^:private simple-text-1
  "Mock stream: one content delta, a usage chunk, a finish chunk, then close."
  [ch]
  (doseq [chunk [{:choices [{:delta {:content "Foo"}}]}
                 {:usage {:prompt_tokens 10 :completion_tokens 5}}
                 {:choices [{:delta {} :finish_reason "stop"}]}]]
    (send-sse! ch chunk))
  (hk/close ch))
55+
56+
(defn ^:private simple-text-2
  "Mock stream: four content deltas, a usage chunk, a finish chunk, then close."
  [ch]
  (doseq [chunk [{:choices [{:delta {:content "Foo"}}]}
                 {:choices [{:delta {:content " bar!"}}]}
                 {:choices [{:delta {:content "\n\n"}}]}
                 {:choices [{:delta {:content "Ha!"}}]}
                 {:usage {:prompt_tokens 5 :completion_tokens 15}}
                 {:choices [{:delta {} :finish_reason "stop"}]}]]
    (send-sse! ch chunk))
  (hk/close ch))
64+
65+
(defn handle-openai-chat
  "http-kit handler for the mocked OpenAI Chat Completions endpoint.

  Side effects:
  - Normalizes the incoming request body and stores it via
    llm.mocks/set-last-req-body! so tests can assert on it.
  - Streams an SSE response on the async channel, selected by the
    current llm.mocks/*case* (unknown cases get a minimal hello stream)."
  [req]
  ;; Capture and normalize the request body for assertions in tests.
  ;; Thread from (:body req) so a request without a body short-circuits
  ;; instead of NPE-ing inside slurp (the original slurped unconditionally).
  (when-let [body (some-> (:body req) slurp (json/parse-string true))]
    (let [messages (:messages body)
          normalized (messages->normalized-input messages)]
      (llm.mocks/set-last-req-body! (merge normalized (select-keys body [:tools])))))
  (hk/as-channel
   req
   {:on-open (fn [ch]
               ;; Send initial response headers for SSE before any data events.
               (hk/send! ch {:status 200
                             :headers {"Content-Type" "text/event-stream; charset=utf-8"
                                       "Cache-Control" "no-cache"
                                       "Connection" "keep-alive"}}
                         false)
               (case llm.mocks/*case*
                 :simple-text-0 (simple-text-0 ch)
                 :simple-text-1 (simple-text-1 ch)
                 :simple-text-2 (simple-text-2 ch)
                 ;; Default fallback: emit a tiny valid stream so an
                 ;; unconfigured case fails assertions instead of hanging.
                 (do
                   (send-sse! ch {:choices [{:delta {:content "hello"}}]})
                   (send-sse! ch {:choices [{:delta {} :finish_reason "stop"}]})
                   (hk/close ch))))}))

integration-test/llm_mock/server.clj

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
(:require
33
[llm-mock.anthropic :as llm-mock.anthropic]
44
[llm-mock.openai :as llm-mock.openai]
5+
[llm-mock.openai-chat :as llm-mock.openai-chat]
56
[llm-mock.ollama :as llm-mock.ollama]
67
[org.httpkit.server :as hk]))
78

@@ -16,6 +17,10 @@
1617
(= uri "/openai/v1/responses"))
1718
(llm-mock.openai/handle-openai-responses req)
1819

20+
(and (= :post request-method)
21+
(= uri "/openai-chat/chat/completions"))
22+
(llm-mock.openai-chat/handle-openai-chat req)
23+
1924
(and (= :post request-method)
2025
(= uri "/anthropic/v1/messages"))
2126
(llm-mock.anthropic/handle-anthropic-messages req)

0 commit comments

Comments
 (0)