    [clojure.test :refer [deftest is testing]]
    [integration.eca :as eca]
    [integration.fixture :as fixture]
+   [llm-mock.mocks :as llm.mocks]
    [matcher-combinators.matchers :as m]
    [matcher-combinators.test :refer [match?]]))

 (eca/clean-after-test)

+(defn match-content [chat-id request-id role content]
+  (is (match?
+       {:chatId chat-id
+        :requestId request-id
+        :role role
+        :content content}
+       (eca/client-awaits-server-notification :chat/contentReceived))))
+
 (deftest simple-text
   (eca/start-process!)

   (eca/request! (fixture/initialize-request))
   (eca/notify! (fixture/initialized-notification))
-  (testing "simple hello message with reply"
-    (let [resp (eca/request! (fixture/chat-prompt-request
-                              {:request-id 0
-                               :message "Hello there!"}))
-          chat-id (:chatId resp)]
-
-      (is (match?
-           {:chatId (m/pred string?)
-            :model "claude-sonnet-4-20250514"
-            :status "success"}
-           resp))
-
-      (is (match?
-           {:chatId chat-id
-            :requestId 0
-            :role "user"
-            :content {:type "text" :text "Hello there!\n"}}
-           (eca/client-awaits-server-notification :chat/contentReceived)))
-      (is (match?
-           {:chatId chat-id
-            :requestId 0
-            :role "system"
-            :content {:type "progress" :state "running" :text "Waiting model"}}
-           (eca/client-awaits-server-notification :chat/contentReceived))))))
+  (let [chat-id* (atom nil)]
+    (testing "We send a simple first message"
+      (llm.mocks/set-case! :simple-text-0)
+      (let [req-id 0
+            resp (eca/request! (fixture/chat-prompt-request
+                                {:request-id req-id
+                                 :model "gpt-5"
+                                 :message "Tell me a joke!"}))
+            chat-id (reset! chat-id* (:chatId resp))]
+
+        (is (match?
+             {:chatId (m/pred string?)
+              :model "gpt-5"
+              :status "success"}
+             resp))
+
+        (match-content chat-id req-id "user" {:type "text" :text "Tell me a joke!\n"})
+        (match-content chat-id req-id "system" {:type "progress" :state "running" :text "Waiting model"})
+        (match-content chat-id req-id "system" {:type "progress" :state "running" :text "Generating"})
+        (match-content chat-id req-id "assistant" {:type "text" :text "Knock"})
+        (match-content chat-id req-id "assistant" {:type "text" :text " knock!"})
+        (match-content chat-id req-id "system" {:type "usage"
+                                                :messageInputTokens 10
+                                                :messageOutputTokens 20
+                                                :sessionTokens 30
+                                                :messageCost (m/pred string?)
+                                                :sessionCost (m/pred string?)})
+        (match-content chat-id req-id "system" {:type "progress" :state "finished"})))
+
+    (testing "We reply"
+      (llm.mocks/set-case! :simple-text-1)
+      (let [req-id 1
+            resp (eca/request! (fixture/chat-prompt-request
+                                {:chat-id @chat-id*
+                                 :request-id req-id
+                                 :model "gpt-5"
+                                 :message "Who's there?"}))
+            chat-id @chat-id*]
+
+        (is (match?
+             {:chatId (m/pred string?)
+              :model "gpt-5"
+              :status "success"}
+             resp))
+
+        (match-content chat-id req-id "user" {:type "text" :text "Who's there?\n"})
+        (match-content chat-id req-id "system" {:type "progress" :state "running" :text "Waiting model"})
+        (match-content chat-id req-id "system" {:type "progress" :state "running" :text "Generating"})
+        (match-content chat-id req-id "assistant" {:type "text" :text "Foo"})
+        (match-content chat-id req-id "system" {:type "usage"
+                                                :messageInputTokens 10
+                                                :messageOutputTokens 5
+                                                :sessionTokens 45
+                                                :messageCost (m/pred string?)
+                                                :sessionCost (m/pred string?)})
+        (match-content chat-id req-id "system" {:type "progress" :state "finished"})))
+
+    (testing "The model replies again, keeping context"
+      (llm.mocks/set-case! :simple-text-2)
+      (let [req-id 2
+            resp (eca/request! (fixture/chat-prompt-request
+                                {:chat-id @chat-id*
+                                 :request-id req-id
+                                 :model "gpt-5"
+                                 :message "What foo?"}))
+            chat-id @chat-id*]
+
+        (is (match?
+             {:chatId (m/pred string?)
+              :model "gpt-5"
+              :status "success"}
+             resp))
+
+        (match-content chat-id req-id "user" {:type "text" :text "What foo?\n"})
+        (match-content chat-id req-id "system" {:type "progress" :state "running" :text "Waiting model"})
+        (match-content chat-id req-id "system" {:type "progress" :state "running" :text "Generating"})
+        (match-content chat-id req-id "assistant" {:type "text" :text "Foo"})
+        (match-content chat-id req-id "assistant" {:type "text" :text " bar!"})
+        (match-content chat-id req-id "assistant" {:type "text" :text "\n\n"})
+        (match-content chat-id req-id "assistant" {:type "text" :text "Ha!"})
+        (match-content chat-id req-id "system" {:type "usage"
+                                                :messageInputTokens 5
+                                                :messageOutputTokens 15
+                                                :sessionTokens 65
+                                                :messageCost (m/pred string?)
+                                                :sessionCost (m/pred string?)})
+        (match-content chat-id req-id "system" {:type "progress" :state "finished"})))))
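
Context on the pattern in this diff: each `testing` block calls `llm.mocks/set-case!` to pick which canned LLM response the mock backend streams, and the test then asserts the exact chunks ("Knock", " knock!", "Foo", ...) and usage numbers it expects via `chat/contentReceived` notifications. The sketch below only illustrates that case-switching idea under the assumption of an atom-backed registry; the namespace and helpers (`llm-mock.sketch`, `canned-chunks`, `stream-chunks!`) are hypothetical and not ECA's actual llm-mock implementation.

;; Hypothetical sketch (not the real llm-mock namespace): an atom holds the
;; active case keyword, and the fake LLM replays that case's canned chunks.
(ns llm-mock.sketch)

(def ^:private current-case* (atom nil))

(defn set-case!
  "Select which canned conversation the mock LLM should replay."
  [case-kw]
  (reset! current-case* case-kw))

(def ^:private canned-chunks
  ;; Chunk texts taken from the expectations in the test above.
  {:simple-text-0 ["Knock" " knock!"]
   :simple-text-1 ["Foo"]
   :simple-text-2 ["Foo" " bar!" "\n\n" "Ha!"]})

(defn stream-chunks!
  "Call `on-chunk` with each text chunk of the active case, simulating streaming."
  [on-chunk]
  (doseq [chunk (get canned-chunks @current-case*)]
    (on-chunk chunk)))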