(ns integration.chat.custom-provider-test
  (:require
   [clojure.test :refer [deftest is testing]]
   [integration.eca :as eca]
   [integration.fixture :as fixture]
   [integration.helper :refer [match-content] :as h]
   [llm-mock.mocks :as llm.mocks]
   [llm-mock.server :as llm-mock.server]
   [matcher-combinators.matchers :as m]
   [matcher-combinators.test :refer [match?]]))

(eca/clean-after-test)

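;; Exercises the chat flow against a custom provider ("myProvider") that speaks the
;; openai-responses API and is backed by the local llm-mock server: default model
;; selection, streamed content, usage reporting, and forwarding of the conversation
;; history to the provider on every turn.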
(deftest simple-text
  (eca/start-process!)

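  ;; Register "myProvider" via initializationOptions, pointing its openai-responses
  ;; endpoint at the local mock server and declaring two models with "foo-1" as default.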
  (testing "We use the default model from the custom provider"
    (is (match?
         {:models (m/embeds ["myProvider/foo-1"])
          :chatDefaultModel "myProvider/foo-1"}
         (eca/request! (fixture/initialize-request
                        {:initializationOptions
                         (merge fixture/default-init-options
                                {:customProviders
                                 {"myProvider"
                                  {:api "openai-responses"
                                   :url (str "http://localhost:" llm-mock.server/port "/openai")
                                   :key "foobar"
                                   :models ["foo-0" "foo-1"]
                                   :defaultModel "foo-1"}}})
                         :capabilities {:codeAssistant {:chat {}}}})))))
  (eca/notify! (fixture/initialized-notification))
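  ;; chat-id* captures the chat id returned by the first prompt so that the
  ;; follow-up turns continue the same conversation.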
  (let [chat-id* (atom nil)]
    (testing "We send a simple hello message"
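      ;; Select which canned response the mock LLM server streams for this turn.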
      (llm.mocks/set-case! :simple-text-0)
      (let [req-id 0
            resp (eca/request! (fixture/chat-prompt-request
                                {:request-id req-id
                                 :model "myProvider/foo-1"
                                 :message "Tell me a joke!"}))
            chat-id (reset! chat-id* (:chatId resp))]

        (is (match?
             {:chatId (m/pred string?)
              :model "myProvider/foo-1"
              :status "success"}
             resp))

        (match-content chat-id req-id "user" {:type "text" :text "Tell me a joke!\n"})
        (match-content chat-id req-id "system" {:type "progress" :state "running" :text "Waiting model"})
        (match-content chat-id req-id "system" {:type "progress" :state "running" :text "Generating"})
        (match-content chat-id req-id "assistant" {:type "text" :text "Knock"})
        (match-content chat-id req-id "assistant" {:type "text" :text " knock!"})
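        ;; Token usage accumulates in sessionTokens; cost fields are expected to be
        ;; absent, presumably because no pricing is known for custom-provider models.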
        (match-content chat-id req-id "system" {:type "usage"
                                                :messageInputTokens 10
                                                :messageOutputTokens 20
                                                :sessionTokens 30
                                                :messageCost m/absent
                                                :sessionCost m/absent})
        (match-content chat-id req-id "system" {:type "progress" :state "finished"})
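        ;; The mock server records the last request body, so we can assert the
        ;; openai-responses payload: the prompt as an input_text item plus a
        ;; non-empty instructions string.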
        (is (match?
             {:input [{:role "user" :content [{:type "input_text" :text "Tell me a joke!"}]}]
              :instructions (m/pred string?)}
             llm.mocks/*last-req-body*))))

    (testing "We reply"
      (llm.mocks/set-case! :simple-text-1)
      (let [req-id 1
            resp (eca/request! (fixture/chat-prompt-request
                                {:chat-id @chat-id*
                                 :request-id req-id
                                 :model "myProvider/foo-1"
                                 :message "Who's there?"}))
            chat-id @chat-id*]

        (is (match?
             {:chatId (m/pred string?)
              :model "myProvider/foo-1"
              :status "success"}
             resp))

        (match-content chat-id req-id "user" {:type "text" :text "Who's there?\n"})
        (match-content chat-id req-id "system" {:type "progress" :state "running" :text "Waiting model"})
        (match-content chat-id req-id "system" {:type "progress" :state "running" :text "Generating"})
        (match-content chat-id req-id "assistant" {:type "text" :text "Foo"})
        (match-content chat-id req-id "system" {:type "usage"
                                                :messageInputTokens 10
                                                :messageOutputTokens 5
                                                :sessionTokens 45
                                                :messageCost m/absent
                                                :sessionCost m/absent})
        (match-content chat-id req-id "system" {:type "progress" :state "finished"})
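        ;; The full conversation so far must be replayed to the provider as
        ;; openai-responses input items, keeping the context across turns.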
        (is (match?
             {:input [{:role "user" :content [{:type "input_text" :text "Tell me a joke!"}]}
                      {:role "assistant" :content [{:type "output_text" :text "Knock knock!"}]}
                      {:role "user" :content [{:type "input_text" :text "Who's there?"}]}]}
             llm.mocks/*last-req-body*))))

    (testing "Model replies again keeping the context"
      (llm.mocks/set-case! :simple-text-2)
      (let [req-id 2
            resp (eca/request! (fixture/chat-prompt-request
                                {:chat-id @chat-id*
                                 :request-id req-id
                                 :model "myProvider/foo-1"
                                 :message "What foo?"}))
            chat-id @chat-id*]

        (is (match?
             {:chatId (m/pred string?)
              :model "myProvider/foo-1"
              :status "success"}
             resp))

        (match-content chat-id req-id "user" {:type "text" :text "What foo?\n"})
        (match-content chat-id req-id "system" {:type "progress" :state "running" :text "Waiting model"})
        (match-content chat-id req-id "system" {:type "progress" :state "running" :text "Generating"})
        (match-content chat-id req-id "assistant" {:type "text" :text "Foo"})
        (match-content chat-id req-id "assistant" {:type "text" :text " bar!"})
        (match-content chat-id req-id "assistant" {:type "text" :text "\n\n"})
        (match-content chat-id req-id "assistant" {:type "text" :text "Ha!"})
        (match-content chat-id req-id "system" {:type "usage"
                                                :messageInputTokens 5
                                                :messageOutputTokens 15
                                                :sessionTokens 65
                                                :messageCost m/absent
                                                :sessionCost m/absent})
        (match-content chat-id req-id "system" {:type "progress" :state "finished"})
        (is (match?
             {:input [{:role "user" :content [{:type "input_text" :text "Tell me a joke!"}]}
                      {:role "assistant" :content [{:type "output_text" :text "Knock knock!"}]}
                      {:role "user" :content [{:type "input_text" :text "Who's there?"}]}
                      {:role "assistant" :content [{:type "output_text" :text "Foo"}]}
                      {:role "user" :content [{:type "input_text" :text "What foo?"}]}]}
             llm.mocks/*last-req-body*))))))