|
14 | 14 | (deftest simple-text |
15 | 15 | (eca/start-process!) |
16 | 16 |
|
| 17 | + (eca/request! (fixture/initialize-request |
| 18 | + {:initializationOptions |
| 19 | + (merge fixture/default-init-options |
| 20 | + {:defaultModel "my-provider/foo1" |
| 21 | + :providers |
| 22 | + {"myProvider" |
| 23 | + {:api "openai-responses" |
| 24 | + :url (str "http://localhost:" llm-mock.server/port "/openai") |
| 25 | + :key "foobar" |
| 26 | + :models {"foo0" {} |
| 27 | + "foo1" {}}}}}) |
| 28 | + :capabilities {:codeAssistant {:chat {}}}})) |
| 29 | + |
| 30 | + (eca/notify! (fixture/initialized-notification)) |
17 | 31 | (testing "We use the default model from custom provider" |
18 | 32 | (is (match? |
19 | | - {:models (m/embeds ["my-provider/foo1"]) |
20 | | - :chatDefaultModel "my-provider/foo1"} |
21 | | - (eca/request! (fixture/initialize-request |
22 | | - {:initializationOptions |
23 | | - (merge fixture/default-init-options |
24 | | - {:defaultModel "my-provider/foo1" |
25 | | - :providers |
26 | | - {"myProvider" |
27 | | - {:api "openai-responses" |
28 | | - :url (str "http://localhost:" llm-mock.server/port "/openai") |
29 | | - :key "foobar" |
30 | | - :models {"foo0" {} |
31 | | - "foo1" {}}}}}) |
32 | | - :capabilities {:codeAssistant {:chat {}}}}))))) |
33 | | - (eca/notify! (fixture/initialized-notification)) |
| 33 | + {:chat {:models (m/embeds ["my-provider/foo1"]) |
| 34 | + :selectModel "my-provider/foo1"}} |
| 35 | + (eca/client-awaits-server-notification :config/updated)))) |
| 36 | + |
34 | 37 | (let [chat-id* (atom nil)] |
35 | 38 | (testing "We send a simple hello message" |
36 | 39 | (llm.mocks/set-case! :simple-text-0) |
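
Note on the pattern above: the initialize request is now sent up front, and the default-model assertion reads the chat config from the `config/updated` server notification rather than from the `initialize` response. A minimal sketch of how an await helper like `eca/client-awaits-server-notification` could work, assuming the test client pushes incoming server notifications onto a core.async channel; the channel, the 10s timeout, and the helper body are all illustrative assumptions, not the suite's actual implementation:

(require '[clojure.core.async :as async])

;; Hypothetical: the test client's reader loop would put every server
;; notification onto this channel as {:method ... :params ...}.
(def server-notifications (async/chan 100))

(defn client-awaits-server-notification
  "Blocks until a notification with `method` arrives, returning its params.
   Times out after 10s so a missing notification fails the test fast."
  [method]
  (let [timeout-ch (async/timeout 10000)]
    (loop []
      (let [[msg port] (async/alts!! [server-notifications timeout-ch])]
        (cond
          (= port timeout-ch)
          (throw (ex-info "Timed out waiting for server notification"
                          {:method method}))

          (= (:method msg) method)
          (:params msg)

          ;; Unrelated notifications are skipped while we wait.
          :else (recur))))))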
|
127 | 130 | (deftest openai-chat-simple-text |
128 | 131 | (eca/start-process!) |
129 | 132 |
|
| 133 | + (eca/request! (fixture/initialize-request |
| 134 | + {:initializationOptions |
| 135 | + (merge fixture/default-init-options |
| 136 | + {:defaultModel "my-provider/deepseekcoder" |
| 137 | + :providers |
| 138 | + {"myProvider" |
| 139 | + {:api "openai-chat" |
| 140 | + :url (str "http://localhost:" llm-mock.server/port "/openai-chat") |
| 141 | + :key "foobar" |
| 142 | + :models {"deepseekchat" {} |
| 143 | + "deepseekcoder" {}}}}}) |
| 144 | + :capabilities {:codeAssistant {:chat {}}}})) |
| 145 | + |
| 146 | + (eca/notify! (fixture/initialized-notification)) |
130 | 147 | (testing "We use the default model from custom provider" |
131 | 148 | (is (match? |
132 | | - {:models (m/embeds ["my-provider/deepseekcoder"]) |
133 | | - :chatDefaultModel "my-provider/deepseekcoder"} |
134 | | - (eca/request! (fixture/initialize-request |
135 | | - {:initializationOptions |
136 | | - (merge fixture/default-init-options |
137 | | - {:defaultModel "my-provider/deepseekcoder" |
138 | | - :providers |
139 | | - {"myProvider" |
140 | | - {:api "openai-chat" |
141 | | - :url (str "http://localhost:" llm-mock.server/port "/openai-chat") |
142 | | - :key "foobar" |
143 | | - :models {"deepseekchat" {} |
144 | | - "deepseekcoder" {}}}}}) |
145 | | - :capabilities {:codeAssistant {:chat {}}}}))))) |
146 | | - (eca/notify! (fixture/initialized-notification)) |
| 149 | + {:chat {:models (m/embeds ["my-provider/deepseekcoder"]) |
| 150 | + :selectModel "my-provider/deepseekcoder"}} |
| 151 | + (eca/client-awaits-server-notification :config/updated)))) |
147 | 152 | (let [chat-id* (atom nil)] |
148 | 153 | (testing "We send a simple hello message" |
149 | 154 | (llm.mocks/set-case! :simple-text-0) |
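
The openai-chat test follows the same flow. Both assertions rely on matcher-combinators' `m/embeds`, which passes when each expected element matches some element of the actual sequence, tolerating extras (and, via the default map semantics of `match?`, extra keys such as `:selectModel`). A standalone example using only model ids from the diff:

(require '[clojure.test :refer [deftest is]]
         '[matcher-combinators.test]            ;; adds the match? directive to `is`
         '[matcher-combinators.matchers :as m])

(deftest embeds-example
  ;; Passes: "my-provider/foo1" appears in the actual list; the extra
  ;; "my-provider/foo0" entry and the :selectModel key are tolerated.
  (is (match? {:chat {:models (m/embeds ["my-provider/foo1"])}}
              {:chat {:models ["my-provider/foo0" "my-provider/foo1"]
                      :selectModel "my-provider/foo1"}})))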
|