|
13 | 13 | (deftest simple-text |
14 | 14 | (eca/start-process!) |
15 | 15 |
|
16 | | - (is (match? |
17 | | - {:chatDefaultModel "ollama/qwen3" |
18 | | - :models (m/embeds ["ollama/qwen3"])} |
19 | | - (eca/request! (fixture/initialize-request {:initializationOptions (h/deep-merge fixture/default-init-options |
20 | | - {:providers {"anthropic" {:key nil} |
21 | | - "openai" {:key nil} |
22 | | - "github-copilot" {:key nil} |
23 | | - "ollama" {:url (str fixture/base-llm-mock-url "/ollama")}}}) |
24 | | - :capabilities {:codeAssistant {:chat {}}}})))) |
| 16 | + (eca/request! (fixture/initialize-request {:initializationOptions (h/deep-merge fixture/default-init-options |
| 17 | + {:providers {"anthropic" {:key nil} |
| 18 | + "openai" {:key nil} |
| 19 | + "github-copilot" {:key nil} |
| 20 | + "ollama" {:url (str fixture/base-llm-mock-url "/ollama")}}}) |
| 21 | + :capabilities {:codeAssistant {:chat {}}}})) |
25 | 22 | (eca/notify! (fixture/initialized-notification)) |
| 23 | + (testing "the default model is local" |
| 24 | + (is (match? |
| 25 | + {:chat {:models (m/embeds ["ollama/qwen3"]) |
| 26 | + :selectModel "ollama/qwen3"}} |
| 27 | + (eca/client-awaits-server-notification :config/updated)))) |
26 | 28 | (let [chat-id* (atom nil)] |
27 | 29 | (testing "We send a simple hello message" |
28 | 30 | (llm.mocks/set-case! :simple-text-0) |
|
105 | 107 | (deftest reasoning-text |
106 | 108 | (eca/start-process!) |
107 | 109 |
|
108 | | - (is (match? |
109 | | - {:chatDefaultModel "ollama/qwen3" |
110 | | - :models (m/embeds ["ollama/qwen3"])} |
111 | | - (eca/request! (fixture/initialize-request {:initializationOptions (h/deep-merge fixture/default-init-options |
112 | | - {:providers {"anthropic" {:key nil} |
113 | | - "openai" {:key nil} |
114 | | - "github-copilot" {:key nil} |
115 | | - "ollama" {:url (str fixture/base-llm-mock-url "/ollama")}}}) |
116 | | - :capabilities {:codeAssistant {:chat {}}}})))) |
| 110 | + (eca/request! (fixture/initialize-request {:initializationOptions (h/deep-merge fixture/default-init-options |
| 111 | + {:providers {"anthropic" {:key nil} |
| 112 | + "openai" {:key nil} |
| 113 | + "github-copilot" {:key nil} |
| 114 | + "ollama" {:url (str fixture/base-llm-mock-url "/ollama")}}}) |
| 115 | + :capabilities {:codeAssistant {:chat {}}}})) |
117 | 116 | (eca/notify! (fixture/initialized-notification)) |
| 117 | + (testing "the default model is local" |
| 118 | + (is (match? |
| 119 | + {:chat {:models (m/embeds ["ollama/qwen3"]) |
| 120 | + :selectModel "ollama/qwen3"}} |
| 121 | + (eca/client-awaits-server-notification :config/updated)))) |
118 | 122 | (let [chat-id* (atom nil)] |
119 | 123 | (testing "We send a hello message" |
120 | 124 | (llm.mocks/set-case! :reasoning-0) |
|
182 | 186 | (deftest tool-calling |
183 | 187 | (eca/start-process!) |
184 | 188 |
|
185 | | - (is (match? |
186 | | - {:chatDefaultModel "ollama/qwen3" |
187 | | - :models (m/embeds ["ollama/qwen3"])} |
188 | | - (eca/request! (fixture/initialize-request {:initializationOptions (h/deep-merge fixture/default-init-options |
189 | | - {:providers {"anthropic" {:key nil} |
190 | | - "openai" {:key nil} |
191 | | - "github-copilot" {:key nil} |
192 | | - "ollama" {:url (str fixture/base-llm-mock-url "/ollama")}}}) |
193 | | - :capabilities {:codeAssistant {:chat {}}}})))) |
| 189 | + (eca/request! (fixture/initialize-request {:initializationOptions (h/deep-merge fixture/default-init-options |
| 190 | + {:providers {"anthropic" {:key nil} |
| 191 | + "openai" {:key nil} |
| 192 | + "github-copilot" {:key nil} |
| 193 | + "ollama" {:url (str fixture/base-llm-mock-url "/ollama")}}}) |
| 194 | + :capabilities {:codeAssistant {:chat {}}}})) |
194 | 195 | (eca/notify! (fixture/initialized-notification)) |
| 196 | + (testing "the default model is local" |
| 197 | + (is (match? |
| 198 | + {:chat {:models (m/embeds ["ollama/qwen3"]) |
| 199 | + :selectModel "ollama/qwen3"}} |
| 200 | + (eca/client-awaits-server-notification :config/updated)))) |
195 | 201 | (let [chat-id* (atom nil)] |
196 | 202 | (testing "We ask what files LLM see" |
197 | 203 | (llm.mocks/set-case! :tool-calling-0) |
|