|
17 | 17 | (slurp path)) |
18 | 18 |
|
(defn complete!
  "Dispatches a chat completion request to the LLM provider that serves `model`.

   Options map keys:
     :model                     - model name string; selects the provider.
     :context                   - conversation context passed through to the provider.
     :user-prompt               - the user's prompt string.
     :past-messages             - prior messages forwarded to the provider.
     :config                    - config map; provider API keys and :ollama host/port
                                  are read from here.
     :on-message-received       - fn called for every streamed message.
     :on-first-message-received - optional fn called exactly once, before the regular
                                  callback, on the first streamed message.
     :on-error                  - fn called with {:msg ...} on failure; also invoked
                                  directly when `model` matches no known provider."
  [{:keys [model context user-prompt config on-first-message-received on-message-received on-error past-messages]}]
  (let [first-message-received* (atom false)
        on-message-received-wrapper (fn [& args]
                                      ;; compare-and-set! flips the flag atomically so the
                                      ;; first-message callback fires at most once even if
                                      ;; messages are delivered concurrently. The nil guard
                                      ;; keeps callers that omit :on-first-message-received
                                      ;; working (previously this would NPE on first message).
                                      (when (and on-first-message-received
                                                 (compare-and-set! first-message-received* false true))
                                        (apply on-first-message-received args))
                                      (apply on-message-received args))]
    (cond
      (contains? #{"o4-mini" "gpt-4.1"} model)
      (llm-providers.openai/completion!
        {:model model
         :context context
         :user-prompt user-prompt
         :past-messages past-messages
         :api-key (:openai-api-key config)}
        {:on-message-received on-message-received-wrapper
         :on-error on-error})

      (contains? #{"claude-sonnet-4-0"
                   "claude-opus-4-0"
                   "claude-3-5-haiku-latest"} model)
      (llm-providers.anthropic/completion!
        {:model model
         :context context
         :user-prompt user-prompt
         :past-messages past-messages
         :api-key (:anthropic-api-key config)}
        {:on-message-received on-message-received-wrapper
         :on-error on-error})

      ;; Ollama models are namespaced with a prefix that is stripped before the call.
      (string/starts-with? model config/ollama-model-prefix)
      (llm-providers.ollama/completion!
        {:host (-> config :ollama :host)
         :port (-> config :ollama :port)
         :model (string/replace-first model config/ollama-model-prefix "")
         :past-messages past-messages
         :context context
         :user-prompt user-prompt}
        {:on-message-received on-message-received-wrapper
         :on-error on-error})

      :else
      (on-error {:msg (str "ECA Unsupported model: " model)}))))
0 commit comments