27 | 27 | (llm.mocks/set-case! :simple-text-0) |
28 | 28 | (let [req-id 0 |
29 | 29 | resp (eca/request! (fixture/chat-prompt-request |
30 | | - {:request-id req-id |
31 | | - :model "gpt-5" |
32 | | - :message "Tell me a joke!"})) |
| 30 | + {:request-id req-id |
| 31 | + :model "gpt-5" |
| 32 | + :message "Tell me a joke!"})) |
33 | 33 | chat-id (reset! chat-id* (:chatId resp))] |
34 | 34 |
35 | 35 | (is (match? |
36 | | - {:chatId (m/pred string?) |
37 | | - :model "gpt-5" |
38 | | - :status "success"} |
39 | | - resp)) |
| 36 | + {:chatId (m/pred string?) |
| 37 | + :model "gpt-5" |
| 38 | + :status "success"} |
| 39 | + resp)) |
40 | 40 |
41 | 41 | (match-content chat-id req-id "user" {:type "text" :text "Tell me a joke!\n"}) |
42 | 42 | (match-content chat-id req-id "system" {:type "progress" :state "running" :text "Waiting model"}) |
…
49 | 49 | :sessionTokens 30 |
50 | 50 | :messageCost (m/pred string?) |
51 | 51 | :sessionCost (m/pred string?)}) |
52 | | - (match-content chat-id req-id "system" {:type "progress" :state "finished"}))) |
| 52 | + (match-content chat-id req-id "system" {:type "progress" :state "finished"}) |
| 53 | + (is (match? |
| 54 | + {:input [{:role "user" :content [{:type "input_text" :text "Tell me a joke!"}]}] |
| 55 | + :instructions (m/pred string?)} |
| 56 | + llm.mocks/*last-req-body*)))) |
53 | 57 |
54 | 58 | (testing "We reply" |
55 | 59 | (llm.mocks/set-case! :simple-text-1) |
56 | 60 | (let [req-id 1 |
57 | 61 | resp (eca/request! (fixture/chat-prompt-request |
58 | | - {:chat-id @chat-id* |
59 | | - :request-id req-id |
60 | | - :model "gpt-5" |
61 | | - :message "Who's there?"})) |
| 62 | + {:chat-id @chat-id* |
| 63 | + :request-id req-id |
| 64 | + :model "gpt-5" |
| 65 | + :message "Who's there?"})) |
62 | 66 | chat-id @chat-id*] |
63 | 67 |
64 | 68 | (is (match? |
65 | | - {:chatId (m/pred string?) |
66 | | - :model "gpt-5" |
67 | | - :status "success"} |
68 | | - resp)) |
| 69 | + {:chatId (m/pred string?) |
| 70 | + :model "gpt-5" |
| 71 | + :status "success"} |
| 72 | + resp)) |
69 | 73 |
70 | 74 | (match-content chat-id req-id "user" {:type "text" :text "Who's there?\n"}) |
71 | 75 | (match-content chat-id req-id "system" {:type "progress" :state "running" :text "Waiting model"}) |
…
77 | 81 | :sessionTokens 45 |
78 | 82 | :messageCost (m/pred string?) |
79 | 83 | :sessionCost (m/pred string?)}) |
80 | | - (match-content chat-id req-id "system" {:type "progress" :state "finished"}))) |
| 84 | + (match-content chat-id req-id "system" {:type "progress" :state "finished"}) |
| 85 | + (is (match? |
| 86 | + {:input [{:role "user" :content [{:type "input_text" :text "Tell me a joke!"}]} |
| 87 | + {:role "assistant" :content [{:type "output_text" :text "Knock knock!"}]} |
| 88 | + {:role "user" :content [{:type "input_text" :text "Who's there?"}]}]} |
| 89 | + llm.mocks/*last-req-body*)))) |
81 | 90 |
82 | 91 | (testing "model reply again keeping context" |
83 | 92 | (llm.mocks/set-case! :simple-text-2) |
84 | 93 | (let [req-id 2 |
85 | 94 | resp (eca/request! (fixture/chat-prompt-request |
86 | | - {:chat-id @chat-id* |
87 | | - :request-id req-id |
88 | | - :model "gpt-5" |
89 | | - :message "What foo?"})) |
| 95 | + {:chat-id @chat-id* |
| 96 | + :request-id req-id |
| 97 | + :model "gpt-5" |
| 98 | + :message "What foo?"})) |
90 | 99 | chat-id @chat-id*] |
91 | 100 |
92 | 101 | (is (match? |
93 | | - {:chatId (m/pred string?) |
94 | | - :model "gpt-5" |
95 | | - :status "success"} |
96 | | - resp)) |
| 102 | + {:chatId (m/pred string?) |
| 103 | + :model "gpt-5" |
| 104 | + :status "success"} |
| 105 | + resp)) |
97 | 106 |
98 | 107 | (match-content chat-id req-id "user" {:type "text" :text "What foo?\n"}) |
99 | 108 | (match-content chat-id req-id "system" {:type "progress" :state "running" :text "Waiting model"}) |
…
108 | 117 | :sessionTokens 65 |
109 | 118 | :messageCost (m/pred string?) |
110 | 119 | :sessionCost (m/pred string?)}) |
111 | | - (match-content chat-id req-id "system" {:type "progress" :state "finished"}))))) |
| 120 | + (match-content chat-id req-id "system" {:type "progress" :state "finished"}) |
| 121 | + (is (match? |
| 122 | + {:input [{:role "user" :content [{:type "input_text" :text "Tell me a joke!"}]} |
| 123 | + {:role "assistant" :content [{:type "output_text" :text "Knock knock!"}]} |
| 124 | + {:role "user" :content [{:type "input_text" :text "Who's there?"}]} |
| 125 | + {:role "assistant" :content [{:type "output_text" :text "Foo"}]} |
| 126 | + {:role "user" :content [{:type "input_text" :text "What foo?"}]}]} |
| 127 | + llm.mocks/*last-req-body*)))))) |