#!/usr/bin/env bats
# Tests for the ollama_bash_lib chat functions (live or mocked backend).

source ./ollama_bash_lib.sh

# Load the mock helpers only when a test mode is requested.
# NB: no padding inside the quotes -- `[ -n " $VAR " ]` is ALWAYS true
# because the padded string is never empty, which would source the
# helper unconditionally.
if [ -n "$OLLAMA_TEST_MODE" ]; then
  source ./tests/test_helper.bash
fi
58setup () {
69 ollama_messages_clear
@@ -13,9 +16,9 @@ setup() {
1316 [ " $status " -eq 0 ]
1417 [ " $output " -eq 1 ]
1518
16- run ollama_messages_last
19+ run ollama_messages_last_json
1720 [ " $status " -eq 0 ]
18- [[ " $output " =~ " hello" ]]
21+ [[ $( echo " $output " | jq -r ' .content ' ) == " hello" ]]
1922
2023 ollama_messages_clear
2124
2730@test " ollama_chat: should have a conversation" {
2831 ollama_messages_add -r " user" -c " what is 1+1?"
2932
30- local tmp_file
31- tmp_file=$( mktemp)
32- ollama_chat -m phi3 > " $tmp_file "
33- local chat_status=$?
34- local chat_output
35- chat_output=$( cat " $tmp_file " )
36- rm " $tmp_file "
37-
38- [ " $chat_status " -eq 0 ]
39- [ -n " $chat_output " ]
40- [[ " $chat_output " =~ " 2" ]]
41-
42- run ollama_messages_count
43- [ " $status " -eq 0 ]
44- [ " $output " -eq 2 ] # user message + assistant response
33+ if [[ " $OLLAMA_TEST_MODE " == " mock" ]]; then
34+ run ollama_chat -m " mock-model:latest"
35+ [ " $status " -eq 0 ]
36+ [[ " $output " == " This is a mock chat response." ]]
37+ else
38+ local tmp_file
39+ tmp_file=$( mktemp)
40+ ollama_chat -m phi3 > " $tmp_file "
41+ local chat_status=$?
42+ local chat_output
43+ chat_output=$( cat " $tmp_file " )
44+ rm " $tmp_file "
45+
46+ [ " $chat_status " -eq 0 ]
47+ [ -n " $chat_output " ]
48+ [[ " $chat_output " =~ " 2" ]]
49+ fi
4550}
4651
47- @test " ollama_chat_json: should have a conversation and return json " {
52+ @test " ollama_chat_json: should have a conversation (non-streaming) " {
4853 ollama_messages_add -r " user" -c " what is 1+1?"
4954 OBL_STREAM=0 # ensure we get a single json response back
5055
51- ollama_chat_json -m phi3
52- local chat_json_status=$?
53- [ " $chat_json_status " -eq 0 ]
54-
55- run ollama_messages_last
56- [ " $status " -eq 0 ]
57- [[ " $output " =~ " 2" ]]
56+ if [[ " $OLLAMA_TEST_MODE " == " mock" ]]; then
57+ # This function modifies history but doesn't print to stdout
58+ ollama_chat_json -m " mock-model:latest"
59+ local status=$?
60+ [ " $status " -eq 0 ]
61+
62+ # Check that history was modified correctly
63+ run ollama_messages_count
64+ [ " $output " -eq 2 ]
65+
66+ run ollama_messages_last_json
67+ [ " $status " -eq 0 ]
68+ _is_valid_json " $output "
69+ [ $? -eq 0 ]
70+ [[ $( echo " $output " | jq -r ' .content' ) == " This is a mock chat response." ]]
71+ else
72+ ollama_chat_json -m phi3
73+ local chat_json_status=$?
74+ [ " $chat_json_status " -eq 0 ]
75+ fi
5876}
5977
78+
6079@test " ollama_chat_stream: should have a streaming conversation" {
6180 ollama_messages_add -r " user" -c " what is 1+1?"
62- run ollama_chat_stream -m phi3
63- [ " $status " -eq 0 ]
64- [ -n " $output " ]
65- [[ " $output " =~ " 2" ]]
81+ if [[ " $OLLAMA_TEST_MODE " == " mock" ]]; then
82+ run ollama_chat_stream -m " mock-model:latest"
83+ [ " $status " -eq 0 ]
84+ [[ " $output " == " This is a mock streaming chat response." ]]
85+ else
86+ run ollama_chat_stream -m phi3
87+ [ " $status " -eq 0 ]
88+ [ -n " $output " ]
89+ [[ " $output " =~ " 2" ]]
90+ fi
6691}
6792
6893@test " ollama_chat_stream_json: should have a streaming conversation and return json" {
6994 ollama_messages_add -r " user" -c " what is 1+1?"
70- run ollama_chat_stream_json -m phi3
71- [ " $status " -eq 0 ]
72- [ -n " $output " ]
73- first_line=$( echo " $output " | head -n 1)
74- _is_valid_json " $first_line "
75- local is_valid_json_status=$?
76- [ " $is_valid_json_status " -eq 0 ]
95+ if [[ " $OLLAMA_TEST_MODE " == " mock" ]]; then
96+ run ollama_chat_stream_json -m " mock-model:latest"
97+ [ " $status " -eq 0 ]
98+ [ -n " $output " ]
99+ first_line=$( echo " $output " | head -n 1)
100+ _is_valid_json " $first_line "
101+ local is_valid_json_status=$?
102+ [ " $is_valid_json_status " -eq 0 ]
103+ [[ $( echo " $first_line " | jq -r ' .message.content' ) == " This is a mock streaming chat response." ]]
104+ else
105+ run ollama_chat_stream_json -m phi3
106+ [ " $status " -eq 0 ]
107+ [ -n " $output " ]
108+ first_line=$( echo " $output " | head -n 1)
109+ _is_valid_json " $first_line "
110+ local is_valid_json_status=$?
111+ [ " $is_valid_json_status " -eq 0 ]
112+ fi
77113}
78114
79115@test " ollama_chat_stream: should handle newlines correctly" {
80116 ollama_messages_add -r " user" -c " generate a list of three fruits, each on a new line."
81- run ollama_chat_stream -m phi3
82- [ " $status " -eq 0 ]
83- [[ " $output " =~ .* \n .* ]]
117+ if [[ " $OLLAMA_TEST_MODE " == " mock" ]]; then
118+ run ollama_chat_stream -m " mock-model:latest"
119+ [ " $status " -eq 0 ]
120+ expected_output=$' Apple\n Banana\n Cherry'
121+ echo " ---Actual Output---"
122+ echo " $output " | cat -A
123+ echo " ---Expected Output---"
124+ echo " $expected_output " | cat -A
125+ echo " -------------------"
126+ [[ " $output " == " $expected_output " ]]
127+ else
128+ run ollama_chat_stream -m phi3
129+ [ " $status " -eq 0 ]
130+ [[ " $output " =~ .* \n .* ]]
131+ fi
84132}