Skip to content

Commit 5a21009

Browse files
authored
fix: thinking mode is now properly set (#25)
* fix: thinking mode is now properly set * Bump version to 0.10.1
1 parent e687ffd commit 5a21009

File tree

5 files changed

+11
-11
lines changed

5 files changed

+11
-11
lines changed

cli-package/pyproject.toml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "ollmcp"
3-
version = "0.10.0"
3+
version = "0.10.1"
44
description = "CLI for MCP Client for Ollama - An easy-to-use command for interacting with Ollama through MCP"
55
readme = "README.md"
66
requires-python = ">=3.10"
@@ -9,7 +9,7 @@ authors = [
99
{name = "Jonathan Löwenstern"}
1010
]
1111
dependencies = [
12-
"mcp-client-for-ollama==0.10.0"
12+
"mcp-client-for-ollama==0.10.1"
1313
]
1414

1515
[project.scripts]

mcp_client_for_ollama/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
"""MCP Client for Ollama package."""
22

3-
__version__ = "0.10.0"
3+
__version__ = "0.10.1"

mcp_client_for_ollama/client.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -220,8 +220,8 @@ async def process_query(self, query: str) -> str:
220220
}
221221

222222
# Add thinking parameter if thinking mode is enabled and model supports it
223-
if self.thinking_mode and self.supports_thinking_mode():
224-
chat_params["think"] = True
223+
if self.supports_thinking_mode():
224+
chat_params["think"] = self.thinking_mode
225225

226226
# Initial Ollama API call with the query and available tools
227227
stream = await self.ollama.chat(**chat_params)
@@ -231,7 +231,7 @@ async def process_query(self, query: str) -> str:
231231
tool_calls = []
232232
response_text, tool_calls = await self.streaming_manager.process_streaming_response(
233233
stream,
234-
thinking_mode=self.thinking_mode and self.supports_thinking_mode(),
234+
thinking_mode=self.thinking_mode,
235235
show_thinking=self.show_thinking
236236
)
237237
# Check if there are any tool calls in the response
@@ -274,15 +274,15 @@ async def process_query(self, query: str) -> str:
274274
}
275275

276276
# Add thinking parameter if thinking mode is enabled and model supports it
277-
if self.thinking_mode and self.supports_thinking_mode():
278-
chat_params_followup["think"] = True
277+
if self.supports_thinking_mode():
278+
chat_params_followup["think"] = self.thinking_mode
279279

280280
stream = await self.ollama.chat(**chat_params_followup)
281281

282282
# Process the streaming response with thinking mode support
283283
response_text, _ = await self.streaming_manager.process_streaming_response(
284284
stream,
285-
thinking_mode=self.thinking_mode and self.supports_thinking_mode(),
285+
thinking_mode=self.thinking_mode,
286286
show_thinking=self.show_thinking
287287
)
288288

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "mcp-client-for-ollama"
3-
version = "0.10.0"
3+
version = "0.10.1"
44
description = "MCP Client for Ollama - A client for connecting to Model Context Protocol servers using Ollama"
55
readme = "README.md"
66
requires-python = ">=3.10"

uv.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments (0)