Skip to content

Commit 53ff3cd

Browse files
authored
Merge pull request #553 from ollama/drifkin/thinking-levels
add support for 'high'/'medium'/'low' think values
2 parents 34e98bd + aa4b476 commit 53ff3cd

File tree

4 files changed

+40
-11
lines changed

4 files changed

+40
-11
lines changed

examples/README.md

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -73,3 +73,6 @@ Requirement: `pip install tqdm`
7373

7474
### Thinking (generate) - Enable thinking mode for a model
7575
- [thinking-generate.py](thinking-generate.py)
76+
77+
### Thinking (levels) - Choose the thinking level
78+
- [thinking-levels.py](thinking-levels.py)

examples/thinking-levels.py

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
from ollama import chat


def heading(text):
    """Print *text* underlined with a row of '=' of matching length."""
    underline = '=' * len(text)
    print(text)
    print(underline)


messages = [
    {'role': 'user', 'content': 'What is 10 + 23?'},
]

# gpt-oss supports 'low', 'medium', 'high'
levels = ['low', 'medium', 'high']
for index, level in enumerate(levels):
    # Print a divider before every section except the first, so the
    # visible output matches a trailing-separator loop exactly.
    if index:
        print('-' * 20)
        print('\n')

    response = chat('gpt-oss:20b', messages=messages, think=level)

    heading(f'Thinking ({level})')
    print(response.message.thinking)
    print('\n')
    heading('Response')
    print(response.message.content)
    print('\n')

ollama/_client.py

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -274,7 +274,7 @@ def chat(
274274
*,
275275
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
276276
stream: Literal[False] = False,
277-
think: Optional[bool] = None,
277+
think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None,
278278
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
279279
options: Optional[Union[Mapping[str, Any], Options]] = None,
280280
keep_alive: Optional[Union[float, str]] = None,
@@ -288,7 +288,7 @@ def chat(
288288
*,
289289
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
290290
stream: Literal[True] = True,
291-
think: Optional[bool] = None,
291+
think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None,
292292
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
293293
options: Optional[Union[Mapping[str, Any], Options]] = None,
294294
keep_alive: Optional[Union[float, str]] = None,
@@ -301,7 +301,7 @@ def chat(
301301
*,
302302
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
303303
stream: bool = False,
304-
think: Optional[bool] = None,
304+
think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None,
305305
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
306306
options: Optional[Union[Mapping[str, Any], Options]] = None,
307307
keep_alive: Optional[Union[float, str]] = None,
@@ -702,7 +702,7 @@ async def generate(
702702
template: str = '',
703703
context: Optional[Sequence[int]] = None,
704704
stream: Literal[False] = False,
705-
think: Optional[bool] = None,
705+
think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None,
706706
raw: bool = False,
707707
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
708708
images: Optional[Sequence[Union[str, bytes, Image]]] = None,
@@ -721,7 +721,7 @@ async def generate(
721721
template: str = '',
722722
context: Optional[Sequence[int]] = None,
723723
stream: Literal[True] = True,
724-
think: Optional[bool] = None,
724+
think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None,
725725
raw: bool = False,
726726
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
727727
images: Optional[Sequence[Union[str, bytes, Image]]] = None,
@@ -739,7 +739,7 @@ async def generate(
739739
template: Optional[str] = None,
740740
context: Optional[Sequence[int]] = None,
741741
stream: bool = False,
742-
think: Optional[bool] = None,
742+
think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None,
743743
raw: Optional[bool] = None,
744744
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
745745
images: Optional[Sequence[Union[str, bytes, Image]]] = None,
@@ -785,7 +785,7 @@ async def chat(
785785
*,
786786
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
787787
stream: Literal[False] = False,
788-
think: Optional[bool] = None,
788+
think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None,
789789
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
790790
options: Optional[Union[Mapping[str, Any], Options]] = None,
791791
keep_alive: Optional[Union[float, str]] = None,
@@ -799,7 +799,7 @@ async def chat(
799799
*,
800800
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
801801
stream: Literal[True] = True,
802-
think: Optional[bool] = None,
802+
think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None,
803803
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
804804
options: Optional[Union[Mapping[str, Any], Options]] = None,
805805
keep_alive: Optional[Union[float, str]] = None,
@@ -812,7 +812,7 @@ async def chat(
812812
*,
813813
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
814814
stream: bool = False,
815-
think: Optional[bool] = None,
815+
think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None,
816816
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
817817
options: Optional[Union[Mapping[str, Any], Options]] = None,
818818
keep_alive: Optional[Union[float, str]] = None,

ollama/_types.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -207,7 +207,7 @@ class GenerateRequest(BaseGenerateRequest):
207207
images: Optional[Sequence[Image]] = None
208208
'Image data for multimodal models.'
209209

210-
think: Optional[bool] = None
210+
think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None
211211
'Enable thinking mode (for thinking models).'
212212

213213

@@ -357,7 +357,7 @@ def serialize_model(self, nxt):
357357
tools: Optional[Sequence[Tool]] = None
358358
'Tools to use for the chat.'
359359

360-
think: Optional[bool] = None
360+
think: Optional[Union[bool, Literal['low', 'medium', 'high']]] = None
361361
'Enable thinking mode (for thinking models).'
362362

363363

0 commit comments

Comments
 (0)