Skip to content

Commit fbb6553

Browse files
committed
add keep_alive
1 parent f618a2f commit fbb6553

File tree

2 files changed

+36
-2
lines changed

2 files changed

+36
-2
lines changed

ollama/_client.py

Lines changed: 24 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -92,6 +92,7 @@ def generate(
9292
format: Literal['', 'json'] = '',
9393
images: Optional[Sequence[AnyStr]] = None,
9494
options: Optional[Options] = None,
95+
keep_alive: Optional[Union[float, str]] = None,
9596
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
9697
"""
9798
Create a response using the requested model.
@@ -120,6 +121,7 @@ def generate(
120121
'images': [_encode_image(image) for image in images or []],
121122
'format': format,
122123
'options': options or {},
124+
'keep_alive': keep_alive,
123125
},
124126
stream=stream,
125127
)
@@ -131,6 +133,7 @@ def chat(
131133
stream: bool = False,
132134
format: Literal['', 'json'] = '',
133135
options: Optional[Options] = None,
136+
keep_alive: Optional[Union[float, str]] = None,
134137
) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
135138
"""
136139
Create a chat response using the requested model.
@@ -164,18 +167,26 @@ def chat(
164167
'stream': stream,
165168
'format': format,
166169
'options': options or {},
170+
'keep_alive': keep_alive,
167171
},
168172
stream=stream,
169173
)
170174

171-
def embeddings(self, model: str = '', prompt: str = '', options: Optional[Options] = None) -> Sequence[float]:
175+
def embeddings(
176+
self,
177+
model: str = '',
178+
prompt: str = '',
179+
options: Optional[Options] = None,
180+
keep_alive: Optional[Union[float, str]] = None,
181+
) -> Sequence[float]:
172182
return self._request(
173183
'POST',
174184
'/api/embeddings',
175185
json={
176186
'model': model,
177187
'prompt': prompt,
178188
'options': options or {},
189+
'keep_alive': keep_alive,
179190
},
180191
).json()
181192

@@ -360,6 +371,7 @@ async def generate(
360371
format: Literal['', 'json'] = '',
361372
images: Optional[Sequence[AnyStr]] = None,
362373
options: Optional[Options] = None,
374+
keep_alive: Optional[Union[float, str]] = None,
363375
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
364376
"""
365377
Create a response using the requested model.
@@ -387,6 +399,7 @@ async def generate(
387399
'images': [_encode_image(image) for image in images or []],
388400
'format': format,
389401
'options': options or {},
402+
'keep_alive': keep_alive,
390403
},
391404
stream=stream,
392405
)
@@ -398,6 +411,7 @@ async def chat(
398411
stream: bool = False,
399412
format: Literal['', 'json'] = '',
400413
options: Optional[Options] = None,
414+
keep_alive: Optional[Union[float, str]] = None,
401415
) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
402416
"""
403417
Create a chat response using the requested model.
@@ -430,18 +444,26 @@ async def chat(
430444
'stream': stream,
431445
'format': format,
432446
'options': options or {},
447+
'keep_alive': keep_alive,
433448
},
434449
stream=stream,
435450
)
436451

437-
async def embeddings(self, model: str = '', prompt: str = '', options: Optional[Options] = None) -> Sequence[float]:
452+
async def embeddings(
453+
self,
454+
model: str = '',
455+
prompt: str = '',
456+
options: Optional[Options] = None,
457+
keep_alive: Optional[Union[float, str]] = None,
458+
) -> Sequence[float]:
438459
response = await self._request(
439460
'POST',
440461
'/api/embeddings',
441462
json={
442463
'model': model,
443464
'prompt': prompt,
444465
'options': options or {},
466+
'keep_alive': keep_alive,
445467
},
446468
)
447469

tests/test_client.py

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,7 @@ def test_client_chat(httpserver: HTTPServer):
2929
'stream': False,
3030
'format': '',
3131
'options': {},
32+
'keep_alive': None,
3233
},
3334
).respond_with_json(
3435
{
@@ -75,6 +76,7 @@ def generate():
7576
'stream': True,
7677
'format': '',
7778
'options': {},
79+
'keep_alive': None,
7880
},
7981
).respond_with_handler(stream_handler)
8082

@@ -103,6 +105,7 @@ def test_client_chat_images(httpserver: HTTPServer):
103105
'stream': False,
104106
'format': '',
105107
'options': {},
108+
'keep_alive': None,
106109
},
107110
).respond_with_json(
108111
{
@@ -139,6 +142,7 @@ def test_client_generate(httpserver: HTTPServer):
139142
'images': [],
140143
'format': '',
141144
'options': {},
145+
'keep_alive': None,
142146
},
143147
).respond_with_json(
144148
{
@@ -183,6 +187,7 @@ def generate():
183187
'images': [],
184188
'format': '',
185189
'options': {},
190+
'keep_alive': None,
186191
},
187192
).respond_with_handler(stream_handler)
188193

@@ -210,6 +215,7 @@ def test_client_generate_images(httpserver: HTTPServer):
210215
'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'],
211216
'format': '',
212217
'options': {},
218+
'keep_alive': None,
213219
},
214220
).respond_with_json(
215221
{
@@ -465,6 +471,7 @@ async def test_async_client_chat(httpserver: HTTPServer):
465471
'stream': False,
466472
'format': '',
467473
'options': {},
474+
'keep_alive': None,
468475
},
469476
).respond_with_json({})
470477

@@ -502,6 +509,7 @@ def generate():
502509
'stream': True,
503510
'format': '',
504511
'options': {},
512+
'keep_alive': None,
505513
},
506514
).respond_with_handler(stream_handler)
507515

@@ -531,6 +539,7 @@ async def test_async_client_chat_images(httpserver: HTTPServer):
531539
'stream': False,
532540
'format': '',
533541
'options': {},
542+
'keep_alive': None,
534543
},
535544
).respond_with_json({})
536545

@@ -558,6 +567,7 @@ async def test_async_client_generate(httpserver: HTTPServer):
558567
'images': [],
559568
'format': '',
560569
'options': {},
570+
'keep_alive': None,
561571
},
562572
).respond_with_json({})
563573

@@ -597,6 +607,7 @@ def generate():
597607
'images': [],
598608
'format': '',
599609
'options': {},
610+
'keep_alive': None,
600611
},
601612
).respond_with_handler(stream_handler)
602613

@@ -625,6 +636,7 @@ async def test_async_client_generate_images(httpserver: HTTPServer):
625636
'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'],
626637
'format': '',
627638
'options': {},
639+
'keep_alive': None,
628640
},
629641
).respond_with_json({})
630642

0 commit comments

Comments (0)