
Commit b0ea6d9

Support api/embed (#208)
* api/embed
* api/embed
* api/embed
* rm legacy
1 parent 359c63d commit b0ea6d9


2 files changed: +50 -0 lines changed


ollama/__init__.py

Lines changed: 2 additions & 0 deletions
@@ -21,6 +21,7 @@
   'ResponseError',
   'generate',
   'chat',
+  'embed',
   'embeddings',
   'pull',
   'push',
@@ -36,6 +37,7 @@

 generate = _client.generate
 chat = _client.chat
+embed = _client.embed
 embeddings = _client.embeddings
 pull = _client.pull
 push = _client.push
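
With `embed` re-exported at the package level alongside `generate` and `chat`, the new endpoint can be called straight from the module. A minimal usage sketch, assuming a locally pulled embedding model (the model name `all-minilm` and the `embeddings` response key are illustrative assumptions, not part of this diff):

import ollama

# Single string input; the model name here is only an example.
single = ollama.embed(model='all-minilm', input='The sky is blue because of Rayleigh scattering')

# /api/embed also accepts a sequence of inputs and is expected to return
# one vector per input (assumed to live under an 'embeddings' key).
batch = ollama.embed(model='all-minilm', input=['The sky is blue', 'Grass is green'])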

ollama/_client.py

Lines changed: 48 additions & 0 deletions
@@ -243,6 +243,29 @@ def chat(
       stream=stream,
     )

+  def embed(
+    self,
+    model: str = '',
+    input: Union[str, Sequence[AnyStr]] = '',
+    truncate: bool = True,
+    options: Optional[Options] = None,
+    keep_alive: Optional[Union[float, str]] = None,
+  ) -> Mapping[str, Any]:
+    if not model:
+      raise RequestError('must provide a model')
+
+    return self._request(
+      'POST',
+      '/api/embed',
+      json={
+        'model': model,
+        'input': input,
+        'truncate': truncate,
+        'options': options or {},
+        'keep_alive': keep_alive,
+      },
+    ).json()
+
   def embeddings(
     self,
     model: str = '',
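
The synchronous method rejects an empty model name with RequestError, POSTs the payload to /api/embed, and returns the parsed JSON mapping. A rough sketch of calling it through Client (the host, model name, and keep_alive value below are placeholders):

from ollama import Client

client = Client(host='http://localhost:11434')  # default Ollama host

result = client.embed(
  model='all-minilm',   # example model name
  input='hello world',
  truncate=True,        # clip inputs that exceed the model's context length
  keep_alive='5m',      # keep the model loaded for five minutes after the call
)

# An empty model name would raise RequestError('must provide a model').
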
@@ -634,6 +657,31 @@ async def chat(
       stream=stream,
     )

+  async def embed(
+    self,
+    model: str = '',
+    input: Union[str, Sequence[AnyStr]] = '',
+    truncate: bool = True,
+    options: Optional[Options] = None,
+    keep_alive: Optional[Union[float, str]] = None,
+  ) -> Mapping[str, Any]:
+    if not model:
+      raise RequestError('must provide a model')
+
+    response = await self._request(
+      'POST',
+      '/api/embed',
+      json={
+        'model': model,
+        'input': input,
+        'truncate': truncate,
+        'options': options or {},
+        'keep_alive': keep_alive,
+      },
+    )
+
+    return response.json()
+
   async def embeddings(
     self,
     model: str = '',
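
The asynchronous variant awaits the request and then parses the body in a second step. A sketch of the AsyncClient counterpart under the same assumptions (example model name, default host):

import asyncio

from ollama import AsyncClient

async def main() -> None:
  client = AsyncClient()  # defaults to http://localhost:11434
  result = await client.embed(
    model='all-minilm',   # example model name
    input=['first sentence', 'second sentence'],
  )
  print(result)

asyncio.run(main())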
