@@ -10,6 +10,7 @@ class NomicEmbeddings(Embeddings):
10
10
"""NomicEmbeddings embedding model.
11
11
12
12
Example:
13
+
13
14
.. code-block:: python
14
15
15
16
from langchain_nomic import NomicEmbeddings
@@ -66,16 +67,16 @@ def __init__(
66
67
67
68
Args:
68
69
model: model name
69
- nomic_api_key: optionally, set the Nomic API key. Uses the NOMIC_API_KEY
70
+ nomic_api_key: optionally, set the Nomic API key. Uses the ``NOMIC_API_KEY``
70
71
environment variable by default.
71
72
dimensionality: The embedding dimension, for use with Matryoshka-capable
72
73
models. Defaults to full-size.
73
- inference_mode: How to generate embeddings. One of `remote` , `local`
74
- (Embed4All), or `dynamic` (automatic). Defaults to `remote`.
74
+ inference_mode: How to generate embeddings. One of ``'remote'``, ``'local'``
75
+ (Embed4All), or ``'dynamic'`` (automatic). Defaults to ``'remote'``.
75
76
device: The device to use for local embeddings. Choices include
76
- `cpu` , `gpu` , `nvidia` , `amd` , or a specific device name. See
77
- the docstring for `GPT4All.__init__` for more info. Typically
78
- defaults to CPU . Do not use on macOS.
77
+ ``'cpu'``, ``'gpu'``, ``'nvidia'``, ``'amd'``, or a specific device
78
+ name. See the docstring for ``GPT4All.__init__`` for more info.
79
+ Typically defaults to ``'cpu'``. Do not use on macOS.
79
80
"""
80
81
_api_key = nomic_api_key or os .environ .get ("NOMIC_API_KEY" )
81
82
if _api_key :
@@ -91,8 +92,8 @@ def embed(self, texts: list[str], *, task_type: str) -> list[list[float]]:
91
92
92
93
Args:
93
94
texts: list of texts to embed
94
- task_type: the task type to use when embedding. One of `search_query`,
95
- `search_document` , `classification` , `clustering`
95
+ task_type: the task type to use when embedding. One of ``'search_query'``,
96
+ ``'search_document'``, ``'classification'``, ``'clustering'``
96
97
"""
97
98
98
99
output = embed .text (
0 commit comments