# Source: embedding_adapters.py — forked from YILING0013/AI_NovelGenerator
# (GitHub page chrome and line-number gutter from the web scrape removed.)
# embedding_adapters.py
# -*- coding: utf-8 -*-
import logging
import traceback
from typing import List
import requests
from langchain_openai import AzureOpenAIEmbeddings, OpenAIEmbeddings
def ensure_openai_base_url_has_v1(url: str) -> str:
    """
    Normalize an OpenAI-compatible base URL.

    Appends '/v1' when the URL neither ends in a versioned segment
    (e.g. '/v2') nor already mentions '/v1' anywhere. An empty (or
    whitespace-only) input is returned as the empty string.
    """
    import re

    cleaned = url.strip()
    if not cleaned:
        return cleaned
    ends_versioned = re.search(r'/v\d+$', cleaned) is not None
    if not ends_versioned and '/v1' not in cleaned:
        cleaned = cleaned.rstrip('/') + '/v1'
    return cleaned
class BaseEmbeddingAdapter:
    """Common interface every embedding adapter must implement."""

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Embed a batch of documents. Subclasses must override."""
        raise NotImplementedError

    def embed_query(self, query: str) -> List[float]:
        """Embed a single query string. Subclasses must override."""
        raise NotImplementedError
class OpenAIEmbeddingAdapter(BaseEmbeddingAdapter):
    """Adapter backed by langchain's OpenAIEmbeddings (or a compatible API)."""

    def __init__(self, api_key: str, base_url: str, model_name: str):
        normalized_url = ensure_openai_base_url_has_v1(base_url)
        self._embedding = OpenAIEmbeddings(
            openai_api_key=api_key,
            openai_api_base=normalized_url,
            model=model_name,
        )

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Delegate batch embedding to the wrapped client."""
        return self._embedding.embed_documents(texts)

    def embed_query(self, query: str) -> List[float]:
        """Delegate single-query embedding to the wrapped client."""
        return self._embedding.embed_query(query)
class AzureOpenAIEmbeddingAdapter(BaseEmbeddingAdapter):
    """
    Adapter backed by AzureOpenAIEmbeddings (or a compatible API).

    Expects base_url of the form:
    https://<endpoint>/openai/deployments/<deployment>/embeddings?api-version=<ver>
    """

    def __init__(self, api_key: str, base_url: str, model_name: str):
        import re

        parsed = re.match(
            r'https://(.+?)/openai/deployments/(.+?)/embeddings\?api-version=(.+)',
            base_url,
        )
        if parsed is None:
            raise ValueError("Invalid Azure OpenAI base_url format")
        endpoint_host, deployment, api_version = parsed.groups()
        self.azure_endpoint = f"https://{endpoint_host}"
        self.azure_deployment = deployment
        self.api_version = api_version
        self._embedding = AzureOpenAIEmbeddings(
            azure_endpoint=self.azure_endpoint,
            azure_deployment=self.azure_deployment,
            openai_api_key=api_key,
            api_version=self.api_version,
        )

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Delegate batch embedding to the wrapped client."""
        return self._embedding.embed_documents(texts)

    def embed_query(self, query: str) -> List[float]:
        """Delegate single-query embedding to the wrapped client."""
        return self._embedding.embed_query(query)
class OllamaEmbeddingAdapter(BaseEmbeddingAdapter):
    """
    Adapter for a local Ollama server; talks to its /api/embeddings endpoint.
    """

    def __init__(self, model_name: str, base_url: str):
        """
        :param model_name: name of the Ollama embedding model
        :param base_url: server base URL, e.g. http://localhost:11434
        """
        self.model_name = model_name
        self.base_url = base_url.rstrip("/")

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Embed each text with one request apiece (no batch endpoint used)."""
        return [self._embed_single(text) for text in texts]

    def embed_query(self, query: str) -> List[float]:
        """Embed a single query string."""
        return self._embed_single(query)

    def _resolve_endpoint(self) -> str:
        """
        Build the full /api/embeddings URL from ``self.base_url``.

        Resolves against the URL *path* rather than raw substring matching,
        so host names that happen to contain "api" (e.g.
        ``http://api.example.com``) are not mis-detected as already pointing
        at the API. A trailing OpenAI-style ``/v1`` segment is stripped
        before ``/api/embeddings`` is appended.
        """
        from urllib.parse import urlsplit, urlunsplit

        parts = urlsplit(self.base_url)
        path = parts.path.rstrip("/")
        if not path.endswith("/api/embeddings"):
            if path.endswith("/api"):
                path += "/embeddings"
            else:
                if path.endswith("/v1"):
                    path = path[: -len("/v1")]
                path = path.rstrip("/") + "/api/embeddings"
        return urlunsplit((parts.scheme, parts.netloc, path, "", ""))

    def _embed_single(self, text: str) -> List[float]:
        """
        POST one text to /api/embeddings and return its embedding vector.

        Returns [] on request failure (logged), preserving the original
        best-effort semantics. A response without an 'embedding' field
        raises ValueError (deliberately NOT swallowed, as before).
        """
        url = self._resolve_endpoint()
        data = {
            "model": self.model_name,
            "prompt": text
        }
        try:
            response = requests.post(url, json=data)
            response.raise_for_status()
            result = response.json()
            if "embedding" not in result:
                raise ValueError("No 'embedding' field in Ollama response.")
            return result["embedding"]
        except requests.exceptions.RequestException as e:
            logging.error(f"Ollama embeddings request error: {e}\n{traceback.format_exc()}")
            return []
class MLStudioEmbeddingAdapter(BaseEmbeddingAdapter):
    """
    Embedding adapter for LM Studio's OpenAI-compatible /embeddings endpoint.
    """

    def __init__(self, api_key: str, base_url: str, model_name: str):
        """
        :param api_key: bearer token sent in the Authorization header
        :param base_url: server base URL; '/v1' and '/embeddings' are appended as needed
        :param model_name: embedding model identifier
        """
        self.url = ensure_openai_base_url_has_v1(base_url)
        if not self.url.endswith('/embeddings'):
            self.url = f"{self.url}/embeddings"
        self.headers = {
            "Authorization": f"Bearer {api_key}",
            "Content-Type": "application/json"
        }
        self.model_name = model_name

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """
        Embed a batch of texts in a single request.

        On failure, returns one INDEPENDENT empty list per input text.
        (The previous ``[[]] * len(texts)`` returned N references to a
        single shared list, so a caller mutating one row corrupted all.)
        """
        try:
            payload = {
                "input": texts,
                "model": self.model_name
            }
            response = requests.post(self.url, json=payload, headers=self.headers)
            response.raise_for_status()
            result = response.json()
            if "data" not in result:
                logging.error(f"Invalid response format from LM Studio API: {result}")
                return [[] for _ in texts]
            return [item.get("embedding", []) for item in result["data"]]
        except requests.exceptions.RequestException as e:
            logging.error(f"LM Studio API request failed: {str(e)}")
            return [[] for _ in texts]
        except (KeyError, IndexError, ValueError, TypeError) as e:
            logging.error(f"Error parsing LM Studio API response: {str(e)}")
            return [[] for _ in texts]

    def embed_query(self, query: str) -> List[float]:
        """Embed a single query; returns [] on any handled failure (logged)."""
        try:
            payload = {
                "input": query,
                "model": self.model_name
            }
            response = requests.post(self.url, json=payload, headers=self.headers)
            response.raise_for_status()
            result = response.json()
            if "data" not in result or not result["data"]:
                logging.error(f"Invalid response format from LM Studio API: {result}")
                return []
            return result["data"][0].get("embedding", [])
        except requests.exceptions.RequestException as e:
            logging.error(f"LM Studio API request failed: {str(e)}")
            return []
        except (KeyError, IndexError, ValueError, TypeError) as e:
            logging.error(f"Error parsing LM Studio API response: {str(e)}")
            return []
class GeminiEmbeddingAdapter(BaseEmbeddingAdapter):
    """
    Embedding adapter for the Google Generative Language (Gemini) REST API.

    Posts directly to a URL of the form:
    https://generativelanguage.googleapis.com/v1beta/models/text-embedding-004:embedContent?key=YOUR_API_KEY
    """

    def __init__(self, api_key: str, model_name: str, base_url: str):
        """
        :param api_key: Google API key (sent as a 'key' query parameter)
        :param model_name: typically "text-embedding-004"
        :param base_url: e.g. https://generativelanguage.googleapis.com/v1beta/models
        """
        self.api_key = api_key
        self.model_name = model_name
        self.base_url = base_url.rstrip("/")

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Embed each text with one request apiece (no batch endpoint used)."""
        return [self._embed_single(text) for text in texts]

    def embed_query(self, query: str) -> List[float]:
        """Embed a single query string."""
        return self._embed_single(query)

    def _embed_single(self, text: str) -> List[float]:
        """
        Call the :embedContent endpoint for one text.

        Returns the embedding values, or [] on request/parse failure (logged).
        The leftover debug ``print(response.text)`` was removed: it dumped
        every raw API response to stdout on each call.
        """
        url = f"{self.base_url}/{self.model_name}:embedContent?key={self.api_key}"
        payload = {
            "model": self.model_name,
            "content": {
                "parts": [
                    {"text": text}
                ]
            }
        }
        try:
            response = requests.post(url, json=payload)
            response.raise_for_status()
            result = response.json()
            embedding_data = result.get("embedding", {})
            return embedding_data.get("values", [])
        except requests.exceptions.RequestException as e:
            logging.error(f"Gemini embed_content request error: {e}\n{traceback.format_exc()}")
            return []
        except Exception as e:
            logging.error(f"Gemini embed_content parse error: {e}\n{traceback.format_exc()}")
            return []
class SiliconFlowEmbeddingAdapter(BaseEmbeddingAdapter):
    """
    Embedding adapter for the SiliconFlow API.
    """

    # Public SiliconFlow embeddings endpoint, used when no base_url is given.
    DEFAULT_URL = "https://api.siliconflow.cn/v1/embeddings"

    def __init__(self, api_key: str, base_url: str, model_name: str):
        """
        :param api_key: bearer token for the SiliconFlow API
        :param base_url: full embeddings URL; falls back to DEFAULT_URL if empty
        :param model_name: embedding model identifier
        """
        # Apply the fallback BEFORE prepending a scheme: previously an empty
        # base_url became the truthy-but-bogus URL "https://", so the
        # fallback branch was unreachable.
        if not base_url:
            base_url = self.DEFAULT_URL
        if not base_url.startswith(("http://", "https://")):
            base_url = "https://" + base_url
        self.url = base_url
        self.model_name = model_name
        # Kept for backward compatibility with code that inspects it.
        # Requests now build a fresh payload per call instead of mutating
        # this shared dict (the old in-place "input" overwrite was not
        # re-entrant and leaked the last embedded text between calls).
        self.payload = {
            "model": model_name,
            "input": "Silicon flow embedding online: fast, affordable, and high-quality embedding services. come try it out!",
            "encoding_format": "float"
        }
        self.headers = {
            "Authorization": f"Bearer {api_key}",
            "Content-Type": "application/json"
        }

    def _request_embedding(self, text: str) -> List[float]:
        """POST one input and return its embedding; [] on any handled failure."""
        payload = {
            "model": self.model_name,
            "input": text,
            "encoding_format": "float"
        }
        try:
            response = requests.post(self.url, json=payload, headers=self.headers)
            response.raise_for_status()
            result = response.json()
            if not result or "data" not in result or not result["data"]:
                logging.error(f"Invalid response format from SiliconFlow API: {result}")
                return []
            return result["data"][0].get("embedding", [])
        except requests.exceptions.RequestException as e:
            logging.error(f"SiliconFlow API request failed: {str(e)}")
            return []
        except (KeyError, IndexError, ValueError, TypeError) as e:
            logging.error(f"Error parsing SiliconFlow API response: {str(e)}")
            return []

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Embed each text with one request apiece."""
        return [self._request_embedding(text) for text in texts]

    def embed_query(self, query: str) -> List[float]:
        """Embed a single query string."""
        return self._request_embedding(query)
def create_embedding_adapter(
    interface_format: str,
    api_key: str,
    base_url: str,
    model_name: str
) -> BaseEmbeddingAdapter:
    """
    Factory: build the embedding adapter matching interface_format.

    The format string is matched case-insensitively with surrounding
    whitespace ignored; an unrecognized format raises ValueError.
    """
    fmt = interface_format.strip().lower()
    builders = {
        "openai": lambda: OpenAIEmbeddingAdapter(api_key, base_url, model_name),
        "azure openai": lambda: AzureOpenAIEmbeddingAdapter(api_key, base_url, model_name),
        "ollama": lambda: OllamaEmbeddingAdapter(model_name, base_url),
        "ml studio": lambda: MLStudioEmbeddingAdapter(api_key, base_url, model_name),
        "gemini": lambda: GeminiEmbeddingAdapter(api_key, model_name, base_url),
        "siliconflow": lambda: SiliconFlowEmbeddingAdapter(api_key, base_url, model_name),
    }
    builder = builders.get(fmt)
    if builder is None:
        raise ValueError(f"Unknown embedding interface_format: {interface_format}")
    return builder()