
Commit 62b372b

Author: liaoliaojun (committed)
fix: shallow copy config of create_embedder
1 parent c44b701 · commit 62b372b

File tree

1 file changed: +20 −19 lines

scrapegraphai/graphs/abstract_graph.py

Lines changed: 20 additions & 19 deletions
@@ -333,40 +333,41 @@ def _create_embedder(self, embedder_config: dict) -> object:
         Raises:
             KeyError: If the model is not supported.
         """
+        embedder_params = {**embedder_config}
         if "model_instance" in embedder_config:
-            return embedder_config["model_instance"]
+            return embedder_params["model_instance"]
         # Instantiate the embedding model based on the model name
-        if "openai" in embedder_config["model"]:
-            return OpenAIEmbeddings(api_key=embedder_config["api_key"])
-        elif "azure" in embedder_config["model"]:
+        if "openai" in embedder_params["model"]:
+            return OpenAIEmbeddings(api_key=embedder_params["api_key"])
+        elif "azure" in embedder_params["model"]:
             return AzureOpenAIEmbeddings()
-        elif "ollama" in embedder_config["model"]:
-            embedder_config["model"] = embedder_config["model"].split("ollama/")[-1]
+        elif "ollama" in embedder_params["model"]:
+            embedder_params["model"] = embedder_params["model"].split("ollama/")[-1]
             try:
-                models_tokens["ollama"][embedder_config["model"]]
+                models_tokens["ollama"][embedder_params["model"]]
             except KeyError as exc:
                 raise KeyError("Model not supported") from exc
-            return OllamaEmbeddings(**embedder_config)
-        elif "hugging_face" in embedder_config["model"]:
+            return OllamaEmbeddings(**embedder_params)
+        elif "hugging_face" in embedder_params["model"]:
             try:
-                models_tokens["hugging_face"][embedder_config["model"]]
+                models_tokens["hugging_face"][embedder_params["model"]]
             except KeyError as exc:
                 raise KeyError("Model not supported") from exc
-            return HuggingFaceHubEmbeddings(model=embedder_config["model"])
-        elif "gemini" in embedder_config["model"]:
+            return HuggingFaceHubEmbeddings(model=embedder_params["model"])
+        elif "gemini" in embedder_params["model"]:
             try:
-                models_tokens["gemini"][embedder_config["model"]]
+                models_tokens["gemini"][embedder_params["model"]]
             except KeyError as exc:
                 raise KeyError("Model not supported") from exc
-            return GoogleGenerativeAIEmbeddings(model=embedder_config["model"])
-        elif "bedrock" in embedder_config["model"]:
-            embedder_config["model"] = embedder_config["model"].split("/")[-1]
-            client = embedder_config.get("client", None)
+            return GoogleGenerativeAIEmbeddings(model=embedder_params["model"])
+        elif "bedrock" in embedder_params["model"]:
+            embedder_params["model"] = embedder_params["model"].split("/")[-1]
+            client = embedder_params.get("client", None)
             try:
-                models_tokens["bedrock"][embedder_config["model"]]
+                models_tokens["bedrock"][embedder_params["model"]]
             except KeyError as exc:
                 raise KeyError("Model not supported") from exc
-            return BedrockEmbeddings(client=client, model_id=embedder_config["model"])
+            return BedrockEmbeddings(client=client, model_id=embedder_params["model"])
         else:
             raise ValueError("Model provided by the configuration not supported")

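The change matters because _create_embedder rewrites keys such as "model" (for example, stripping the "ollama/" prefix) and previously did so on the very dict the caller passed in, so any later use of that config saw the mutated value. A minimal sketch of the difference, with illustrative function names that are not part of the library:

# A minimal sketch (not the library code) of the behaviour this commit fixes.
# Function names below are hypothetical, for illustration only.

def strip_prefix_mutating(embedder_config: dict) -> dict:
    # Old behaviour: rewrites "model" on the caller's dict in place.
    embedder_config["model"] = embedder_config["model"].split("ollama/")[-1]
    return embedder_config

def strip_prefix_copying(embedder_config: dict) -> dict:
    # New behaviour: work on a shallow copy; the caller's dict stays intact.
    embedder_params = {**embedder_config}
    embedder_params["model"] = embedder_params["model"].split("ollama/")[-1]
    return embedder_params

config = {"model": "ollama/nomic-embed-text"}

strip_prefix_copying(config)
print(config["model"])   # ollama/nomic-embed-text  (unchanged)

strip_prefix_mutating(config)
print(config["model"])   # nomic-embed-text  (caller's config was modified)

Note that {**embedder_config} is a shallow copy: nested or object-valued entries (such as a Bedrock "client") are still shared with the caller; only top-level key rewrites are isolated.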