Skip to content

Commit 7e00c14

Browse files
authored
Merge pull request #183 from VinciGit00/182-googlegenerativeaiembeddings-is-not-defined
fixed gemini embeddings
2 parents 3f0e069 + 9415675 commit 7e00c14

File tree

6 files changed

+3373
-6
lines changed

6 files changed

+3373
-6
lines changed

poetry.lock

Lines changed: 3346 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

scrapegraphai/graphs/abstract_graph.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -4,16 +4,16 @@
44
from abc import ABC, abstractmethod
55
from typing import Optional
66
from langchain_openai import AzureOpenAIEmbeddings, OpenAIEmbeddings
7-
from langchain_community.embeddings import HuggingFaceHubEmbeddings, OllamaEmbeddings
7+
from langchain_community.embeddings import HuggingFaceHubEmbeddings, OllamaEmbeddings, BedrockEmbeddings
8+
from langchain_google_genai import GoogleGenerativeAIEmbeddings
89
from ..helpers import models_tokens
9-
from ..models import AzureOpenAI, Bedrock, Gemini, Groq, HuggingFace, Ollama, OpenAI, Anthropic
10+
from ..models import AzureOpenAI, Bedrock, Gemini, Groq, HuggingFace, Ollama, OpenAI, Anthropic, Claude
1011

1112

1213
class AbstractGraph(ABC):
1314
"""
1415
Scaffolding class for creating a graph representation and executing it.
1516
16-
Attributes:
1717
prompt (str): The prompt for the graph.
1818
source (str): The source of the graph.
1919
config (dict): Configuration parameters for the graph.
@@ -162,7 +162,7 @@ def _create_llm(self, llm_config: dict, chat=False) -> object:
162162
try:
163163
self.model_token = models_tokens["ollama"][llm_params["model"]]
164164
except KeyError as exc:
165-
raise KeyError("Model not supported") from exc
165+
self.model_token = 8192
166166
else:
167167
self.model_token = 8192
168168
except AttributeError:

scrapegraphai/graphs/script_creator_graph.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -110,4 +110,4 @@ def run(self) -> str:
110110
inputs = {"user_prompt": self.prompt, self.input_key: self.source}
111111
self.final_state, self.execution_info = self.graph.execute(inputs)
112112

113-
return self.final_state.get("answer", "No answer found.")
113+
return self.final_state.get("answer", "No answer found")

scrapegraphai/helpers/models_tokens.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,8 @@
3939
"dolphin-mixtral": 32000,
4040
"mistral-openorca": 32000,
4141
"stablelm-zephyr": 8192,
42-
"nomic-embed-text": 8192
42+
"nomic-embed-text": 8192,
43+
"mxbai-embed-large": 8192
4344
},
4445
"groq": {
4546
"llama3-8b-8192": 8192,

scrapegraphai/models/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,3 +12,4 @@
1212
from .groq import Groq
1313
from .bedrock import Bedrock
1414
from .anthropic import Anthropic
15+
from .claude import Claude

scrapegraphai/models/claude.py

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
"""
2+
Claude model
3+
"""
4+
from langchain_anthropic import ChatAnthropic
5+
6+
7+
class Claude(ChatAnthropic):
8+
"""Class for wrapping the ChatAnthropic (Claude) module"""
9+
10+
def __init__(self, llm_config: dict):
11+
"""
12+
A wrapper for the Claude class that provides default configuration
13+
and could be extended with additional methods if needed.
14+
15+
Args:
16+
llm_config (dict): Configuration parameters for the language model.
17+
"""
18+
# Initialize the superclass (ChatAnthropic) with provided config parameters
19+
super().__init__(**llm_config)

0 commit comments

Comments
 (0)