diff --git a/examples/deepseek/.env.example b/examples/deepseek/.env.example
new file mode 100644
index 00000000..a3994882
--- /dev/null
+++ b/examples/deepseek/.env.example
@@ -0,0 +1 @@
+DEEPSEEK_API_KEY=your_deepseek_api_key_here
diff --git a/examples/deepseek/README.md b/examples/deepseek/README.md
new file mode 100644
index 00000000..77a956fe
--- /dev/null
+++ b/examples/deepseek/README.md
@@ -0,0 +1,42 @@
+# Memori + DeepSeek Example
+
+This example demonstrates how to use Memori with DeepSeek AI.
+
+## Setup
+
+1. Install dependencies:
+```bash
+pip install -r requirements.txt
+```
+
+2. Set up your DeepSeek API key:
+```bash
+export DEEPSEEK_API_KEY="your-api-key"
+```
+
+Or create a `.env` file:
+```bash
+cp .env.example .env
+# Edit .env and add your API key
+```
+
+3. Run the example:
+```bash
+python main.py
+```
+
+## How It Works
+
+This example shows Memori's memory capabilities with DeepSeek:
+
+1. **First conversation**: Establishes facts (name, occupation, city)
+2. **Second conversation**: DeepSeek automatically recalls the city
+3. **Third conversation**: DeepSeek remembers the occupation
+
+Memori automatically captures and stores conversation context, making it available for future interactions.
+
+## Requirements
+
+- Python 3.10+
+- DeepSeek API key
+- Memori Python SDK
diff --git a/examples/deepseek/main.py b/examples/deepseek/main.py
new file mode 100644
index 00000000..76b77285
--- /dev/null
+++ b/examples/deepseek/main.py
@@ -0,0 +1,60 @@
+# Quickstart: Memori + DeepSeek + SQLite
+
+# Demonstrates how Memori adds memory across conversations with DeepSeek.
+
+import os
+from dotenv import load_dotenv
+
+load_dotenv()
+
+# Note: DeepSeek uses an OpenAI-compatible API
+from openai import OpenAI
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+
+from memori import Memori
+
+# Setup DeepSeek client (OpenAI-compatible API)
+client = OpenAI(
+    api_key=os.getenv("DEEPSEEK_API_KEY", ""),
+    base_url="https://api.deepseek.com/v1",
+)
+
+# Setup SQLite
+engine = create_engine("sqlite:///deepseek_memori.db")
+Session = sessionmaker(bind=engine)
+
+# Setup Memori with DeepSeek
+mem = Memori(conn=Session).deepseek.register(client)
+mem.attribution(entity_id="user-123", process_id="deepseek-app")
+mem.config.storage.build()
+
+if __name__ == "__main__":
+    # First conversation - establish facts
+    print("You: My name is berry peng, I am an agent engineer, and I live in Shanghai")
+    response1 = client.chat.completions.create(
+        model="deepseek-chat",
+        messages=[
+            {
+                "role": "user",
+                "content": "My name is berry peng, I am an agent engineer, and I live in Shanghai",
+            }
+        ],
+    )
+    print(f"AI: {response1.choices[0].message.content}\n")
+
+    # Second conversation - Memori recalls context automatically
+    print("You: Where do I live?")
+    response2 = client.chat.completions.create(
+        model="deepseek-chat",
+        messages=[{"role": "user", "content": "Where do I live?"}],
+    )
+    print(f"AI: {response2.choices[0].message.content}\n")
+
+    # Third conversation - context is maintained
+    print("You: What is my occupation?")
+    response3 = client.chat.completions.create(
+        model="deepseek-chat",
+        messages=[{"role": "user", "content": "What is my occupation?"}],
+    )
+    print(f"AI: {response3.choices[0].message.content}")
diff --git a/examples/deepseek/pyproject.toml b/examples/deepseek/pyproject.toml
new file mode 100644
index 00000000..363b1d0f
--- /dev/null
+++ b/examples/deepseek/pyproject.toml
@@ -0,0 +1,10 @@
+[project]
+name = "deepseek-example"
+version = "0.1.0"
+description = "Memori + DeepSeek example"
+dependencies = [
+    "memori",
+    "openai>=1.0.0",
+    "python-dotenv",
+    "sqlalchemy>=2.0.0",
+]
diff --git a/examples/deepseek/requirements.txt b/examples/deepseek/requirements.txt
new file mode 100644
index 00000000..0c41cf0a
--- /dev/null
+++ b/examples/deepseek/requirements.txt
@@ -0,0 +1,4 @@
+memori
+openai>=1.0.0
+python-dotenv
+sqlalchemy>=2.0.0
diff --git a/memori/__init__.py b/memori/__init__.py
index 59d95e88..629182d6 100644
--- a/memori/__init__.py
+++ b/memori/__init__.py
@@ -21,6 +21,7 @@
 from memori.llm._providers import Anthropic as LlmProviderAnthropic
+from memori.llm._providers import DeepSeek as LlmProviderDeepSeek
 from memori.llm._providers import Google as LlmProviderGoogle
 from memori.llm._providers import LangChain as LlmProviderLangChain
 from memori.llm._providers import OpenAi as LlmProviderOpenAi
 from memori.llm._providers import PydanticAi as LlmProviderPydanticAi
 from memori.llm._providers import XAi as LlmProviderXAi
@@ -80,6 +81,7 @@ def __init__(self, conn: Callable[[], Any] | Any | None = None):
         self.llm = LlmRegistry(self)
         self.agno = LlmProviderAgno(self)
         self.anthropic = LlmProviderAnthropic(self)
+        self.deepseek = LlmProviderDeepSeek(self)
         self.google = LlmProviderGoogle(self)
         self.langchain = LlmProviderLangChain(self)
         self.openai = LlmProviderOpenAi(self)
diff --git a/memori/llm/__init__.py b/memori/llm/__init__.py
index 3758faaf..8f410c41 100644
--- a/memori/llm/__init__.py
+++ b/memori/llm/__init__.py
@@ -10,6 +10,6 @@
 from memori.llm import _clients  # noqa: F401
 from memori.llm._registry import Registry
 
-from memori.llm.adapters import anthropic, bedrock, google, openai, xai  # noqa: F401
+from memori.llm.adapters import anthropic, bedrock, deepseek, google, openai, xai  # noqa: F401
 
 __all__ = ["Registry"]
diff --git a/memori/llm/_base.py b/memori/llm/_base.py
index 5e5ea8e7..f7e76fd2 100644
--- a/memori/llm/_base.py
+++ b/memori/llm/_base.py
@@ -27,6 +27,7 @@
     agno_is_xai,
     llm_is_anthropic,
     llm_is_bedrock,
+    llm_is_deepseek,
     llm_is_google,
     llm_is_openai,
     llm_is_xai,
@@ -322,6 +323,7 @@ def inject_conversation_messages(self, kwargs: dict) -> dict:
             llm_is_openai(self.config.framework.provider, self.config.llm.provider)
             or agno_is_openai(self.config.framework.provider, self.config.llm.provider)
             or agno_is_xai(self.config.framework.provider, self.config.llm.provider)
+            or llm_is_deepseek(self.config.framework.provider, self.config.llm.provider)
         ):
             kwargs["messages"] = messages + kwargs["messages"]
         elif (
diff --git a/memori/llm/_clients.py b/memori/llm/_clients.py
index 3a99bf79..a35ba4b5 100644
--- a/memori/llm/_clients.py
+++ b/memori/llm/_clients.py
@@ -449,6 +449,63 @@ def register(self, client, _provider=None, stream=False):
         return self
 
 
+@Registry.register_client(lambda client: "deepseek" in str(type(client).__module__).lower())
+class DeepSeek(BaseClient):
+    def register(self, client, _provider=None, stream=False):
+        from memori.llm._constants import DEEPSEEK_LLM_PROVIDER
+
+        if not hasattr(client, "chat"):
+            raise RuntimeError("client provided is not an instance of DeepSeek")
+
+        if not hasattr(client, "_memori_installed"):
+            client.chat._completions_create = client.chat.completions.create
+
+        try:
+            asyncio.get_running_loop()
+
+            if stream is True:
+                client.chat.completions.create = (
+                    InvokeAsyncStream(
+                        self.config,
+                        client.chat._completions_create,
+                    )
+                    .set_client(_provider, DEEPSEEK_LLM_PROVIDER, getattr(client, "_version", None))
+                    .invoke
+                )
+            else:
+                client.chat.completions.create = (
+                    InvokeAsync(
+                        self.config,
+                        client.chat._completions_create,
+                    )
+                    .set_client(_provider, DEEPSEEK_LLM_PROVIDER, getattr(client, "_version", None))
+                    .invoke
+                )
+        except RuntimeError:
+            if stream is True:
+                client.chat.completions.create = (
+                    InvokeStream(
+                        self.config,
+                        client.chat._completions_create,
+                    )
+                    .set_client(_provider, DEEPSEEK_LLM_PROVIDER, getattr(client, "_version", None))
+                    .invoke
+                )
+            else:
+                client.chat.completions.create = (
+                    Invoke(
+                        self.config,
+                        client.chat._completions_create,
+                    )
+                    .set_client(_provider, DEEPSEEK_LLM_PROVIDER, getattr(client, "_version", None))
+                    .invoke
+                )
+
+        client._memori_installed = True
+
+        return self
+
+
 @Registry.register_client(
     lambda client: type(client).__module__.startswith("pydantic_ai")
 )
diff --git a/memori/llm/_constants.py b/memori/llm/_constants.py
index 9811b5a4..d2eae841 100644
--- a/memori/llm/_constants.py
+++ b/memori/llm/_constants.py
@@ -14,6 +14,7 @@
 AGNO_OPENAI_LLM_PROVIDER = "openai"
 AGNO_XAI_LLM_PROVIDER = "xai"
 ATHROPIC_LLM_PROVIDER = "anthropic"
+DEEPSEEK_LLM_PROVIDER = "deepseek"
 GOOGLE_LLM_PROVIDER = "google"
 LANGCHAIN_CHATBEDROCK_LLM_PROVIDER = "chatbedrock"
 LANGCHAIN_CHATGOOGLEGENAI_LLM_PROVIDER = "chatgooglegenai"
diff --git a/memori/llm/_providers.py b/memori/llm/_providers.py
index 9fc6dafe..f06ac995 100644
--- a/memori/llm/_providers.py
+++ b/memori/llm/_providers.py
@@ -18,6 +18,7 @@
+from memori.llm._clients import DeepSeek as DeepSeekMemoriClient
 from memori.llm._clients import OpenAi as OpenAiMemoriClient
 from memori.llm._clients import PydanticAi as PydanticAiMemoriClient
 from memori.llm._clients import XAi as XAiMemoriClient
 
 
 class Agno(BaseProvider):
@@ -51,6 +52,14 @@ def register(self, client):
         return self.entity
 
 
+class DeepSeek(BaseProvider):
+    def register(self, client):
+        if self.client is None:
+            self.client = DeepSeekMemoriClient(self.config).register(client)
+
+        return self.entity
+
+
 class Google(BaseProvider):
     def register(self, client):
         warnings.warn(
diff --git a/memori/llm/_utils.py b/memori/llm/_utils.py
index 603a22e8..6e9f28ac 100644
--- a/memori/llm/_utils.py
+++ b/memori/llm/_utils.py
@@ -15,6 +15,7 @@
     AGNO_OPENAI_LLM_PROVIDER,
     AGNO_XAI_LLM_PROVIDER,
     ATHROPIC_LLM_PROVIDER,
+    DEEPSEEK_LLM_PROVIDER,
     GOOGLE_LLM_PROVIDER,
     LANGCHAIN_CHATBEDROCK_LLM_PROVIDER,
     LANGCHAIN_CHATGOOGLEGENAI_LLM_PROVIDER,
@@ -42,6 +43,10 @@ def llm_is_bedrock(provider, title):
     )
 
 
+def llm_is_deepseek(provider, title):
+    return title == DEEPSEEK_LLM_PROVIDER
+
+
 def llm_is_google(provider, title):
     return title == GOOGLE_LLM_PROVIDER or (
         provider_is_langchain(provider)
@@ -64,6 +69,10 @@ def agno_is_anthropic(provider, title):
     return provider_is_agno(provider) and title == AGNO_ANTHROPIC_LLM_PROVIDER
 
 
+def agno_is_deepseek(provider, title):
+    return provider_is_agno(provider) and title == DEEPSEEK_LLM_PROVIDER
+
+
 def agno_is_google(provider, title):
     return provider_is_agno(provider) and title == AGNO_GOOGLE_LLM_PROVIDER
 
diff --git a/memori/llm/adapters/deepseek/__init__.py b/memori/llm/adapters/deepseek/__init__.py
new file mode 100644
index 00000000..1b070d46
--- /dev/null
+++ b/memori/llm/adapters/deepseek/__init__.py
@@ -0,0 +1,13 @@
+r"""
+ __  __                           _
+|  \/  | ___ _ __ ___   ___  _ __(_)
+| |\/| |/ _ \ '_ ` _ \ / _ \| '__| |
+| |  | |  __/ | | | | | (_) | |  | |
+|_|  |_|\___|_| |_| |_|\___/|_|  |_|
+              perfectam memoriam
+               memorilabs.ai
+"""
+
+from ._adapter import Adapter
+
+__all__ = ["Adapter"]
diff --git a/memori/llm/adapters/deepseek/_adapter.py b/memori/llm/adapters/deepseek/_adapter.py
new file mode 100644
index 00000000..2dfc149f
--- /dev/null
+++ b/memori/llm/adapters/deepseek/_adapter.py
@@ -0,0 +1,93 @@
+r"""
+ __  __                           _
+|  \/  | ___ _ __ ___   ___  _ __(_)
+| |\/| |/ _ \ '_ ` _ \ / _ \| '__| |
+| |  | |  __/ | | | | | (_) | |  | |
+|_|  |_|\___|_| |_| |_|\___/|_|  |_|
+              perfectam memoriam
+               memorilabs.ai
+"""
+
+from memori.llm._base import BaseLlmAdaptor
+from memori.llm._registry import Registry
+from memori.llm._utils import llm_is_deepseek
+
+
+@Registry.register_adapter(llm_is_deepseek)
+class Adapter(BaseLlmAdaptor):
+    def get_formatted_query(self, payload):
+        """
+        [
+            {
+                "content": "...",
+                "role": "..."
+            }
+        ]
+        """
+
+        try:
+            messages = payload["conversation"]["query"].get("messages", [])
+            return self._exclude_injected_messages(messages, payload)
+        except KeyError:
+            return []
+
+    def get_formatted_response(self, payload):
+        try:
+            choices = payload["conversation"]["response"].get("choices", None)
+        except KeyError:
+            return []
+
+        response = []
+        if choices is not None:
+            if payload["conversation"]["query"].get("stream", None) is None:
+                # Unstreamed
+                # [
+                #     {
+                #         "finish_reason": "...",
+                #         "index": ...,
+                #         "message": {
+                #             "content": "...",
+                #             "role": "..."
+                #         }
+                #     }
+                # ]
+                for choice in choices:
+                    message = choice.get("message", None)
+                    if message is not None:
+                        content = message.get("content", None)
+                        if content is not None:
+                            response.append(
+                                {
+                                    "role": message["role"],
+                                    "text": content,
+                                    "type": "text",
+                                }
+                            )
+            else:
+                # Streamed
+                # [
+                #     {
+                #         "delta": {
+                #             "content": "...",
+                #             "role": "..."
+                #         }
+                #     }
+                # ]
+                content = []
+                role = None
+                for choice in choices:
+                    delta = choice.get("delta", None)
+                    if delta is not None:
+                        if role is None:
+                            role = delta.get("role", None)
+
+                        text_content = delta.get("content", None)
+                        if text_content is not None and len(text_content) > 0:
+                            content.append(text_content)
+
+                if len(content) > 0:
+                    response.append(
+                        {"role": role, "text": "".join(content), "type": "text"}
+                    )
+
+        return response
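
Reviewer note: the streamed branch of `Adapter.get_formatted_response` concatenates the `delta.content` fragments that DeepSeek's OpenAI-compatible streaming API emits chunk by chunk, taking the role from the first delta that carries one. A minimal standalone sketch of that merge, restated outside the class for illustration (`merge_streamed_choices` is a hypothetical helper, not part of this diff; the canonical logic is the adapter above):

```python
# Illustrative restatement of the streamed branch of Adapter.get_formatted_response.
# merge_streamed_choices is a hypothetical name used only for this sketch.


def merge_streamed_choices(choices: list[dict]) -> list[dict]:
    content: list[str] = []
    role = None
    for choice in choices:
        delta = choice.get("delta")
        if delta is None:
            continue
        if role is None:
            role = delta.get("role")  # the role arrives on an early delta only
        text = delta.get("content")
        if text:  # skip missing and empty fragments
            content.append(text)
    if content:
        return [{"role": role, "text": "".join(content), "type": "text"}]
    return []


# Chunks shaped like DeepSeek's streaming deltas merge into one text entry:
chunks = [
    {"delta": {"role": "assistant", "content": "Shang"}},
    {"delta": {"content": "hai"}},
]
assert merge_streamed_choices(chunks) == [
    {"role": "assistant", "text": "Shanghai", "type": "text"}
]
```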