1 change: 1 addition & 0 deletions examples/deepseek/.env.example
@@ -0,0 +1 @@
DEEPSEEK_API_KEY=your_deepseek_api_key_here
42 changes: 42 additions & 0 deletions examples/deepseek/README.md
@@ -0,0 +1,42 @@
# Memori + DeepSeek Example

This example demonstrates how to use Memori with DeepSeek AI.

## Setup

1. Install dependencies:
   ```bash
   pip install -r requirements.txt
   ```

2. Set up your DeepSeek API key:
   ```bash
   export DEEPSEEK_API_KEY="your-api-key"
   ```

   Or create a `.env` file:
   ```bash
   cp .env.example .env
   # Edit .env and add your API key
   ```

3. Run the example:
   ```bash
   python main.py
   ```

## How It Works

This example shows Memori's memory capabilities with DeepSeek:

1. **First conversation**: Establishes facts (name, profession, city)
2. **Second conversation**: DeepSeek automatically recalls the city
3. **Third conversation**: DeepSeek remembers the profession

Memori automatically captures and stores conversation context, making it available for future interactions.
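
For reference, this is the core wiring from `main.py` (a minimal sketch; see that file for the full runnable script):

```python
from openai import OpenAI
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from memori import Memori

# DeepSeek exposes an OpenAI-compatible API
client = OpenAI(api_key="your-api-key", base_url="https://api.deepseek.com/v1")
Session = sessionmaker(bind=create_engine("sqlite:///deepseek_memori.db"))

mem = Memori(conn=Session).deepseek.register(client)
mem.attribution(entity_id="user-123", process_id="deepseek-app")
mem.config.storage.build()
```

After registration, ordinary `client.chat.completions.create(...)` calls are captured and enriched with recalled context automatically.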

## Requirements

- Python 3.10+
- DeepSeek API key
- Memori Python SDK
60 changes: 60 additions & 0 deletions examples/deepseek/main.py
@@ -0,0 +1,60 @@
# Quickstart: Memori + DeepSeek + SQLite

# Demonstrates how Memori adds memory across conversations with DeepSeek.

import os
from dotenv import load_dotenv

load_dotenv()

# Note: DeepSeek uses OpenAI-compatible API
from openai import OpenAI
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from memori import Memori

# Setup DeepSeek client (OpenAI-compatible API)
client = OpenAI(
    api_key=os.getenv("DEEPSEEK_API_KEY", "<your_deepseek_api_key_here>"),
    base_url="https://api.deepseek.com/v1",
)

# Setup SQLite
engine = create_engine("sqlite:///deepseek_memori.db")
Session = sessionmaker(bind=engine)

# Setup Memori with DeepSeek
mem = Memori(conn=Session).deepseek.register(client)
mem.attribution(entity_id="user-123", process_id="deepseek-app")
mem.config.storage.build()

if __name__ == "__main__":
    # First conversation - establish facts
    print("You: My name is berry peng, I am an agent development engineer, and I currently live in Shanghai")
    response1 = client.chat.completions.create(
        model="deepseek-chat",
        messages=[
            {
                "role": "user",
                "content": "My name is berry peng, I am an agent development engineer, and I currently live in Shanghai",
            }
        ],
    )
    print(f"AI: {response1.choices[0].message.content}\n")

    # Second conversation - Memori recalls context automatically
    print("You: Where do I live?")
    response2 = client.chat.completions.create(
        model="deepseek-chat",
        messages=[{"role": "user", "content": "Where do I live?"}],
    )
    print(f"AI: {response2.choices[0].message.content}\n")

    # Third conversation - context is maintained
    print("You: What is my profession?")
    response3 = client.chat.completions.create(
        model="deepseek-chat",
        messages=[{"role": "user", "content": "What is my profession?"}],
    )
    print(f"AI: {response3.choices[0].message.content}")
10 changes: 10 additions & 0 deletions examples/deepseek/pyproject.toml
@@ -0,0 +1,10 @@
[project]
name = "deepseek-example"
version = "0.1.0"
description = "Memori + DeepSeek example"
dependencies = [
    "memori",
    "openai>=1.0.0",
    "python-dotenv",
    "sqlalchemy>=2.0.0",
]
4 changes: 4 additions & 0 deletions examples/deepseek/requirements.txt
@@ -0,0 +1,4 @@
memori
openai>=1.0.0
python-dotenv
sqlalchemy>=2.0.0
2 changes: 2 additions & 0 deletions memori/__init__.py
@@ -21,6 +21,7 @@
from memori.llm._providers import Anthropic as LlmProviderAnthropic
from memori.llm._providers import DeepSeek as LlmProviderDeepSeek
from memori.llm._providers import Google as LlmProviderGoogle
from memori.llm._providers import LangChain as LlmProviderLangChain
from memori.llm._providers import OpenAi as LlmProviderOpenAi
from memori.llm._providers import PydanticAi as LlmProviderPydanticAi
from memori.llm._providers import XAi as LlmProviderXAi
@@ -80,6 +81,7 @@ def __init__(self, conn: Callable[[], Any] | Any | None = None):
        self.llm = LlmRegistry(self)
        self.agno = LlmProviderAgno(self)
        self.anthropic = LlmProviderAnthropic(self)
        self.deepseek = LlmProviderDeepSeek(self)
        self.google = LlmProviderGoogle(self)
        self.langchain = LlmProviderLangChain(self)
        self.openai = LlmProviderOpenAi(self)
2 changes: 1 addition & 1 deletion memori/llm/__init__.py
@@ -10,6 +10,6 @@

from memori.llm import _clients # noqa: F401
from memori.llm._registry import Registry
-from memori.llm.adapters import anthropic, bedrock, google, openai, xai  # noqa: F401
+from memori.llm.adapters import anthropic, bedrock, deepseek, google, openai, xai  # noqa: F401

__all__ = ["Registry"]
2 changes: 2 additions & 0 deletions memori/llm/_base.py
@@ -27,6 +27,7 @@
    agno_is_xai,
    llm_is_anthropic,
    llm_is_bedrock,
    llm_is_deepseek,
    llm_is_google,
    llm_is_openai,
    llm_is_xai,
@@ -322,6 +323,7 @@ def inject_conversation_messages(self, kwargs: dict) -> dict:
            llm_is_openai(self.config.framework.provider, self.config.llm.provider)
            or agno_is_openai(self.config.framework.provider, self.config.llm.provider)
            or agno_is_xai(self.config.framework.provider, self.config.llm.provider)
            or llm_is_deepseek(self.config.framework.provider, self.config.llm.provider)
        ):
            kwargs["messages"] = messages + kwargs["messages"]
        elif (
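
The new `llm_is_deepseek` branch routes DeepSeek through the same injection path as the other OpenAI-compatible providers: recalled messages are prepended to the outgoing `messages` list. A small illustration of that assignment (the recalled message below is hypothetical; real content comes from Memori's storage):

```python
# Hypothetical recalled context, for illustration only.
messages = [{"role": "system", "content": "The user lives in Shanghai."}]
kwargs = {"messages": [{"role": "user", "content": "Where do I live?"}]}

# The injection performed by the branch above:
kwargs["messages"] = messages + kwargs["messages"]
# -> [{"role": "system", "content": "The user lives in Shanghai."},
#     {"role": "user", "content": "Where do I live?"}]
```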
57 changes: 57 additions & 0 deletions memori/llm/_clients.py
@@ -449,6 +449,63 @@ def register(self, client, _provider=None, stream=False):
        return self


@Registry.register_client(lambda client: "deepseek" in str(type(client).__module__).lower())
class DeepSeek(BaseClient):
    def register(self, client, _provider=None, stream=False):
        from memori.llm._constants import DEEPSEEK_LLM_PROVIDER

        if not hasattr(client, "chat"):
            raise RuntimeError("client provided is not an instance of DeepSeek")

        if not hasattr(client, "_memori_installed"):
            # Keep a handle on the original create() so the wrappers can delegate to it.
            client.chat._completions_create = client.chat.completions.create

        try:
            # A running event loop means the client is being used asynchronously.
            asyncio.get_running_loop()

            if stream is True:
                client.chat.completions.create = (
                    InvokeAsyncStream(
                        self.config,
                        client.chat._completions_create,
                    )
                    .set_client(_provider, DEEPSEEK_LLM_PROVIDER, getattr(client, "_version", None))
                    .invoke
                )
            else:
                client.chat.completions.create = (
                    InvokeAsync(
                        self.config,
                        client.chat._completions_create,
                    )
                    .set_client(_provider, DEEPSEEK_LLM_PROVIDER, getattr(client, "_version", None))
                    .invoke
                )
        except RuntimeError:
            # No running event loop: install the synchronous invokers instead.
            if stream is True:
                client.chat.completions.create = (
                    InvokeStream(
                        self.config,
                        client.chat._completions_create,
                    )
                    .set_client(_provider, DEEPSEEK_LLM_PROVIDER, getattr(client, "_version", None))
                    .invoke
                )
            else:
                client.chat.completions.create = (
                    Invoke(
                        self.config,
                        client.chat._completions_create,
                    )
                    .set_client(_provider, DEEPSEEK_LLM_PROVIDER, getattr(client, "_version", None))
                    .invoke
                )

        client._memori_installed = True

        return self


@Registry.register_client(
    lambda client: type(client).__module__.startswith("pydantic_ai")
)
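
The net effect of `register()` is that the original `chat.completions.create` is saved and the public attribute is repointed at a Memori invoker (sync or async, streaming or not, chosen by the event-loop probe). A hedged sketch of the observable result; the recording behavior itself is implied by the `Invoke*` class names rather than shown in this diff:

```python
from openai import OpenAI
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from memori import Memori

client = OpenAI(api_key="your-api-key", base_url="https://api.deepseek.com/v1")
original = client.chat.completions.create

Session = sessionmaker(bind=create_engine("sqlite:///deepseek_memori.db"))
Memori(conn=Session).deepseek.register(client)

# The original callable is kept for the wrapper to delegate to,
# and the public attribute now points at a Memori invoker.
assert client.chat._completions_create is original
assert client.chat.completions.create is not original
```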
1 change: 1 addition & 0 deletions memori/llm/_constants.py
@@ -14,6 +14,7 @@
AGNO_OPENAI_LLM_PROVIDER = "openai"
AGNO_XAI_LLM_PROVIDER = "xai"
ATHROPIC_LLM_PROVIDER = "anthropic"
DEEPSEEK_LLM_PROVIDER = "deepseek"
GOOGLE_LLM_PROVIDER = "google"
LANGCHAIN_CHATBEDROCK_LLM_PROVIDER = "chatbedrock"
LANGCHAIN_CHATGOOGLEGENAI_LLM_PROVIDER = "chatgooglegenai"
9 changes: 9 additions & 0 deletions memori/llm/_providers.py
@@ -18,6 +18,7 @@
from memori.llm._clients import DeepSeek as DeepSeekMemoriClient
from memori.llm._clients import OpenAi as OpenAiMemoriClient
from memori.llm._clients import PydanticAi as PydanticAiMemoriClient
from memori.llm._clients import XAi as XAiMemoriClient


class Agno(BaseProvider):
@@ -51,6 +52,14 @@ def register(self, client):
        return self.entity


class DeepSeek(BaseProvider):
    def register(self, client):
        if self.client is None:
            self.client = DeepSeekMemoriClient(self.config).register(client)

        return self.entity


class Google(BaseProvider):
    def register(self, client):
        warnings.warn(
9 changes: 9 additions & 0 deletions memori/llm/_utils.py
@@ -15,6 +15,7 @@
    AGNO_OPENAI_LLM_PROVIDER,
    AGNO_XAI_LLM_PROVIDER,
    ATHROPIC_LLM_PROVIDER,
    DEEPSEEK_LLM_PROVIDER,
    GOOGLE_LLM_PROVIDER,
    LANGCHAIN_CHATBEDROCK_LLM_PROVIDER,
    LANGCHAIN_CHATGOOGLEGENAI_LLM_PROVIDER,
@@ -42,6 +43,10 @@ def llm_is_bedrock(provider, title):
    )


def llm_is_deepseek(provider, title):
    return title == DEEPSEEK_LLM_PROVIDER


def llm_is_google(provider, title):
    return title == GOOGLE_LLM_PROVIDER or (
        provider_is_langchain(provider)
@@ -64,6 +69,10 @@ def agno_is_anthropic(provider, title):
    return provider_is_agno(provider) and title == AGNO_ANTHROPIC_LLM_PROVIDER


def agno_is_deepseek(provider, title):
    return provider_is_agno(provider) and title == DEEPSEEK_LLM_PROVIDER


def agno_is_google(provider, title):
    return provider_is_agno(provider) and title == AGNO_GOOGLE_LLM_PROVIDER

13 changes: 13 additions & 0 deletions memori/llm/adapters/deepseek/__init__.py
@@ -0,0 +1,13 @@
r"""
__ __ _
| \/ | ___ _ __ ___ ___ _ __(_)
| |\/| |/ _ \ '_ ` _ \ / _ \| '__| |
| | | | __/ | | | | | (_) | | | |
|_| |_|\___|_| |_| |_|\___/|_| |_|
perfectam memoriam
memorilabs.ai
"""

from ._adapter import Adapter

__all__ = ["Adapter"]
93 changes: 93 additions & 0 deletions memori/llm/adapters/deepseek/_adapter.py
@@ -0,0 +1,93 @@
r"""
__ __ _
| \/ | ___ _ __ ___ ___ _ __(_)
| |\/| |/ _ \ '_ ` _ \ / _ \| '__| |
| | | | __/ | | | | | (_) | | | |
|_| |_|\___|_| |_| |_|\___/|_| |_|
perfectam memoriam
memorilabs.ai
"""

from memori.llm._base import BaseLlmAdaptor
from memori.llm._registry import Registry
from memori.llm._utils import llm_is_deepseek


@Registry.register_adapter(llm_is_deepseek)
class Adapter(BaseLlmAdaptor):
    def get_formatted_query(self, payload):
        """
        [
            {
                "content": "...",
                "role": "..."
            }
        ]
        """

        try:
            messages = payload["conversation"]["query"].get("messages", [])
            return self._exclude_injected_messages(messages, payload)
        except KeyError:
            return []

    def get_formatted_response(self, payload):
        try:
            choices = payload["conversation"]["response"].get("choices", None)
        except KeyError:
            return []

        response = []
        if choices is not None:
            if payload["conversation"]["query"].get("stream", None) is None:
                # Unstreamed
                # [
                #     {
                #         "finish_reason": "...",
                #         "index": ...,
                #         "message": {
                #             "content": "...",
                #             "role": "..."
                #         }
                #     }
                # ]
                for choice in choices:
                    message = choice.get("message", None)
                    if message is not None:
                        content = message.get("content", None)
                        if content is not None:
                            response.append(
                                {
                                    "role": message["role"],
                                    "text": content,
                                    "type": "text",
                                }
                            )
            else:
                # Streamed
                # [
                #     {
                #         "delta": {
                #             "content": "...",
                #             "role": "..."
                #         }
                #     }
                # ]
                content = []
                role = None
                for choice in choices:
                    delta = choice.get("delta", None)
                    if delta is not None:
                        if role is None:
                            role = delta.get("role", None)

                        text_content = delta.get("content", None)
                        if text_content is not None and len(text_content) > 0:
                            content.append(text_content)

                if len(content) > 0:
                    response.append(
                        {"role": role, "text": "".join(content), "type": "text"}
                    )

        return response
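
To make the unstreamed branch concrete, here is a hypothetical payload shaped like the structures documented in the comments above, together with the value `get_formatted_response` would produce for it:

```python
# Hypothetical payload, matching the shapes documented above.
payload = {
    "conversation": {
        "query": {"messages": [{"role": "user", "content": "Where do I live?"}]},
        "response": {
            "choices": [
                {
                    "finish_reason": "stop",
                    "index": 0,
                    "message": {"role": "assistant", "content": "You live in Shanghai."},
                }
            ]
        },
    }
}

# Since the query carries no "stream" key, the unstreamed branch runs and returns:
# [{"role": "assistant", "text": "You live in Shanghai.", "type": "text"}]
```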