4 changes: 2 additions & 2 deletions langgraph/store/redis/__init__.py
@@ -5,7 +5,6 @@
 import asyncio
 import json
 import math
-import uuid
 from contextlib import contextmanager
 from datetime import datetime, timezone
 from typing import Any, Iterable, Iterator, Optional, Sequence, cast
@@ -14,6 +13,7 @@
 from redisvl.query import FilterQuery, VectorQuery
 from redisvl.redis.connection import RedisConnectionFactory
 from redisvl.utils.token_escaper import TokenEscaper
+from ulid import ULID

 from langgraph.store.base import (
     BaseStore,
@@ -223,7 +223,7 @@ def _batch_put_ops(
         # Generate IDs for PUT operations
         for _, op in put_ops:
             if op.value is not None:
-                generated_doc_id = uuid.uuid4().hex
+                generated_doc_id = str(ULID())
                 namespace = _namespace_to_text(op.namespace)
                 doc_ids[(namespace, op.key)] = generated_doc_id

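Note on the change above: uuid.uuid4().hex yields a 32-character random hex string with no ordering guarantees, while str(ULID()) (from the python-ulid package this PR adds) yields a 26-character Crockford Base32 string whose prefix encodes a millisecond timestamp, so later-generated doc IDs sort lexicographically after earlier ones. A minimal sketch of the difference, assuming python-ulid is installed:

```python
# Sketch only: contrasts the old and new doc-ID formats (assumes python-ulid).
import time
import uuid

from ulid import ULID

old_id = uuid.uuid4().hex   # 32 hex chars, random, no ordering guarantees
new_id = str(ULID())        # 26 Base32 chars, timestamp-prefixed

first = str(ULID())
time.sleep(0.002)           # ensure a later millisecond timestamp
second = str(ULID())
assert first < second       # string order follows creation order
```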
4 changes: 2 additions & 2 deletions langgraph/store/redis/aio.py
@@ -2,7 +2,6 @@

 import asyncio
 import json
-import uuid
 import weakref
 from contextlib import asynccontextmanager
 from datetime import datetime, timezone
@@ -13,6 +12,7 @@
 from redisvl.query import FilterQuery, VectorQuery
 from redisvl.redis.connection import RedisConnectionFactory
 from redisvl.utils.token_escaper import TokenEscaper
+from ulid import ULID

 from langgraph.store.base import (
     BaseStore,
@@ -398,7 +398,7 @@ async def _batch_put_ops(
         # Generate IDs for PUT operations
         for _, op in put_ops:
             if op.value is not None:
-                generated_doc_id = uuid.uuid4().hex
+                generated_doc_id = str(ULID())
                 namespace = _namespace_to_text(op.namespace)
                 doc_ids[(namespace, op.key)] = generated_doc_id

17 changes: 16 additions & 1 deletion poetry.lock

(generated file; diff not rendered)

1 change: 1 addition & 0 deletions pyproject.toml
@@ -13,6 +13,7 @@ python = ">=3.9,<3.13"
 langgraph-checkpoint = "^2.0.10"
 redisvl = "^0.3.7"
 redis = "^5.2.1"
+python-ulid = "^3.0.0"

 [tool.poetry.group.dev.dependencies]
 ruff = "^0.6.2"
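The new runtime dependency is python-ulid (imported as ulid). One practical consequence of ULID-based doc IDs, sketched under the assumption that the stored ID is the unmodified 26-character string and that python-ulid's from_str and datetime accessors behave as documented: the creation time can be recovered from the ID itself.

```python
# Sketch only: recovering the creation time embedded in a ULID doc ID
# (assumes the python-ulid package declared above).
from ulid import ULID

doc_id = str(ULID())
parsed = ULID.from_str(doc_id)   # round-trips the 26-character string
print(parsed.datetime)           # timezone-aware UTC creation timestamp
```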
4 changes: 2 additions & 2 deletions tests/test_async_store.py
@@ -1,6 +1,5 @@
 """Tests for AsyncRedisStore."""

-import uuid
 from typing import Any, AsyncGenerator, Dict, Sequence, cast

 import pytest
@@ -9,6 +8,7 @@
 from langchain_core.runnables import RunnableConfig
 from langchain_openai import OpenAIEmbeddings
 from redis.asyncio import Redis
+from ulid import ULID

 from langgraph.checkpoint.redis import AsyncRedisSaver
 from langgraph.constants import START
@@ -525,7 +525,7 @@ def call_model(
     # Store new memories if the user asks the model to remember
     if "remember" in last_message.content.lower(): # type:ignore[union-attr]
         memory = "User name is Bob"
-        store.put(namespace, str(uuid.uuid4()), {"data": memory})
+        store.put(namespace, str(ULID()), {"data": memory})

     messages = [{"role": "system", "content": system_msg}]
     messages.extend([msg.model_dump() for msg in state["messages"]])
4 changes: 2 additions & 2 deletions tests/test_store.py
@@ -1,4 +1,3 @@
-import uuid
 from typing import Any, Dict, Sequence, cast

 import pytest
@@ -7,6 +6,7 @@
 from langchain_core.runnables import RunnableConfig
 from langchain_openai import OpenAIEmbeddings
 from redis import Redis
+from ulid import ULID

 from langgraph.checkpoint.redis import RedisSaver
 from langgraph.graph import START, MessagesState, StateGraph
@@ -464,7 +464,7 @@ def call_model(
     # Store new memories if the user asks the model to remember
     if "remember" in last_message.content.lower(): # type:ignore[union-attr]
         memory = "User name is Bob"
-        store.put(namespace, str(uuid.uuid4()), {"data": memory})
+        store.put(namespace, str(ULID()), {"data": memory})

     messages = [{"role": "system", "content": system_msg}]
     messages.extend([msg.model_dump() for msg in state["messages"]])
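For reference, the tests' memory-write pattern shown in both test files boils down to putting a dict under a freshly generated ULID key. A minimal sketch of that pattern in isolation, using langgraph's InMemoryStore instead of the Redis store under test so it runs without a Redis server; the namespace tuple here is hypothetical:

```python
# Sketch only: store a "memory" under a ULID key, then read it back
# (uses langgraph's InMemoryStore rather than RedisStore/AsyncRedisStore).
from langgraph.store.memory import InMemoryStore
from ulid import ULID

store = InMemoryStore()
namespace = ("memories", "user-123")   # hypothetical; the tests derive theirs per user

key = str(ULID())                      # unique, time-ordered key, as in the updated tests
store.put(namespace, key, {"data": "User name is Bob"})

item = store.get(namespace, key)
assert item is not None and item.value == {"data": "User name is Bob"}
```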