Skip to content

Commit fd44d65

Browse files
committed
Fix fastmcp deprecation warning by moving log_level to run() method
- Remove log_level from FastMCP constructor in server.py
- Add log_level='DEBUG' to mcp_server.run() calls in mcp.py
- Fixes deprecation warning that appeared when running 'bm --version'
- Follows fastmcp recommendation to provide log_level when calling run()
1 parent b8191d0 commit fd44d65

File tree

9 files changed

+126
-121
lines changed

9 files changed

+126
-121
lines changed

src/basic_memory/cli/commands/mcp.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -78,11 +78,13 @@ def run_file_sync():
7878
if transport == "stdio":
7979
mcp_server.run(
8080
transport=transport,
81+
log_level="DEBUG",
8182
)
8283
elif transport == "streamable-http" or transport == "sse":
8384
mcp_server.run(
8485
transport=transport,
8586
host=host,
8687
port=port,
8788
path=path,
89+
log_level="DEBUG",
8890
)

src/basic_memory/db.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -95,11 +95,12 @@ async def get_or_create_db(
9595

9696
if _engine is None:
9797
_engine, _session_maker = _create_engine_and_session(db_path, db_type)
98-
98+
9999
# Run migrations automatically unless explicitly disabled
100100
if ensure_migrations:
101101
if app_config is None:
102102
from basic_memory.config import app_config as global_app_config
103+
103104
app_config = global_app_config
104105
await run_migrations(app_config, db_type)
105106

@@ -170,12 +171,12 @@ async def run_migrations(
170171
): # pragma: no cover
171172
"""Run any pending alembic migrations."""
172173
global _migrations_completed
173-
174+
174175
# Skip if migrations already completed unless forced
175176
if _migrations_completed and not force:
176177
logger.debug("Migrations already completed in this session, skipping")
177178
return
178-
179+
179180
logger.info("Running database migrations...")
180181
try:
181182
# Get the absolute path to the alembic directory relative to this file
@@ -206,7 +207,7 @@ async def run_migrations(
206207
# initialize the search Index schema
207208
# the project_id is not used for init_search_index, so we pass a dummy value
208209
await SearchRepository(session_maker, 1).init_search_index()
209-
210+
210211
# Mark migrations as completed
211212
_migrations_completed = True
212213
except Exception as e: # pragma: no cover

src/basic_memory/mcp/server.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -105,6 +105,5 @@ def create_auth_config() -> tuple[AuthSettings | None, Any | None]:
105105
# Create the shared server instance
106106
mcp = FastMCP(
107107
name="Basic Memory",
108-
log_level="DEBUG",
109108
auth=auth_provider,
110109
)

src/basic_memory/repository/entity_repository.py

Lines changed: 42 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -102,44 +102,43 @@ async def find_by_permalinks(self, permalinks: List[str]) -> Sequence[Entity]:
102102

103103
async def upsert_entity(self, entity: Entity) -> Entity:
104104
"""Insert or update entity using a hybrid approach.
105-
105+
106106
This method provides a cleaner alternative to the try/catch approach
107-
for handling permalink and file_path conflicts. It first tries direct
107+
for handling permalink and file_path conflicts. It first tries direct
108108
insertion, then handles conflicts intelligently.
109-
109+
110110
Args:
111111
entity: The entity to insert or update
112-
112+
113113
Returns:
114114
The inserted or updated entity
115115
"""
116116

117117
async with db.scoped_session(self.session_maker) as session:
118118
# Set project_id if applicable and not already set
119119
self._set_project_id_if_needed(entity)
120-
120+
121121
# Check for existing entity with same file_path first
122122
existing_by_path = await session.execute(
123123
select(Entity).where(
124-
Entity.file_path == entity.file_path,
125-
Entity.project_id == entity.project_id
124+
Entity.file_path == entity.file_path, Entity.project_id == entity.project_id
126125
)
127126
)
128127
existing_path_entity = existing_by_path.scalar_one_or_none()
129-
128+
130129
if existing_path_entity:
131130
# Update existing entity with same file path
132131
for key, value in {
133-
'title': entity.title,
134-
'entity_type': entity.entity_type,
135-
'entity_metadata': entity.entity_metadata,
136-
'content_type': entity.content_type,
137-
'permalink': entity.permalink,
138-
'checksum': entity.checksum,
139-
'updated_at': entity.updated_at,
132+
"title": entity.title,
133+
"entity_type": entity.entity_type,
134+
"entity_metadata": entity.entity_metadata,
135+
"content_type": entity.content_type,
136+
"permalink": entity.permalink,
137+
"checksum": entity.checksum,
138+
"updated_at": entity.updated_at,
140139
}.items():
141140
setattr(existing_path_entity, key, value)
142-
141+
143142
await session.flush()
144143
# Return with relationships loaded
145144
query = (
@@ -150,15 +149,17 @@ async def upsert_entity(self, entity: Entity) -> Entity:
150149
result = await session.execute(query)
151150
found = result.scalar_one_or_none()
152151
if not found: # pragma: no cover
153-
raise RuntimeError(f"Failed to retrieve entity after update: {entity.file_path}")
152+
raise RuntimeError(
153+
f"Failed to retrieve entity after update: {entity.file_path}"
154+
)
154155
return found
155-
156+
156157
# No existing entity with same file_path, try insert
157158
try:
158159
# Simple insert for new entity
159160
session.add(entity)
160161
await session.flush()
161-
162+
162163
# Return with relationships loaded
163164
query = (
164165
select(Entity)
@@ -168,36 +169,37 @@ async def upsert_entity(self, entity: Entity) -> Entity:
168169
result = await session.execute(query)
169170
found = result.scalar_one_or_none()
170171
if not found: # pragma: no cover
171-
raise RuntimeError(f"Failed to retrieve entity after insert: {entity.file_path}")
172+
raise RuntimeError(
173+
f"Failed to retrieve entity after insert: {entity.file_path}"
174+
)
172175
return found
173-
176+
174177
except IntegrityError:
175178
# Could be either file_path or permalink conflict
176179
await session.rollback()
177-
180+
178181
# Check if it's a file_path conflict (race condition)
179182
existing_by_path_check = await session.execute(
180183
select(Entity).where(
181-
Entity.file_path == entity.file_path,
182-
Entity.project_id == entity.project_id
184+
Entity.file_path == entity.file_path, Entity.project_id == entity.project_id
183185
)
184186
)
185187
race_condition_entity = existing_by_path_check.scalar_one_or_none()
186-
188+
187189
if race_condition_entity:
188190
# Race condition: file_path conflict detected after our initial check
189191
# Update the existing entity instead
190192
for key, value in {
191-
'title': entity.title,
192-
'entity_type': entity.entity_type,
193-
'entity_metadata': entity.entity_metadata,
194-
'content_type': entity.content_type,
195-
'permalink': entity.permalink,
196-
'checksum': entity.checksum,
197-
'updated_at': entity.updated_at,
193+
"title": entity.title,
194+
"entity_type": entity.entity_type,
195+
"entity_metadata": entity.entity_metadata,
196+
"content_type": entity.content_type,
197+
"permalink": entity.permalink,
198+
"checksum": entity.checksum,
199+
"updated_at": entity.updated_at,
198200
}.items():
199201
setattr(race_condition_entity, key, value)
200-
202+
201203
await session.flush()
202204
# Return the updated entity with relationships loaded
203205
query = (
@@ -208,7 +210,9 @@ async def upsert_entity(self, entity: Entity) -> Entity:
208210
result = await session.execute(query)
209211
found = result.scalar_one_or_none()
210212
if not found: # pragma: no cover
211-
raise RuntimeError(f"Failed to retrieve entity after race condition update: {entity.file_path}")
213+
raise RuntimeError(
214+
f"Failed to retrieve entity after race condition update: {entity.file_path}"
215+
)
212216
return found
213217
else:
214218
# Must be permalink conflict - generate unique permalink
@@ -218,26 +222,25 @@ async def _handle_permalink_conflict(self, entity: Entity, session: AsyncSession
218222
"""Handle permalink conflicts by generating a unique permalink."""
219223
base_permalink = entity.permalink
220224
suffix = 1
221-
225+
222226
# Find a unique permalink
223227
while True:
224228
test_permalink = f"{base_permalink}-{suffix}"
225229
existing = await session.execute(
226230
select(Entity).where(
227-
Entity.permalink == test_permalink,
228-
Entity.project_id == entity.project_id
231+
Entity.permalink == test_permalink, Entity.project_id == entity.project_id
229232
)
230233
)
231234
if existing.scalar_one_or_none() is None:
232235
# Found unique permalink
233236
entity.permalink = test_permalink
234237
break
235238
suffix += 1
236-
239+
237240
# Insert with unique permalink (no conflict possible now)
238241
session.add(entity)
239242
await session.flush()
240-
243+
241244
# Return the inserted entity with relationships loaded
242245
query = (
243246
select(Entity)

src/basic_memory/services/entity_service.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -302,15 +302,15 @@ async def create_entity_from_markdown(
302302
303303
Creates the entity with null checksum to indicate sync not complete.
304304
Relations will be added in second pass.
305-
305+
306306
Uses UPSERT approach to handle permalink/file_path conflicts cleanly.
307307
"""
308308
logger.debug(f"Creating entity: {markdown.frontmatter.title} file_path: {file_path}")
309309
model = entity_model_from_markdown(file_path, markdown)
310310

311311
# Mark as incomplete because we still need to add relations
312312
model.checksum = None
313-
313+
314314
# Use UPSERT to handle conflicts cleanly
315315
try:
316316
return await self.repository.upsert_entity(model)

src/basic_memory/services/initialization.py

Lines changed: 11 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -21,9 +21,9 @@ async def initialize_database(app_config: BasicMemoryConfig) -> None:
2121
2222
Args:
2323
app_config: The Basic Memory project configuration
24-
24+
2525
Note:
26-
Database migrations are now handled automatically when the database
26+
Database migrations are now handled automatically when the database
2727
connection is first established via get_or_create_db().
2828
"""
2929
# Trigger database initialization and migrations by getting the database connection
@@ -50,7 +50,9 @@ async def reconcile_projects_with_config(app_config: BasicMemoryConfig):
5050

5151
# Get database session - migrations handled centrally
5252
_, session_maker = await db.get_or_create_db(
53-
db_path=app_config.database_path, db_type=db.DatabaseType.FILESYSTEM, ensure_migrations=False
53+
db_path=app_config.database_path,
54+
db_type=db.DatabaseType.FILESYSTEM,
55+
ensure_migrations=False,
5456
)
5557
project_repository = ProjectRepository(session_maker)
5658

@@ -71,7 +73,9 @@ async def reconcile_projects_with_config(app_config: BasicMemoryConfig):
7173
async def migrate_legacy_projects(app_config: BasicMemoryConfig):
7274
# Get database session - migrations handled centrally
7375
_, session_maker = await db.get_or_create_db(
74-
db_path=app_config.database_path, db_type=db.DatabaseType.FILESYSTEM, ensure_migrations=False
76+
db_path=app_config.database_path,
77+
db_type=db.DatabaseType.FILESYSTEM,
78+
ensure_migrations=False,
7579
)
7680
logger.info("Migrating legacy projects...")
7781
project_repository = ProjectRepository(session_maker)
@@ -140,7 +144,9 @@ async def initialize_file_sync(
140144

141145
# Load app configuration - migrations handled centrally
142146
_, session_maker = await db.get_or_create_db(
143-
db_path=app_config.database_path, db_type=db.DatabaseType.FILESYSTEM, ensure_migrations=False
147+
db_path=app_config.database_path,
148+
db_type=db.DatabaseType.FILESYSTEM,
149+
ensure_migrations=False,
144150
)
145151
project_repository = ProjectRepository(session_maker)
146152

tests/mcp/test_tool_write_note.py

Lines changed: 16 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -418,59 +418,53 @@ async def test_write_note_preserves_content_frontmatter(app):
418418
@pytest.mark.asyncio
419419
async def test_write_note_permalink_collision_fix_issue_139(app):
420420
"""Test fix for GitHub Issue #139: UNIQUE constraint failed: entity.permalink.
421-
421+
422422
This reproduces the exact scenario described in the issue:
423-
1. Create a note with title "Note 1"
423+
1. Create a note with title "Note 1"
424424
2. Create another note with title "Note 2"
425425
3. Try to create/replace first note again with same title "Note 1"
426-
426+
427427
Before the fix, step 3 would fail with UNIQUE constraint error.
428428
After the fix, it should either update the existing note or create with unique permalink.
429429
"""
430430
# Step 1: Create first note
431431
result1 = await write_note.fn(
432-
title="Note 1",
433-
folder="test",
434-
content="Original content for note 1"
432+
title="Note 1", folder="test", content="Original content for note 1"
435433
)
436434
assert "# Created note" in result1
437435
assert "permalink: test/note-1" in result1
438-
436+
439437
# Step 2: Create second note with different title
440-
result2 = await write_note.fn(
441-
title="Note 2",
442-
folder="test",
443-
content="Content for note 2"
444-
)
438+
result2 = await write_note.fn(title="Note 2", folder="test", content="Content for note 2")
445439
assert "# Created note" in result2
446440
assert "permalink: test/note-2" in result2
447-
441+
448442
# Step 3: Try to create/replace first note again
449443
# This scenario would trigger the UNIQUE constraint failure before the fix
450444
result3 = await write_note.fn(
451445
title="Note 1", # Same title as first note
452-
folder="test", # Same folder as first note
453-
content="Replacement content for note 1" # Different content
446+
folder="test", # Same folder as first note
447+
content="Replacement content for note 1", # Different content
454448
)
455-
449+
456450
# This should not raise a UNIQUE constraint failure error
457451
# It should succeed and either:
458452
# 1. Update the existing note (preferred behavior)
459453
# 2. Create a new note with unique permalink (fallback behavior)
460-
454+
461455
assert result3 is not None
462-
assert ("Updated note" in result3 or "Created note" in result3)
463-
456+
assert "Updated note" in result3 or "Created note" in result3
457+
464458
# The result should contain either the original permalink or a unique one
465-
assert ("permalink: test/note-1" in result3 or "permalink: test/note-1-1" in result3)
466-
459+
assert "permalink: test/note-1" in result3 or "permalink: test/note-1-1" in result3
460+
467461
# Verify we can read back the content
468462
if "permalink: test/note-1" in result3:
469463
# Updated existing note case
470464
content = await read_note.fn("test/note-1")
471465
assert "Replacement content for note 1" in content
472466
else:
473-
# Created new note with unique permalink case
467+
# Created new note with unique permalink case
474468
content = await read_note.fn("test/note-1-1")
475469
assert "Replacement content for note 1" in content
476470
# Original note should still exist

0 commit comments

Comments (0)