Commit 60fbd0c

feat: improve token logging in UserCacheService by redacting sensitive information in debug and warning messages
1 parent 3535983 · commit 60fbd0c

File tree

1 file changed (+15, -14 lines)


app/services/user_cache.py

Lines changed: 15 additions & 14 deletions
@@ -4,6 +4,7 @@
 from loguru import logger
 
 from app.core.constants import CATALOG_KEY, LIBRARY_ITEMS_KEY, PROFILE_KEY, WATCHED_SETS_KEY
+from app.core.security import redact_token
 from app.models.taste_profile import TasteProfile
 from app.services.redis_service import redis_service
 
@@ -43,7 +44,7 @@ async def get_library_items(self, token: str) -> dict[str, Any] | None:
         try:
             return json.loads(cached)
         except json.JSONDecodeError as e:
-            logger.warning(f"Failed to decode cached library items for {token[:8]}...: {e}")
+            logger.warning(f"Failed to decode cached library items for {redact_token(token)}...: {e}")
             return None
 
         return None
@@ -58,7 +59,7 @@ async def set_library_items(self, token: str, library_items: dict[str, Any]) ->
         """
         key = self._library_items_key(token)
         await redis_service.set(key, json.dumps(library_items))
-        logger.debug(f"[{token[:8]}...] Cached library items")
+        logger.debug(f"[{redact_token(token)}...] Cached library items")
 
         # Invalidate all catalog caches when library items are updated
         # This ensures catalogs are regenerated with fresh library data
@@ -73,7 +74,7 @@ async def invalidate_library_items(self, token: str) -> None:
         """
         key = self._library_items_key(token)
         await redis_service.delete(key)
-        logger.debug(f"[{token[:8]}...] Invalidated library items cache")
+        logger.debug(f"[{redact_token(token)}...] Invalidated library items cache")
 
     # Profile Methods
 
@@ -95,7 +96,7 @@ async def get_profile(self, token: str, content_type: str) -> TasteProfile | Non
         try:
             return TasteProfile.model_validate_json(cached)
         except Exception as e:
-            logger.warning(f"Failed to decode cached profile for {token[:8]}.../{content_type}: {e}")
+            logger.warning(f"Failed to decode cached profile for {redact_token(token)}.../{content_type}: {e}")
             return None
 
         return None
@@ -111,7 +112,7 @@ async def set_profile(self, token: str, content_type: str, profile: TasteProfile
         """
         key = self._profile_key(token, content_type)
         await redis_service.set(key, profile.model_dump_json())
-        logger.debug(f"[{token[:8]}...] Cached profile for {content_type}")
+        logger.debug(f"[{redact_token(token)}...] Cached profile for {content_type}")
 
     async def invalidate_profile(self, token: str, content_type: str) -> None:
         """
@@ -123,7 +124,7 @@ async def invalidate_profile(self, token: str, content_type: str) -> None:
         """
         key = self._profile_key(token, content_type)
         await redis_service.delete(key)
-        logger.debug(f"[{token[:8]}...] Invalidated profile cache for {content_type}")
+        logger.debug(f"[{redact_token(token)}...] Invalidated profile cache for {content_type}")
 
     # Watched Sets Methods
 
@@ -148,7 +149,7 @@ async def get_watched_sets(self, token: str, content_type: str) -> tuple[set[int
             watched_imdb = set(data.get("watched_imdb", []))
             return (watched_tmdb, watched_imdb)
         except (json.JSONDecodeError, KeyError, TypeError) as e:
-            logger.warning(f"Failed to decode cached watched sets for {token[:8]}.../{content_type}: {e}")
+            logger.warning(f"Failed to decode cached watched sets for {redact_token(token)}.../{content_type}: {e}")
             return None
 
         return None
@@ -171,7 +172,7 @@ async def set_watched_sets(
             "watched_imdb": list(watched_imdb),
         }
         await redis_service.set(key, json.dumps(data))
-        logger.debug(f"[{token[:8]}...] Cached watched sets for {content_type}")
+        logger.debug(f"[{redact_token(token)}...] Cached watched sets for {content_type}")
 
     async def invalidate_watched_sets(self, token: str, content_type: str) -> None:
         """
@@ -183,7 +184,7 @@ async def invalidate_watched_sets(self, token: str, content_type: str) -> None:
         """
         key = self._watched_sets_key(token, content_type)
         await redis_service.delete(key)
-        logger.debug(f"[{token[:8]}...] Invalidated watched sets cache for {content_type}")
+        logger.debug(f"[{redact_token(token)}...] Invalidated watched sets cache for {content_type}")
 
     # Combined Methods
 
@@ -250,7 +251,7 @@ async def invalidate_all_user_data(self, token: str) -> None:
             await self.invalidate_profile(token, content_type)
             await self.invalidate_watched_sets(token, content_type)
         await self.invalidate_all_catalogs(token)
-        logger.debug(f"[{token[:8]}...] Invalidated all user data cache")
+        logger.debug(f"[{redact_token(token)}...] Invalidated all user data cache")
 
     async def get_catalog(self, token: str, type: str, id: str) -> dict[str, Any] | None:
         """
@@ -282,7 +283,7 @@ async def set_catalog(
         """
         key = CATALOG_KEY.format(token=token, type=type, id=id)
         await redis_service.set(key, json.dumps(catalog), ttl)
-        logger.debug(f"[{token[:8]}...] Cached catalog for {type}/{id}")
+        logger.debug(f"[{redact_token(token)}...] Cached catalog for {type}/{id}")
 
     async def invalidate_catalog(self, token: str, type: str, id: str) -> None:
         """
@@ -295,7 +296,7 @@ async def invalidate_catalog(self, token: str, type: str, id: str) -> None:
         """
         key = CATALOG_KEY.format(token=token, type=type, id=id)
         await redis_service.delete(key)
-        logger.debug(f"[{token[:8]}...] Invalidated catalog cache for {type}/{id}")
+        logger.debug(f"[{redact_token(token)}...] Invalidated catalog cache for {type}/{id}")
 
     async def invalidate_all_catalogs(self, token: str) -> None:
         """
@@ -310,9 +311,9 @@ async def invalidate_all_catalogs(self, token: str) -> None:
         pattern = f"watchly:catalog:{token}:*"
         deleted_count = await redis_service.delete_by_pattern(pattern)
         if deleted_count > 0:
-            logger.debug(f"[{token[:8]}...] Invalidated {deleted_count} catalog cache(s)")
+            logger.debug(f"[{redact_token(token)}...] Invalidated {deleted_count} catalog cache(s)")
         else:
-            logger.debug(f"[{token[:8]}...] No catalog caches found to invalidate")
+            logger.debug(f"[{redact_token(token)}...] No catalog caches found to invalidate")
 
 
 user_cache = UserCacheService()
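
The commit imports redact_token from app.core.security, but its implementation is not part of this diff. Below is a minimal, hypothetical sketch of what such a helper could look like, assuming it keeps a short token prefix and masks the rest; the real module may use a different prefix length, masking style, or hashing.

# Hypothetical sketch only: the actual app.core.security.redact_token is not
# shown in this commit and may behave differently.
def redact_token(token: str, visible: int = 4) -> str:
    """Return a log-safe form of a token: a short prefix plus a fixed mask."""
    if not token:
        return "<empty>"
    # Fixed-length mask so logs do not reveal the token's real length.
    return f"{token[:visible]}****"

With a helper along these lines, a statement such as logger.debug(f"[{redact_token(token)}...] Cached library items") would log something like [abcd****...] Cached library items instead of exposing the first eight characters of the raw credential, as the previous token[:8] formatting did.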
