
Commit bd5c558

feat: Refactor catalog fetching and update migration task handling
1 parent 5382408 commit bd5c558

6 files changed (+24, -134 lines)

app/api/endpoints/health.py

Lines changed: 0 additions & 28 deletions
@@ -1,36 +1,8 @@
 from fastapi import APIRouter
-from loguru import logger
-
-from app.services.token_store import token_store

 router = APIRouter(tags=["health"])


 @router.get("/health", summary="Simple readiness probe")
 async def health_check() -> dict[str, str]:
     return {"status": "ok"}
-
-
-@router.get("/metrics", summary="Runtime metrics (lightweight)")
-async def metrics() -> dict:
-    """Return lightweight runtime metrics useful for diagnosing Redis connection growth."""
-    try:
-        client = await token_store._get_client()
-    except Exception as exc:
-        logger.warning(f"Failed to fetch Redis client for metrics: {exc}")
-        return {"redis": "unavailable"}
-
-    metrics: dict = {}
-    try:
-        info = await client.info(section="clients")
-        metrics["redis_connected_clients"] = int(info.get("connected_clients", 0))
-    except Exception as exc:
-        logger.warning(f"Failed to read Redis INFO clients: {exc}")
-        metrics["redis_connected_clients"] = "error"
-
-    try:
-        metrics["per_request_redis_calls_last"] = token_store.get_call_count()
-    except Exception:
-        metrics["per_request_redis_calls_last"] = "error"
-
-    return metrics
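With the /metrics endpoint gone, only the readiness probe remains. A minimal smoke test for it, as a hedged sketch: the FastAPI TestClient usage below assumes the health router is importable under the module path shown in the diff header.

from fastapi import FastAPI
from fastapi.testclient import TestClient

from app.api.endpoints.health import router as health_router  # path taken from the diff header

app = FastAPI()
app.include_router(health_router)

client = TestClient(app)
resp = client.get("/health")
assert resp.status_code == 200
assert resp.json() == {"status": "ok"}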

app/api/endpoints/manifest.py

Lines changed: 10 additions & 24 deletions
@@ -1,4 +1,3 @@
-from async_lru import alru_cache
 from fastapi import HTTPException, Response
 from fastapi.routing import APIRouter

@@ -55,29 +54,14 @@ def get_base_manifest(user_settings: UserSettings | None = None):
     }


-# Cache catalog definitions for 1 hour (3600s)
-@alru_cache(maxsize=1000, ttl=3600)
-async def fetch_catalogs(token: str):
-    credentials = await token_store.get_user_data(token)
-    if not credentials:
-        raise HTTPException(status_code=401, detail="Invalid or expired token. Please reconfigure the addon.")
-
-    if credentials.get("settings"):
-        user_settings = UserSettings(**credentials["settings"])
-    else:
-        user_settings = get_default_settings()
-
-    stremio_service = StremioService(auth_key=credentials.get("authKey"))
-
-    # Note: get_library_items is expensive, but we need it to determine *which* genre catalogs to show.
+async def build_dynamic_catalogs(stremio_service: StremioService, user_settings: UserSettings) -> list[dict]:
+    # Note: get_library_items is the heavy call; StremioService has its own short cache.
     library_items = await stremio_service.get_library_items()
-    dynamic_catalog_service = DynamicCatalogService(stremio_service=stremio_service, language=user_settings.language)
-
-    # Base catalogs are already in manifest, these are *extra* dynamic ones
-    # Pass user_settings to filter/rename
-    catalogs = await dynamic_catalog_service.get_dynamic_catalogs(library_items, user_settings)
-
-    return catalogs
+    dynamic_catalog_service = DynamicCatalogService(
+        stremio_service=stremio_service,
+        language=user_settings.language,
+    )
+    return await dynamic_catalog_service.get_dynamic_catalogs(library_items, user_settings)


 def get_config_id(catalog) -> str | None:
@@ -111,7 +95,9 @@ async def _manifest_handler(response: Response, token: str):

     base_manifest = get_base_manifest(user_settings)

-    fetched_catalogs = await fetch_catalogs(token)
+    # Build dynamic catalogs using the already-fetched credentials
+    stremio_service = StremioService(auth_key=creds.get("authKey"))
+    fetched_catalogs = await build_dynamic_catalogs(stremio_service, user_settings or get_default_settings())

     all_catalogs = [c.copy() for c in base_manifest["catalogs"]] + [c.copy() for c in fetched_catalogs]
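The net effect: credential resolution and settings parsing stay in the handler, and only the catalog-building step is factored out. A hedged sketch of the resulting flow, pieced together from the removed fetch_catalogs body and the new handler lines; the helper name resolve_catalogs_for and the UserSettings/get_default_settings import paths are hypothetical, the rest mirrors the diff.

from fastapi import HTTPException

from app.api.endpoints.manifest import build_dynamic_catalogs
from app.schemas.user_settings import UserSettings, get_default_settings  # hypothetical import path
from app.services.stremio_service import StremioService
from app.services.token_store import token_store


async def resolve_catalogs_for(token: str) -> list[dict]:
    """Sketch of the handler's new flow: resolve credentials once, then build catalogs."""
    creds = await token_store.get_user_data(token)
    if not creds:
        raise HTTPException(status_code=401, detail="Invalid or expired token. Please reconfigure the addon.")

    settings_data = creds.get("settings")
    user_settings = UserSettings(**settings_data) if settings_data else get_default_settings()

    stremio_service = StremioService(auth_key=creds.get("authKey"))
    return await build_dynamic_catalogs(stremio_service, user_settings)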

app/core/app.py

Lines changed: 10 additions & 24 deletions
@@ -40,16 +40,18 @@ async def lifespan(app: FastAPI):
     Manage application lifespan events (startup/shutdown).
     """
     global catalog_updater
-    task = asyncio.create_task(migrate_tokens())

-    # Ensure background exceptions are surfaced in logs
-    def _on_done(t: asyncio.Task):
-        try:
-            t.result()
-        except Exception as exc:
-            logger.error(f"migrate_tokens background task failed: {exc}")
+    if settings.HOST_NAME.lower() != "https://1ccea4301587-watchly.baby-beamup.club":
+        task = asyncio.create_task(migrate_tokens())

-    task.add_done_callback(_on_done)
+        # Ensure background exceptions are surfaced in logs
+        def _on_done(t: asyncio.Task):
+            try:
+                t.result()
+            except Exception as exc:
+                logger.error(f"migrate_tokens background task failed: {exc}")
+
+        task.add_done_callback(_on_done)

     # Startup
     if settings.AUTO_UPDATE_CATALOGS:
@@ -120,22 +122,6 @@ async def block_missing_token_middleware(request: Request, call_next):
     return await call_next(request)


-# Middleware to track per-request Redis calls and attach as response header for diagnostics
-@app.middleware("http")
-async def redis_calls_middleware(request: Request, call_next):
-    try:
-        token_store.reset_call_counter()
-    except Exception:
-        pass
-    response = await call_next(request)
-    try:
-        count = token_store.get_call_count()
-        response.headers["X-Redis-Calls"] = str(count)
-    except Exception:
-        pass
-    return response
-
-
 # Serve static files
 # Static directory is at project root (3 levels up from app/core/app.py)
 # app/core/app.py -> app/core -> app -> root
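The surviving _on_done callback is the standard way to surface exceptions from a fire-and-forget task: calling Task.result() inside a done callback re-raises whatever the coroutine raised. A standalone illustration of that pattern; the failing migrate_tokens stub below is invented for the demo.

import asyncio


async def migrate_tokens() -> None:
    raise RuntimeError("boom")  # stand-in failure for the demo


def _on_done(t: asyncio.Task) -> None:
    try:
        t.result()  # re-raises the task's exception, if any
    except Exception as exc:
        print(f"migrate_tokens background task failed: {exc}")


async def main() -> None:
    task = asyncio.create_task(migrate_tokens())
    task.add_done_callback(_on_done)
    await asyncio.sleep(0)  # yield so the task runs and the callback fires


asyncio.run(main())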

app/core/config.py

Lines changed: 1 addition & 4 deletions
@@ -32,10 +32,7 @@ class Settings(BaseSettings):
     ANNOUNCEMENT_HTML: str = ""
     AUTO_UPDATE_CATALOGS: bool = True
     CATALOG_UPDATE_MODE: Literal["cron", "interval"] = "cron"  # "cron" for fixed times, "interval" for periodic
-    CATALOG_UPDATE_CRON_SCHEDULES: list[dict] = (
-        {"hour": 12, "minute": 0, "id": "catalog_refresh_noon"},
-        {"hour": 0, "minute": 0, "id": "catalog_refresh_midnight"},
-    )
+    CATALOG_UPDATE_CRON_SCHEDULES: list[dict] = ({"hour": 0, "minute": 0, "id": "catalog_refresh_midnight"},)
     CATALOG_REFRESH_INTERVAL_SECONDS: int = 6 * 60 * 60  # 6 hours (used when CATALOG_UPDATE_MODE="interval")
     APP_ENV: Literal["development", "production", "vercel"] = "development"
     HOST_NAME: str = "https://1ccea4301587-watchly.baby-beamup.club"
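Since Settings extends BaseSettings, complex fields such as this list[dict] are read from environment variables as JSON, so the dropped noon schedule can still be restored per deployment without code changes. A hedged sketch; it assumes Settings can be instantiated with its other defaults in this context.

import json
import os

# JSON value for the complex field; pydantic BaseSettings parses it on load.
os.environ["CATALOG_UPDATE_CRON_SCHEDULES"] = json.dumps(
    [
        {"hour": 12, "minute": 0, "id": "catalog_refresh_noon"},
        {"hour": 0, "minute": 0, "id": "catalog_refresh_midnight"},
    ]
)

from app.core.config import Settings  # module path taken from the diff

print(Settings().CATALOG_UPDATE_CRON_SCHEDULES)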

app/services/stremio_service.py

Lines changed: 1 addition & 9 deletions
@@ -215,15 +215,11 @@ async def get_user_email(self) -> str:
         user_info = await self.get_user_info()
         return user_info.get("email", "")

-    async def get_library_items(self, use_cache: bool = True, cache_ttl_seconds: int = 30) -> dict[str, list[dict]]:
+    async def get_library_items(self) -> dict[str, list[dict]]:
         """
         Fetch library items from Stremio once and return both watched and loved items.
         Returns a dict with 'watched' and 'loved' keys.
         """
-        import time
-
-        if use_cache and self._library_cache and time.time() < self._library_cache_expiry:
-            return self._library_cache

         if not self._auth_key:
             logger.warning("Stremio auth key not configured")
@@ -333,10 +329,6 @@ def _sort_key(x: dict):
                 "added": added_items,
                 "removed": removed_items,
             }
-            # cache
-            if use_cache and cache_ttl_seconds > 0:
-                self._library_cache = result
-                self._library_cache_expiry = time.time() + cache_ttl_seconds
             return result
         except Exception as e:
             logger.error(f"Error fetching library items: {e}", exc_info=True)

app/services/token_store.py

Lines changed: 2 additions & 45 deletions
@@ -1,5 +1,4 @@
 import base64
-import contextvars
 import json
 from collections.abc import AsyncIterator
 from typing import Any
@@ -25,10 +24,7 @@ def __init__(self) -> None:
         self._client: redis.Redis | None = None
         # Negative cache for missing tokens to avoid repeated Redis GETs
         # when external probes request non-existent tokens.
-        self._missing_tokens: TTLCache = TTLCache(maxsize=10000, ttl=3600)
-        # per-request redis call counter (context-local)
-        self._redis_calls_var: contextvars.ContextVar[int] = contextvars.ContextVar("watchly_redis_calls", default=0)
-
+        self._missing_tokens: TTLCache = TTLCache(maxsize=10000, ttl=86400)
         if not settings.REDIS_URL:
             logger.warning("REDIS_URL is not set. Token storage will fail until a Redis instance is configured.")

@@ -82,9 +78,6 @@ async def _get_client(self) -> redis.Redis:
                 health_check_interval=30,
                 socket_keepalive=True,
             )
-            # If _get_client is called multiple times in different contexts it
-            # could indicate multiple processes/threads or a bug opening
-            # additional clients; log a stacktrace for debugging.
             if getattr(self, "_creation_count", None) is None:
                 self._creation_count = 1
             else:
@@ -148,18 +141,10 @@ async def store_user_data(self, user_id: str, payload: dict[str, Any]) -> str:
         json_str = json.dumps(storage_data)

         if settings.TOKEN_TTL_SECONDS and settings.TOKEN_TTL_SECONDS > 0:
-            self._incr_calls()
             await client.setex(key, settings.TOKEN_TTL_SECONDS, json_str)
         else:
-            self._incr_calls()
             await client.set(key, json_str)

-        # Invalidate async LRU cached reads so future reads use the updated payload
-        try:
-            self.get_user_data.cache_clear()
-        except Exception:
-            pass
-
         # Ensure we remove from negative cache so new value is read next time
         try:
             if token in self._missing_tokens:
@@ -169,7 +154,7 @@ async def store_user_data(self, user_id: str, payload: dict[str, Any]) -> str:

         return token

-    @alru_cache(maxsize=5000)
+    @alru_cache(maxsize=10000, ttl=43200)
     async def get_user_data(self, token: str) -> dict[str, Any] | None:
         # Short-circuit for tokens known to be missing
         try:
@@ -182,7 +167,6 @@ async def get_user_data(self, token: str) -> dict[str, Any] | None:
         logger.debug(f"[REDIS] Cache miss. Fetching data from redis for {token}")
         key = self._format_key(token)
         client = await self._get_client()
-        self._incr_calls()
         data_raw = await client.get(key)

         if not data_raw:
@@ -208,14 +192,8 @@ async def delete_token(self, token: str = None, key: str = None) -> None:
             key = self._format_key(token)

         client = await self._get_client()
-        self._incr_calls()
         await client.delete(key)

-        # Invalidate async LRU cached reads
-        try:
-            self.get_user_data.cache_clear()
-        except Exception:
-            pass
         # Remove from negative cache as token is deleted
         try:
             if token and token in self._missing_tokens:
@@ -265,7 +243,6 @@ async def iter_payloads(self, batch_size: int = 200) -> AsyncIterator[tuple[str,
         # Flush remainder
         if buffer:
             try:
-                self._incr_calls()
                 values = await client.mget(buffer)
             except (redis.RedisError, OSError) as exc:
                 logger.warning(f"Failed batch fetch for {len(buffer)} keys: {exc}")
@@ -288,25 +265,5 @@ async def iter_payloads(self, batch_size: int = 200) -> AsyncIterator[tuple[str,
         except (redis.RedisError, OSError) as exc:
             logger.warning(f"Failed to scan credential tokens: {exc}")

-    # ---- Diagnostics ----
-    def _incr_calls(self) -> None:
-        try:
-            current = self._redis_calls_var.get()
-            self._redis_calls_var.set(current + 1)
-        except Exception:
-            pass
-
-    def reset_call_counter(self) -> None:
-        try:
-            self._redis_calls_var.set(0)
-        except Exception:
-            pass
-
-    def get_call_count(self) -> int:
-        try:
-            return int(self._redis_calls_var.get())
-        except Exception:
-            return 0
-

 token_store = TokenStore()
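The read path now leans entirely on async_lru: get_user_data is cached with maxsize=10000 and a 12-hour ttl (43200s), and the explicit cache_clear() calls on writes and deletes are gone. A minimal, self-contained illustration of how an alru_cache ttl behaves; the lookup function and one-second ttl are invented for the demo.

import asyncio

from async_lru import alru_cache

calls = 0


@alru_cache(maxsize=10, ttl=1)
async def lookup(token: str) -> int:
    """Counts real lookups so cache hits are visible."""
    global calls
    calls += 1
    return calls


async def main() -> None:
    print(await lookup("t"))   # 1: first call, cache miss
    print(await lookup("t"))   # 1: served from cache
    await asyncio.sleep(1.1)   # let the 1s ttl expire
    print(await lookup("t"))   # 2: expired entry is refreshed


asyncio.run(main())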
