Commit 5e7bf53

feat: add gemini service to generate catalog names (#29)
1 parent 5b4bc0f commit 5e7bf53

8 files changed: +208 -81 lines changed

app/api/endpoints/catalogs.py

Lines changed: 1 addition & 1 deletion
@@ -83,7 +83,7 @@ async def get_catalog(type: str, id: str, response: Response, token: str):

         logger.info(f"Returning {len(recommendations)} items for {type}")
         # Cache catalog responses for 4 hours
-        response.headers["Cache-Control"] = "public, max-age=14400"
+        response.headers["Cache-Control"] = "public, max-age=14400" if len(recommendations) > 0 else "no-cache"
         return {"metas": recommendations}

     except HTTPException:
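A minimal standalone illustration of the new caching rule (the values below are made up, not from the repo): a non-empty catalog keeps the 4-hour cache header, while an empty result is sent back with no-cache.

    recommendations: list[dict] = []  # hypothetical empty catalog result
    cache_control = "public, max-age=14400" if len(recommendations) > 0 else "no-cache"
    print(cache_control)  # -> "no-cache"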

app/core/config.py

Lines changed: 4 additions & 0 deletions
@@ -36,6 +36,10 @@ class Settings(BaseSettings):

     RECOMMENDATION_SOURCE_ITEMS_LIMIT: int = 10

+    # AI
+    DEFAULT_GEMINI_MODEL: str = "gemma-3-27b-it"
+    GEMINI_API_KEY: str | None = None
+

 settings = Settings()
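For reference, a minimal sketch of how these settings are expected to be populated, assuming the usual pydantic-settings behaviour (DemoSettings and the key value are hypothetical): BaseSettings reads matching environment variables, so GEMINI_API_KEY can come from the environment or a .env file and stays None otherwise, which keeps the Gemini features optional.

    import os

    from pydantic_settings import BaseSettings


    class DemoSettings(BaseSettings):  # hypothetical stand-in for app.core.config.Settings
        DEFAULT_GEMINI_MODEL: str = "gemma-3-27b-it"
        GEMINI_API_KEY: str | None = None


    os.environ["GEMINI_API_KEY"] = "example-key"  # hypothetical value
    print(DemoSettings().GEMINI_API_KEY)  # -> "example-key"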

app/services/catalog_updater.py

Lines changed: 6 additions & 0 deletions
@@ -13,6 +13,7 @@
 from app.services.catalog import DynamicCatalogService
 from app.services.stremio_service import StremioService
 from app.services.token_store import token_store
+from app.services.translation import translation_service

 # Max number of concurrent updates to prevent overwhelming external APIs
 MAX_CONCURRENT_UPDATES = 5
@@ -50,6 +51,11 @@ async def refresh_catalogs_for_credentials(token: str, credentials: dict[str, An
         catalogs = await dynamic_catalog_service.get_dynamic_catalogs(
             library_items=library_items, user_settings=user_settings
         )
+
+        if user_settings and user_settings.language:
+            for cat in catalogs:
+                if name := cat.get("name"):
+                    cat["name"] = await translation_service.translate(name, user_settings.language)
         logger.info(f"[{redact_token(token)}] Prepared {len(catalogs)} catalogs")
         return await stremio_service.update_catalogs(catalogs, auth_key)
     except Exception as e:

app/services/gemini.py

Lines changed: 56 additions & 0 deletions
@@ -0,0 +1,56 @@
+from google import genai
+from loguru import logger
+
+from app.core.config import settings
+
+
+class GeminiService:
+    def __init__(self, model: str = settings.DEFAULT_GEMINI_MODEL):
+        self.model = model
+        self.client = None
+        if api_key := settings.GEMINI_API_KEY:
+            try:
+                self.client = genai.Client(api_key=api_key)
+            except Exception as e:
+                logger.warning(f"Failed to initialize Gemini client: {e}")
+        else:
+            logger.warning("GEMINI_API_KEY not set. Gemini features will be disabled.")
+
+    @staticmethod
+    def get_prompt():
+        return """
+        You are a content catalog naming expert.
+        Given filters like genre, keywords, countries, or years, generate natural,
+        engaging catalog row titles that streaming platforms would use.
+
+        Examples:
+        - Genre: Action, Country: South Korea → "Korean Action Thrillers"
+        - Keyword: "space", Genre: Sci-Fi → "Space Exploration Adventures"
+        - Genre: Drama, Country: France → "Acclaimed French Cinema"
+        - Country: "USA" + Genre: "Sci-Fi and Fantasy" → "Hollywood Sci-Fi and Fantasy"
+        - Keywords: "revenge" + "martial arts" → "Revenge & Martial Arts"
+
+        Keep titles:
+        - Short (2-5 words)
+        - Natural and engaging
+        - Focused on what makes the content appealing
+        - Only return a single best title and nothing else.
+        """
+
+    def generate_content(self, prompt: str) -> str:
+        system_prompt = self.get_prompt()
+        if not self.client:
+            logger.warning("Gemini client not initialized. Gemini features will be disabled.")
+            return ""
+        try:
+            response = self.client.models.generate_content(
+                model=self.model,
+                contents=system_prompt + "\n\n" + prompt,
+            )
+            return response.text.strip()
+        except Exception as e:
+            logger.error(f"Error generating content: {e}")
+            return ""
+
+
+gemini_service = GeminiService()
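For orientation, a small sketch of how the rest of this commit consumes the new service (the prompt format mirrors the examples in get_prompt(); the fallback title below is hypothetical): generate_content() returns an empty string when the client is missing or the API call fails, so callers in row_generator.py always keep a non-AI fallback.

    from app.services.gemini import gemini_service

    title = gemini_service.generate_content("Keywords: revenge + martial arts")
    if not title:
        # Fallback used when GEMINI_API_KEY is unset or the request fails (hypothetical title)
        title = "Revenge Martial Arts"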

app/services/row_generator.py

Lines changed: 44 additions & 21 deletions
@@ -3,6 +3,7 @@
 from pydantic import BaseModel

 from app.models.profile import UserTasteProfile
+from app.services.gemini import gemini_service
 from app.services.tmdb.countries import COUNTRY_ADJECTIVES
 from app.services.tmdb.genre import movie_genres, series_genres
 from app.services.tmdb_service import TMDBService
@@ -62,38 +63,54 @@ def get_cname(code):
                 return random.choice(adjectives)
             return ""

-        # Strategy 1: Pure Keyword Row (Top Priority)
+        # Strategy 1: Combined Keyword Row (Top Priority)
         if top_keywords:
-            k_id = top_keywords[0][0]
-            kw_name = await self._get_keyword_name(k_id)
-            if kw_name:
+            k_id1 = top_keywords[0][0]
+            kw_name1 = await self._get_keyword_name(k_id1)
+
+            use_single_keyword_row = True
+            if len(top_keywords) >= 2:
+                k_id2 = top_keywords[1][0]
+                kw_name2 = await self._get_keyword_name(k_id2)
+                title = ""
+                if kw_name1 and kw_name2:
+                    title = gemini_service.generate_content(f"Keywords: {kw_name1} + {kw_name2}")
+
+                if title:
+                    rows.append(
+                        RowDefinition(
+                            title=title,
+                            id=f"watchly.theme.k{k_id1}.k{k_id2}",
+                            keywords=[k_id1, k_id2],
+                        )
+                    )
+                    use_single_keyword_row = False
+
+            if use_single_keyword_row and kw_name1:
                 rows.append(
                     RowDefinition(
-                        title=f"{normalize_keyword(kw_name)}",
-                        id=f"watchly.theme.k{k_id}",
-                        keywords=[k_id],
+                        title=normalize_keyword(kw_name1),
+                        id=f"watchly.theme.k{k_id1}",
+                        keywords=[k_id1],
                     )
                 )

         # Strategy 2: Keyword + Genre (Specific Niche)
-        if top_genres and len(top_keywords) > 1:
+        if top_genres and len(top_keywords) > 2:
             g_id = top_genres[0][0]
             # get random keywords: Just to surprise user in every refresh
-            k_id = random.choice(top_keywords[1:])[0]
+            k_id = random.choice(top_keywords[2:])[0]

             if k_id:
                 kw_name = await self._get_keyword_name(k_id)
                 if kw_name:
-                    title = f"{normalize_keyword(kw_name)} {get_gname(g_id)}"
-                    # keyword and genre can have same name sometimes, remove if so
-                    words = title.split()
-                    seen_words = set()
-                    unique_words = []
-                    for word in words:
-                        if word not in seen_words:
-                            unique_words.append(word)
-                            seen_words.add(word)
-                    title = " ".join(unique_words)
+                    title = gemini_service.generate_content(
+                        f"Genre: {get_gname(g_id)} + Keyword: {normalize_keyword(kw_name)}"
+                    )
+                    if not title:
+                        title = f"{get_gname(g_id)} {normalize_keyword(kw_name)}"
+                        # keyword and genre can have same name sometimes, remove if so
+                        title = " ".join(dict.fromkeys(title.split()))

                     rows.append(
                         RowDefinition(
@@ -110,9 +127,12 @@ def get_cname(code):
             c_code = top_countries[0][0]
             c_adj = get_cname(c_code)
             if c_adj:
+                title = gemini_service.generate_content(f"Genre: {get_gname(g_id)} + Country: {c_adj}")
+                if not title:
+                    title = f"{get_gname(g_id)} {c_adj}"
                 rows.append(
                     RowDefinition(
-                        title=f"{c_adj} {get_gname(g_id)}",
+                        title=title,
                         id=f"watchly.theme.g{g_id}.ct{c_code}",  # ct for country
                         genres=[g_id],
                         country=c_code,
@@ -130,9 +150,12 @@ def get_cname(code):
             # # Only do this if decade is valid and somewhat old (nostalgia factor)
             if 1970 <= decade_start <= 2010:
                 decade_str = str(decade_start)[2:] + "s"  # "90s"
+                title = gemini_service.generate_content(f"Genre: {get_gname(g_id)} + Era: {decade_str}")
+                if not title:
+                    title = f"{get_gname(g_id)} {decade_str}"
                 rows.append(
                     RowDefinition(
-                        title=f"{decade_str} {get_gname(g_id)}",
+                        title=title,
                         id=f"watchly.theme.g{g_id}.y{decade_start}",
                         genres=[g_id],
                         year_range=(decade_start, decade_start + 9),
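One detail worth calling out in the fallback paths above: the old seen_words loop is replaced by dict.fromkeys(), which drops repeated words while preserving order. A tiny standalone illustration (the title is made up):

    title = "Action Action Movies"  # hypothetical case where keyword and genre share a word
    title = " ".join(dict.fromkeys(title.split()))
    print(title)  # -> "Action Movies"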

pyproject.toml

Lines changed: 1 addition & 0 deletions
@@ -11,6 +11,7 @@ dependencies = [
     "cryptography>=46.0.3",
     "deep-translator>=1.11.4",
     "fastapi>=0.104.1",
+    "google-genai>=1.54.0",
     "httpx>=0.25.2",
     "loguru>=0.7.2",
     "pydantic>=2.5.0",

requirements.txt

Lines changed: 14 additions & 59 deletions
@@ -1,59 +1,14 @@
-annotated-doc==0.0.4
-annotated-types==0.7.0
-anyio==4.11.0
-apscheduler==3.11.1
-async-lru==2.0.5
-async-timeout==5.0.1
-beautifulsoup4==4.14.3
-black==25.11.0
-cachetools==6.2.2
-certifi==2025.11.12
-cffi==2.0.0
-cfgv==3.5.0
-charset-normalizer==3.4.4
-click==8.3.1
-cryptography==46.0.3
-deep-translator==1.11.4
-distlib==0.4.0
-exceptiongroup==1.3.0
-fastapi==0.121.2
-filelock==3.20.0
-flake9==3.8.3.post2
-h11==0.16.0
-httpcore==1.0.9
-httptools==0.7.1
-httpx==0.28.1
-identify==2.6.15
-idna==3.11
-loguru==0.7.3
-mccabe==0.6.1
-mypy-extensions==1.1.0
-nodeenv==1.9.1
-packaging==25.0
-pathspec==0.12.1
-platformdirs==4.5.0
-pre-commit==4.4.0
-pycodestyle==2.6.0
-pycparser==2.23
-pydantic==2.12.4
-pydantic-core==2.41.5
-pydantic-settings==2.12.0
-pyflakes==2.2.0
-python-dotenv==1.2.1
-pytokens==0.3.0
-pyyaml==6.0.3
-redis==7.1.0
-requests==2.32.5
-sniffio==1.3.1
-soupsieve==2.8
-starlette==0.49.3
-tomli==2.3.0
-typing-extensions==4.15.0
-typing-inspection==0.4.2
-tzlocal==5.3.1
-urllib3==2.6.0
-uvicorn==0.38.0
-uvloop==0.22.1
-virtualenv==20.35.4
-watchfiles==1.1.1
-websockets==15.0.1
+apscheduler>=3.11.1
+async-lru>=2.0.5
+cachetools>=6.2.2
+cryptography>=46.0.3
+deep-translator>=1.11.4
+fastapi>=0.104.1
+google-genai>=1.54.0
+httpx>=0.25.2
+loguru>=0.7.2
+pydantic>=2.5.0
+pydantic-settings>=2.1.0
+redis>=5.0.1
+tomli>=2.3.0
+uvicorn[standard]>=0.24.0
