-
Notifications
You must be signed in to change notification settings - Fork 139
Feat: Added Creator to brand and brand to creator matching...plus added some functional elements to the logged user homepage and made google auth functional in Signup/Login. #83
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
b55b7eb
e3e1c2e
0eb0b1d
a9c06cf
dcb9ab2
9023356
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,101 @@ | ||
| # FastAPI router for AI-powered endpoints, including trending niches | ||
| from fastapi import APIRouter, HTTPException, Query | ||
| from datetime import date | ||
| import os | ||
| import requests | ||
| import json | ||
| from supabase import create_client, Client | ||
| from requests.adapters import HTTPAdapter | ||
| from urllib3.util.retry import Retry | ||
|
|
||
# Initialize router
router = APIRouter()

# Load environment variables for Supabase and Gemini
SUPABASE_URL = os.environ.get("SUPABASE_URL")
SUPABASE_KEY = os.environ.get("SUPABASE_KEY")
GEMINI_API_KEY = os.environ.get("GEMINI_API_KEY")

# Fail fast at import time, naming exactly which variables are missing so
# misconfiguration is easy to diagnose instead of reporting all three at once.
_required_env = {
    "SUPABASE_URL": SUPABASE_URL,
    "SUPABASE_KEY": SUPABASE_KEY,
    "GEMINI_API_KEY": GEMINI_API_KEY,
}
_missing_env = [name for name, value in _required_env.items() if not value]
if _missing_env:
    raise ValueError(
        f"Missing required environment variables: {', '.join(_missing_env)}"
    )

# Module-wide Supabase client shared by every endpoint below.
supabase: Client = create_client(SUPABASE_URL, SUPABASE_KEY)
|
|
||
def fetch_from_gemini():
    """Fetch this week's trending content niches from the Gemini API.

    Returns:
        list[dict]: Objects with keys ``name``, ``insight`` and
        ``global_activity`` as requested in the prompt.

    Raises:
        RuntimeError: If Gemini returns a payload that is not valid JSON.
        requests.HTTPError: If the API call still fails after retries.
    """
    prompt = (
        "List the top 6 trending content niches for creators and brands this week. For each, provide: name (the niche), insight (a short qualitative reason why it's trending), and global_activity (a number from 1 to 5, where 5 means very high global activity in this category, and 1 means low).Return as a JSON array of objects with keys: name, insight, global_activity."
    )
    url = f"https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash-lite:generateContent?key={GEMINI_API_KEY}"
    # Retry transient failures (rate limiting and 5xx) with exponential backoff.
    retry_strategy = Retry(
        total=3,
        backoff_factor=1,
        status_forcelist=[429, 500, 502, 503, 504],
        allowed_methods=["POST"],
    )
    adapter = HTTPAdapter(max_retries=retry_strategy)
    http = requests.Session()
    http.mount("https://", adapter)
    http.mount("http://", adapter)
    resp = http.post(url, json={"contents": [{"parts": [{"text": prompt}]}]}, timeout=(3.05, 10))
    resp.raise_for_status()
    # Deliberately do NOT log resp.text or the parsed payload: the raw API
    # response could expose sensitive data in logs.
    data = resp.json()
    text = data['candidates'][0]['content']['parts'][0]['text']
    # Strip a Markdown code fence (```json ... ```) if Gemini wrapped its output.
    if text.strip().startswith('```'):
        text = text.strip().split('\n', 1)[1]  # Drop the opening ``` line
        text = text.rsplit('```', 1)[0]  # Drop the closing ```
    text = text.strip()
    try:
        return json.loads(text)
    except ValueError as exc:
        # Surface a clear, chained error instead of letting a bare
        # JSONDecodeError escape when Gemini returns prose instead of JSON.
        raise RuntimeError("Gemini returned a non-JSON payload") from exc
|
Comment on lines
+46
to
+53
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. 🛠️ Refactor suggestion Unprotected JSON parse may explode on malformed output If Gemini returns non-JSON or extra prose, - return json.loads(text)
+ try:
+ return json.loads(text)
+ except ValueError as exc:
+ logger.warning("Gemini returned non-JSON payload: %s", text[:120])
+ raise RuntimeError("Gemini payload parsing failed") from exc
🤖 Prompt for AI Agents |
||
|
|
||
@router.get("/api/trending-niches")
def trending_niches():
    """
    API endpoint to get trending niches for the current day.
    - If today's data exists in Supabase, return it.
    - Otherwise, fetch from Gemini, store in Supabase, and return the new data.
    - If Gemini fails, fallback to the most recent data available.
    """
    today = str(date.today())
    # Check if today's data exists in Supabase
    result = supabase.table("trending_niches").select("*").eq("fetched_at", today).execute()
    if not result.data:
        # Fetch from Gemini and store
        try:
            niches = fetch_from_gemini()
            # Build all rows first and insert them in one batched request
            # instead of one network round trip per niche.
            rows = [
                {
                    "name": niche["name"],
                    "insight": niche["insight"],
                    "global_activity": int(niche["global_activity"]),
                    "fetched_at": today,
                }
                for niche in niches
            ]
            if rows:
                supabase.table("trending_niches").insert(rows).execute()
            result = supabase.table("trending_niches").select("*").eq("fetched_at", today).execute()
        except Exception as e:
            # Best-effort fallback: any failure (network, parsing, bad keys in
            # the Gemini payload) degrades to serving the most recent rows.
            print("Gemini fetch failed:", e)
            result = supabase.table("trending_niches").select("*").order("fetched_at", desc=True).limit(6).execute()
    return result.data
|
|
||
youtube_router = APIRouter(prefix="/youtube", tags=["YouTube"])


@youtube_router.get("/channel-info")
def get_youtube_channel_info(channelId: str = Query(..., description="YouTube Channel ID")):
    """
    Proxy endpoint to fetch YouTube channel info securely from the backend.
    The API key is kept secret and rate limiting can be enforced here.
    """
    api_key = os.getenv("YOUTUBE_API_KEY")
    if not api_key:
        raise HTTPException(status_code=500, detail="YouTube API key not configured on server.")
    # Pass the user-supplied channel id and the secret key as query params so
    # requests URL-encodes them, rather than interpolating raw input into the URL.
    url = "https://www.googleapis.com/youtube/v3/channels"
    params = {"part": "snippet,statistics", "id": channelId, "key": api_key}
    try:
        resp = requests.get(url, params=params, timeout=10)
        resp.raise_for_status()
        return resp.json()
    except requests.RequestException as e:
        # Do not echo str(e): requests exceptions can include the full request
        # URL, which contains the secret API key.
        raise HTTPException(status_code=502, detail=f"YouTube API error: {type(e).__name__}")
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,7 @@ | ||
| from fastapi import APIRouter | ||
|
|
||
router = APIRouter()


@router.get("/auth/ping")
def ping():
    """Health-check endpoint confirming the auth router is mounted."""
    response = {"message": "Auth route is working!"}
    return response
| Original file line number | Diff line number | Diff line change | ||||||||||||||||||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
|
|
@@ -7,12 +7,12 @@ | |||||||||||||||||||||||||||||
| HTTPException, | ||||||||||||||||||||||||||||||
| ) | ||||||||||||||||||||||||||||||
| from sqlalchemy.ext.asyncio import AsyncSession | ||||||||||||||||||||||||||||||
| from db.db import get_db | ||||||||||||||||||||||||||||||
| from services.chat_services import chat_service | ||||||||||||||||||||||||||||||
| from ..db.db import get_db | ||||||||||||||||||||||||||||||
| from ..services.chat_services import chat_service | ||||||||||||||||||||||||||||||
| from redis.asyncio import Redis | ||||||||||||||||||||||||||||||
| from services.redis_client import get_redis | ||||||||||||||||||||||||||||||
| from ..services.redis_client import get_redis | ||||||||||||||||||||||||||||||
| import asyncio | ||||||||||||||||||||||||||||||
| from services.chat_pubsub import listen_to_channel | ||||||||||||||||||||||||||||||
| from ..services.chat_pubsub import listen_to_channel | ||||||||||||||||||||||||||||||
|
Comment on lines
+10
to
+15
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. 💡 Verification agent 🧩 Analysis chain: Verify the relative import structure to avoid import resolution issues. The change to relative imports may cause issues with the package structure. Static analysis tools are flagging these as "relative imports beyond top-level package." Please verify the current package structure and ensure these relative imports work correctly in your deployment environment. Consider reverting to absolute imports if the relative imports cause issues in your deployment environment: -from ..db.db import get_db
-from ..services.chat_services import chat_service
-from ..services.redis_client import get_redis
-from ..services.chat_pubsub import listen_to_channel
+from app.db.db import get_db
+from app.services.chat_services import chat_service
+from app.services.redis_client import get_redis
+from app.services.chat_pubsub import listen_to_channel🏁 Script executed: #!/bin/bash
# Description: Check the package structure and verify import paths
# Check the directory structure
echo "=== Backend directory structure ==="
find Backend -type f -name "*.py" | head -20
echo -e "\n=== Checking if the imported modules exist ==="
# Check if the imported modules exist at the expected paths
ls -la Backend/app/db/db.py
ls -la Backend/app/services/chat_services.py
ls -la Backend/app/services/redis_client.py
ls -la Backend/app/services/chat_pubsub.py
echo -e "\n=== Checking for __init__.py files ==="
# Check for __init__.py files that make directories Python packages
find Backend -name "__init__.py" -type fLength of output: 1409 Ensure package initialization or switch to absolute imports to resolve E0402 Verified that all referenced modules exist under Backend/app, but no • Add empty
• Or update imports in -from ..db.db import get_db
-from ..services.chat_services import chat_service
-from ..services.redis_client import get_redis
-from ..services.chat_pubsub import listen_to_channel
+from app.db.db import get_db
+from app.services.chat_services import chat_service
+from app.services.redis_client import get_redis
+from app.services.chat_pubsub import listen_to_channel📝 Committable suggestion
Suggested change
🧰 Tools🪛 Pylint (3.3.7)[error] 10-10: Attempted relative import beyond top-level package (E0402) [error] 11-11: Attempted relative import beyond top-level package (E0402) [error] 13-13: Attempted relative import beyond top-level package (E0402) [error] 15-15: Attempted relative import beyond top-level package (E0402) 🤖 Prompt for AI Agents |
||||||||||||||||||||||||||||||
|
|
||||||||||||||||||||||||||||||
| router = APIRouter(prefix="/chat", tags=["Chat"]) | ||||||||||||||||||||||||||||||
|
|
||||||||||||||||||||||||||||||
|
|
||||||||||||||||||||||||||||||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,29 @@ | ||
| from fastapi import APIRouter, HTTPException | ||
| from supabase import create_client, Client | ||
| import os | ||
| from dotenv import load_dotenv | ||
| from ..services.db_service import match_creators_for_brand, match_brands_for_creator | ||
|
|
||
| # Load environment variables | ||
| # load_dotenv() | ||
| # url: str = os.getenv("SUPABASE_URL") | ||
| # key: str = os.getenv("SUPABASE_KEY") | ||
| # supabase: Client = create_client(url, key) | ||
|
|
||
router = APIRouter(prefix="/match", tags=["Matching"])


@router.get("/creators-for-brand/{sponsorship_id}")
def get_creators_for_brand(sponsorship_id: str):
    """Return creators matched to the given brand sponsorship, or 404 if none."""
    matches = match_creators_for_brand(sponsorship_id)
    if matches:
        return {"matches": matches}
    raise HTTPException(status_code=404, detail="No matching creators found.")


@router.get("/brands-for-creator/{creator_id}")
def get_brands_for_creator(creator_id: str):
    """Return brand campaigns matched to the given creator, or 404 if none."""
    matches = match_brands_for_creator(creator_id)
    if matches:
        return {"matches": matches}
    raise HTTPException(status_code=404, detail="No matching brand campaigns found.")
| Original file line number | Diff line number | Diff line change | ||||||||||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| @@ -1,12 +1,12 @@ | ||||||||||||||||||||||
| from fastapi import APIRouter, Depends, HTTPException | ||||||||||||||||||||||
| from sqlalchemy.ext.asyncio import AsyncSession | ||||||||||||||||||||||
| from sqlalchemy.future import select | ||||||||||||||||||||||
| from db.db import AsyncSessionLocal | ||||||||||||||||||||||
| from models.models import ( | ||||||||||||||||||||||
| from ..db.db import AsyncSessionLocal | ||||||||||||||||||||||
| from ..models.models import ( | ||||||||||||||||||||||
| User, AudienceInsights, Sponsorship, UserPost, | ||||||||||||||||||||||
| SponsorshipApplication, SponsorshipPayment, Collaboration | ||||||||||||||||||||||
| ) | ||||||||||||||||||||||
| from schemas.schema import ( | ||||||||||||||||||||||
| from ..schemas.schema import ( | ||||||||||||||||||||||
| UserCreate, AudienceInsightsCreate, SponsorshipCreate, UserPostCreate, | ||||||||||||||||||||||
| SponsorshipApplicationCreate, SponsorshipPaymentCreate, CollaborationCreate | ||||||||||||||||||||||
| ) | ||||||||||||||||||||||
|
Comment on lines
+4
to
12
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Remove unused imports to clean up the codebase. The static analysis tools correctly identified that these imports are unused. This file uses Supabase client directly for database operations, making the SQLAlchemy imports ( Remove the unused imports: -from ..db.db import AsyncSessionLocal
-from ..models.models import (
- User, AudienceInsights, Sponsorship, UserPost,
- SponsorshipApplication, SponsorshipPayment, Collaboration
-)
-from ..schemas.schema import (
- UserCreate, AudienceInsightsCreate, SponsorshipCreate, UserPostCreate,
- SponsorshipApplicationCreate, SponsorshipPaymentCreate, CollaborationCreate
-)📝 Committable suggestion
Suggested change
🧰 Tools🪛 Ruff (0.11.9)4-4: Remove unused import: (F401) 6-6: Remove unused import (F401) 6-6: Remove unused import (F401) 6-6: Remove unused import (F401) 6-6: Remove unused import (F401) 7-7: Remove unused import (F401) 7-7: Remove unused import (F401) 7-7: Remove unused import (F401) 🪛 Flake8 (7.2.0)[error] 4-4: '..db.db.AsyncSessionLocal' imported but unused (F401) [error] 5-5: '..models.models.User' imported but unused (F401) [error] 5-5: '..models.models.AudienceInsights' imported but unused (F401) [error] 5-5: '..models.models.Sponsorship' imported but unused (F401) [error] 5-5: '..models.models.UserPost' imported but unused (F401) [error] 5-5: '..models.models.SponsorshipApplication' imported but unused (F401) [error] 5-5: '..models.models.SponsorshipPayment' imported but unused (F401) [error] 5-5: '..models.models.Collaboration' imported but unused (F401) 🪛 Pylint (3.3.7)[error] 4-4: Attempted relative import beyond top-level package (E0402) [error] 5-8: Attempted relative import beyond top-level package (E0402) [error] 9-12: Attempted relative import beyond top-level package (E0402) 🤖 Prompt for AI Agents |
||||||||||||||||||||||
|
|
@@ -44,7 +44,6 @@ async def create_user(user: UserCreate): | |||||||||||||||||||||
| "id": user_id, | ||||||||||||||||||||||
| "username": user.username, | ||||||||||||||||||||||
| "email": user.email, | ||||||||||||||||||||||
| "password_hash": user.password_hash, | ||||||||||||||||||||||
| "role": user.role, | ||||||||||||||||||||||
| "profile_image": user.profile_image, | ||||||||||||||||||||||
| "bio": user.bio, | ||||||||||||||||||||||
|
|
||||||||||||||||||||||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Fix timezone inconsistency in timestamp handling.
The User model now uses timezone-naive
`datetime.utcnow()` with `TIMESTAMP` columns, while other models in the same file still use timezone-aware `datetime.now(timezone.utc)` with `DateTime(timezone=True)`. This inconsistency can cause issues when comparing timestamps across different models or when the database expects consistent timezone handling. Consider one of these solutions:
Option 1 (Recommended): Use timezone-aware timestamps consistently
Option 2: Update all models to use TIMESTAMP consistently
# Update all other DateTime(timezone=True) columns to use TIMESTAMP with datetime.utcnow📝 Committable suggestion
🤖 Prompt for AI Agents