Skip to content

Commit e2a241e

Browse files
committed
Fix: Update frontend to use live HF backend and fix null-safety in toLowerCase (Excluded models)
1 parent 9be59c3 commit e2a241e

File tree

9 files changed

+172
-17
lines changed

9 files changed

+172
-17
lines changed

.gitignore

Lines changed: 21 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,22 @@
1-
backend/models/
1+
# Python
2+
__pycache__/
3+
*.py[cod]
4+
*$py.class
5+
venv/
6+
env/
27
.env
8+
9+
# Models (Keep them if they are small, but here they are large.
10+
# We should probably only keep the ones we actually use if possible,
11+
# but for now we'll just ignore the venv)
12+
# Models
13+
backend/models/
14+
backend/venv/
15+
16+
# Logs and Data
17+
data/low_confidence_log.json
18+
user_feedback_rows.csv
19+
20+
# Git
21+
.git/
22+
.github/

Frontend/src/components/shared/BugReportWidget.jsx

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ import html2canvas from 'html2canvas';
55
import { supabase } from '../../lib/supabaseClient';
66
import useAuthStore from '../../store/authStore';
77
import useToastStore from '../../store/toastStore';
8+
import { API_CONFIG } from '../../config';
89

910
// Reusable Hook for Auto-Diagnostics
1011
function useDiagnostics() {
@@ -288,7 +289,7 @@ const BugReportWidget = () => {
288289
let probableCause = "Not analyzed";
289290
try {
290291
// Using standard fetch assuming backend is on port 8000
291-
const aiResponse = await fetch("http://localhost:8000/ai/analyze_bug", {
292+
const aiResponse = await fetch(`${API_CONFIG.BACKEND_URL}/ai/analyze_bug`, {
292293
method: "POST",
293294
headers: { "Content-Type": "application/json" },
294295
body: JSON.stringify({

Frontend/src/config.js

Lines changed: 3 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -3,16 +3,11 @@
33
*/
44

55
const getBackendUrl = () => {
6-
// Priority: .env → system default → localhost fallback
76
const envUrl = import.meta.env.VITE_BACKEND_URL;
8-
if (envUrl) return envUrl.trim().replace(/\/$/, ''); // Remove whitespace and trailing slash
7+
if (envUrl) return envUrl.trim().replace(/\/$/, '');
98

10-
// In production, we might want to default to something else
11-
if (import.meta.env.PROD) {
12-
return 'https://ritesh19180-ai-helpdesk-api.hf.space';
13-
}
14-
15-
return 'http://localhost:8000';
9+
// Default to the live Hugging Face Space for stability
10+
return 'https://ritesh19180-ai-helpdesk-api.hf.space';
1611
};
1712

1813
export const API_CONFIG = {

Frontend/src/pages/AiBenchmarking.jsx

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ import {
1515
BrainCircuit
1616
} from 'lucide-react';
1717
import axios from 'axios';
18+
import { API_CONFIG } from '../config';
1819

1920
const AiBenchmarking = () => {
2021
const [inputText, setInputText] = useState('');
@@ -51,17 +52,17 @@ const AiBenchmarking = () => {
5152
try {
5253
// Run V1
5354
const startV1 = performance.now();
54-
const resV1 = await axios.post('http://localhost:8000/ai/analyze_ticket', payload);
55+
const resV1 = await axios.post(`${API_CONFIG.BACKEND_URL}/ai/analyze_ticket`, payload);
5556
const endV1 = performance.now();
5657

5758
// Run V2
5859
const startV2 = performance.now();
59-
const resV2 = await axios.post('http://localhost:8000/ai/analyze-v2', payload);
60+
const resV2 = await axios.post(`${API_CONFIG.BACKEND_URL}/ai/analyze-v2`, payload);
6061
const endV2 = performance.now();
6162

6263
// Run V3
6364
const startV3 = performance.now();
64-
const resV3 = await axios.post('http://localhost:8000/ai/analyze-v3', payload);
65+
const resV3 = await axios.post(`${API_CONFIG.BACKEND_URL}/ai/analyze-v3`, payload);
6566
const endV3 = performance.now();
6667

6768
setResults({

Frontend/src/user/pages/Profile.jsx

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -185,8 +185,8 @@ const Profile = () => {
185185
}
186186

187187
const ticketsCreated = userTickets.length;
188-
const ticketsResolvedByAI = userTickets.filter(t => t.status?.toLowerCase().includes('auto') || t.status?.toLowerCase().includes('resolved')).length;
189-
const ticketsEscalated = userTickets.filter(t => t.status?.toLowerCase().includes('escalat') || t.status?.toLowerCase() === 'open' || t.status?.toLowerCase().includes('pending')).length;
188+
const ticketsResolvedByAI = userTickets.filter(t => t.status?.toLowerCase()?.includes('auto') || t.status?.toLowerCase()?.includes('resolved')).length;
189+
const ticketsEscalated = userTickets.filter(t => t.status?.toLowerCase()?.includes('escalat') || t.status?.toLowerCase() === 'open' || t.status?.toLowerCase()?.includes('pending')).length;
190190

191191
return (
192192
<div className="min-h-screen bg-[#f6f8f7] pb-20">

backend/.dockerignore

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
venv/
2+
backend/venv/
3+
**/__pycache__/
4+
*.pyc
5+
.git/
6+
.github/
7+
Frontend/
8+
Model/
9+
supabase/
10+
README.md
11+
LICENSE
12+
.env
13+
data/
14+
training/
15+
*.csv
16+
# If models are truly huge we could ignore them and download them in the Dockerfile,
17+
# but for now let's just ignore the venv and other bloat.

backend/Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
# Use an official Python runtime as a parent image
22
FROM python:3.10-slim
33

4-
LABEL version="1.1.1" rebuild_trigger="2026-03-08-1930"
4+
LABEL version="1.1.1" rebuild_trigger="2026-03-08-2032"
55

66
# Set the working directory to /app
77
WORKDIR /app

backend/main.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
# Suppress harmless PyTorch CPU pin_memory warning
1515
warnings.filterwarnings("ignore", message="'pin_memory'")
1616

17-
# HF Rebuild Trigger: 2026-03-08
17+
# HF Rebuild Trigger: 2026-03-08-2030
1818
from fastapi import FastAPI, Depends, HTTPException, Request
1919
from fastapi.middleware.cors import CORSMiddleware
2020
from fastapi.responses import HTMLResponse

sync_live.py

Lines changed: 121 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,121 @@
1+
import os
2+
import shutil
3+
import subprocess
4+
import tempfile
5+
6+
# Configuration
# Staging directory for the clean deploy checkout (recreated on every run).
TEMP_DIR = os.path.join(tempfile.gettempdir(), "hf_deploy_clean")
# Target Hugging Face Space git remote.
HF_REPO_URL = "https://huggingface.co/spaces/ritesh19180/ai-helpdesk-api"
# Absolute path to the local backend source tree (Windows-specific raw string).
BACKEND_SRC = r"c:\Projects\Software Projects\AI-Powered-Ticket-Creation-and-Categorization-from-User-Input\backend"

# Files/Dirs to include in deployment
INCLUDE_LIST = [
    "main.py",
    "requirements.txt",
    "Dockerfile",
    "__init__.py",
    # NOTE(review): copying ".env" pushes local secrets into the Space repo.
    # Confirm the Space is private, or inject secrets via Space settings instead.
    ".env",
    "services/",
    "models/classifier/",
    "models/ner/",
    "supabase/",
]
23+
24+
def remove_readonly(func, path, excinfo):
    """`shutil.rmtree` onerror hook: clear the read-only attribute, then retry.

    `func` is the os function that failed (e.g. os.remove/os.rmdir), `path` the
    offending path; `excinfo` is required by the onerror signature but unused.
    """
    from stat import S_IWRITE
    os.chmod(path, S_IWRITE)
    func(path)
28+
29+
def run_cmd(cmd_list, cwd=None):
30+
print(f"Running: {' '.join(cmd_list)}")
31+
result = subprocess.run(cmd_list, cwd=cwd, capture_output=True, text=True)
32+
if result.returncode != 0:
33+
print(f"Error: {result.stderr}")
34+
else:
35+
print(f"Success: {result.stdout[:200]}...")
36+
return result
37+
38+
def deploy():
    """Assemble a clean copy of the backend in a temp directory and force-push
    it to the Hugging Face Space repo, targeting both `main` and `master`.

    Side effects: recreates TEMP_DIR (mutating the global on cleanup failure),
    copies files from BACKEND_SRC, writes a README.md with HF Space metadata,
    and runs git / git-lfs subprocesses via run_cmd (which never raises, so
    individual step failures are printed, not fatal).
    """
    global TEMP_DIR
    # 1. Prepare Clean Temp Directory
    if os.path.exists(TEMP_DIR):
        print(f"Cleaning up existing temp directory: {TEMP_DIR}")
        try:
            # remove_readonly clears Windows read-only bits that block rmtree.
            shutil.rmtree(TEMP_DIR, onerror=remove_readonly)
        except Exception as e:
            print(f"Cleanup warning: {e}")
            # Fallback to a different directory if cleanup fails
            import time
            TEMP_DIR += f"_{int(time.time())}"
        os.makedirs(TEMP_DIR)
    else:
        os.makedirs(TEMP_DIR)

    print(f"Using temp directory: {TEMP_DIR}")

    # 2. Copy Essential Backend Files
    # NOTE(review): INCLUDE_LIST contains ".env" — this stages local secrets
    # for the push below; confirm the Space is private or use Space secrets.
    for item in INCLUDE_LIST:
        src_path = os.path.join(BACKEND_SRC, item.strip("/"))
        dst_path = os.path.join(TEMP_DIR, item.strip("/"))

        if os.path.isdir(src_path):
            shutil.copytree(src_path, dst_path)
            print(f"Copied directory: {item}")
        elif os.path.isfile(src_path):
            # Ensure parent exists
            os.makedirs(os.path.dirname(dst_path), exist_ok=True)
            shutil.copy2(src_path, dst_path)
            print(f"Copied file: {item}")
        else:
            # Missing entries are skipped rather than aborting the deploy.
            print(f"Warning: {item} not found, skipping.")

    # 2.1 Generate Hugging Face README with Metadata
    # The YAML front matter (sdk: docker, title, etc.) is what HF Spaces reads
    # to configure the Space build.
    hf_readme_content = """---
title: AI Helpdesk API
emoji: 🚀
colorFrom: green
colorTo: blue
sdk: docker
pinned: false
---

# HELPDESK.AI - API Engine
Intelligent ticket classification and routing backend.
"""
    with open(os.path.join(TEMP_DIR, "README.md"), "w", encoding="utf-8") as f:
        f.write(hf_readme_content)
    print("Generated Hugging Face README.md metadata")

    # 3. Setup Git and Push
    run_cmd(["git", "init"], cwd=TEMP_DIR)

    # 3.1 Setup Git LFS for Large Models
    # Model weights exceed plain-git limits, so track binary formats via LFS.
    run_cmd(["git", "lfs", "install"], cwd=TEMP_DIR)
    run_cmd(["git", "lfs", "track", "*.safetensors", "*.pt", "*.bin", "*.pkl", "*.h5"], cwd=TEMP_DIR)

    # Local identity/config so the commit works on machines without global git config.
    run_cmd(["git", "config", "user.name", "Antigravity"], cwd=TEMP_DIR)
    run_cmd(["git", "config", "user.email", "ai-agent@helpdesk.ai"], cwd=TEMP_DIR)
    run_cmd(["git", "config", "commit.gpgsign", "false"], cwd=TEMP_DIR)

    # Add .gitattributes first so LFS tracking applies to the bulk add.
    run_cmd(["git", "add", ".gitattributes"], cwd=TEMP_DIR)
    run_cmd(["git", "add", "."], cwd=TEMP_DIR)
    run_cmd(["git", "commit", "-m", "Deploy clean backend from scratch with Git LFS"], cwd=TEMP_DIR)

    # Get the branch name (master or main)
    res = run_cmd(["git", "rev-parse", "--abbrev-ref", "HEAD"], cwd=TEMP_DIR)
    branch = res.stdout.strip()
    if not branch or branch == "HEAD": branch = "main" # Fallback

    run_cmd(["git", "remote", "add", "origin", HF_REPO_URL], cwd=TEMP_DIR)
    print(f"Pushing to Hugging Face (both main and master branches)...")

    # Try pushing to main vs master
    # Force-push (-f) overwrites remote history; both names are tried because
    # some Spaces use main and some use master.
    run_cmd(["git", "push", "-f", "origin", f"{branch}:main"], cwd=TEMP_DIR)
    run_cmd(["git", "push", "-f", "origin", f"{branch}:master"], cwd=TEMP_DIR)

    # We exit with success if at least one worked (some spaces use main, some master)
    print("\n=== DEPLOYMENT COMPLETED ===")
    print("HF Space should be rebuilding now. Please wait a few minutes.")
119+
120+
if __name__ == "__main__":
121+
deploy()

0 commit comments

Comments
 (0)