Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 15 additions & 0 deletions AKSHAT_YADAV/CHAT-APP/.env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# .env.example

# LLM Keys
OPENROUTER_API_KEY=

# Firebase
FIREBASE_API_KEY=
FIREBASE_PROJECT_ID=
FIREBASE_AUTH_DOMAIN=

# PostgreSQL
POSTGRES_URL=postgresql://username:password@localhost:5432/llm_chat_db

# App
SECRET_KEY=
14 changes: 14 additions & 0 deletions AKSHAT_YADAV/CHAT-APP/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
.env
__pycache__/
venv/
*.pyc
*.pyo
*.pyd
.DS_Store
*.sqlite3
*.log
node_modules/
FRONTEND/__pycache__/
BACKEND/__pycache__/
FRONTEND/venv/
BACKEND/venv/
Empty file.
9 changes: 9 additions & 0 deletions AKSHAT_YADAV/CHAT-APP/BACKEND/db.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
import os
import psycopg2
from dotenv import load_dotenv

load_dotenv()

def get_db_connection():
    """Open and return a new PostgreSQL connection.

    The connection string is read from the POSTGRES_URL environment
    variable (populated from .env by load_dotenv above). Callers are
    responsible for closing the returned connection.
    """
    return psycopg2.connect(os.getenv("POSTGRES_URL"))
126 changes: 126 additions & 0 deletions AKSHAT_YADAV/CHAT-APP/BACKEND/main.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,126 @@
# backend/main.py
from fastapi import FastAPI, HTTPException, Body
from fastapi.middleware.cors import CORSMiddleware
from db import get_db_connection
from pydantic import BaseModel
from models.openrouter import call_openrouter
import uuid
import os
import psycopg2
from typing import List, Optional
from dotenv import load_dotenv

# Load environment variables from .env into os.environ.
# NOTE(review): this runs AFTER `from models.openrouter import call_openrouter`
# above, so any os.getenv() executed at import time inside that module may not
# see values defined only in .env — confirm key loading order.
load_dotenv()

app = FastAPI()

# Add CORS middleware so the separately-served frontend can call this API.
# NOTE(review): wildcard origins together with allow_credentials=True is very
# permissive — acceptable for local development, should be restricted to the
# real frontend origin before deployment.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Allows all origins
    allow_credentials=True,
    allow_methods=["*"],  # Allows all methods
    allow_headers=["*"],  # Allows all headers
)

@app.get("/")
def read_root():
    """Health-check endpoint: confirms the backend is up and reachable."""
    status_payload = {"msg": "LLM Chat Backend is running"}
    return status_payload


# Pydantic models
class SessionRequest(BaseModel):
    """Request body for POST /chat/session.

    user_id is accepted for backward compatibility but is not used when
    creating a session (sessions are anonymous).
    """
    # A field that defaults to None must be annotated as Optional[str]:
    # `user_id: str = None` is inconsistent (and rejected by Pydantic v2).
    user_id: Optional[str] = None  # Optional, but not required anymore

class SessionResponse(BaseModel):
    """Response body for POST /chat/session."""
    # UUID string identifying the newly created session.
    session_id: str

class ChatMessage(BaseModel):
    """One message of a conversation, as stored and replayed to the LLM."""
    role: str # "user" or "assistant"
    content: str

class ChatRequest(BaseModel):
    """Request body for POST /chat."""
    # Session the message belongs to (from POST /chat/session).
    session_id: str
    # OpenRouter model identifier to route the completion to.
    model: str
    # The new user message text.
    message: str

class ChatResponse(BaseModel):
    """Response body for POST /chat: the assistant's reply text."""
    response: str

class ChatHistoryResponse(BaseModel):
    """Response body for GET /chat/history: messages in timestamp order."""
    history: List[ChatMessage]

@app.get("/db-test")
def db_test():
    """Smoke-test the PostgreSQL connection.

    Returns {"db_connection": "success", "result": (1,)} when the database
    answers SELECT 1, or {"db_connection": "failed", "error": ...} otherwise.
    The broad except is deliberate: this is a diagnostic endpoint that should
    report the failure rather than raise.
    """
    try:
        conn = get_db_connection()
        try:
            cur = conn.cursor()
            cur.execute("SELECT 1;")
            result = cur.fetchone()
            cur.close()
        finally:
            # Previously the connection leaked if cursor()/execute() raised.
            conn.close()
        return {"db_connection": "success", "result": result}
    except Exception as e:
        return {"db_connection": "failed", "error": str(e)}


# /chat/session endpoint
@app.post("/chat/session", response_model=SessionResponse)
def chat_session(payload: SessionRequest):
    """Create a new anonymous chat session and return its UUID.

    The payload's user_id is currently ignored — no user account is required.
    """
    session_id = str(uuid.uuid4())
    conn = get_db_connection()
    try:
        cur = conn.cursor()
        cur.execute("INSERT INTO sessions (session_id) VALUES (%s)", (session_id,))
        conn.commit()
        cur.close()
    finally:
        # Previously the connection leaked if the INSERT raised.
        conn.close()
    return SessionResponse(session_id=session_id)

@app.post("/chat", response_model=ChatResponse)
def chat(request: ChatRequest):
    """Handle one chat turn: gather history, call the LLM, persist both sides.

    Raises whatever call_openrouter or psycopg2 raise; the finally block
    guarantees the DB connection is released either way (it previously leaked
    whenever the SELECT, the LLM call, or an INSERT failed).
    """
    conn = get_db_connection()
    try:
        cur = conn.cursor()
        # Replay prior turns so the model sees the full conversation context.
        cur.execute(
            "SELECT role, message FROM chat_messages WHERE session_id = %s ORDER BY timestamp ASC",
            (request.session_id,),
        )
        history = [{"role": row[0], "content": row[1]} for row in cur.fetchall()]
        # Add the new user message.
        history.append({"role": "user", "content": request.message})
        # Call the LLM before persisting, so a failed call stores nothing.
        response = call_openrouter(request.model, history)
        # Save user and assistant messages in a single transaction.
        cur.execute(
            "INSERT INTO chat_messages (id, session_id, role, message, model) VALUES (%s, %s, %s, %s, %s)",
            (str(uuid.uuid4()), request.session_id, "user", request.message, request.model),
        )
        cur.execute(
            "INSERT INTO chat_messages (id, session_id, role, message, model) VALUES (%s, %s, %s, %s, %s)",
            (str(uuid.uuid4()), request.session_id, "assistant", response, request.model),
        )
        conn.commit()
        cur.close()
    finally:
        conn.close()
    return ChatResponse(response=response)

@app.get("/chat/history", response_model=ChatHistoryResponse)
def chat_history(session_id: str):
    """Return all messages for a session, oldest first."""
    conn = get_db_connection()
    try:
        cur = conn.cursor()
        cur.execute(
            "SELECT role, message FROM chat_messages WHERE session_id = %s ORDER BY timestamp ASC",
            (session_id,),
        )
        history = [ChatMessage(role=row[0], content=row[1]) for row in cur.fetchall()]
        cur.close()
    finally:
        # Previously the connection leaked if the query raised.
        conn.close()
    return ChatHistoryResponse(history=history)

# --- New endpoint to list all sessions (for sidebar)
@app.get("/chat/sessions")
def list_sessions():
    """Return every session_id, for the frontend sidebar.

    NOTE(review): ordering by session_id DESC sorts random UUID strings, not
    creation time — confirm whether the sessions table has a timestamp column
    that should drive this ordering instead.
    """
    conn = get_db_connection()
    try:
        cur = conn.cursor()
        cur.execute("SELECT session_id FROM sessions ORDER BY session_id DESC")
        sessions = [row[0] for row in cur.fetchall()]
        cur.close()
    finally:
        # Previously the connection leaked if the query raised.
        conn.close()
    return {"sessions": sessions}
32 changes: 32 additions & 0 deletions AKSHAT_YADAV/CHAT-APP/BACKEND/models/openrouter.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
import os
import requests
import json

# NOTE(review): this value is captured when the module is first imported.
# main.py imports this module BEFORE calling load_dotenv(), so if the key only
# exists in .env this constant may be None — confirm env loading order.
OPENROUTER_API_KEY = os.getenv("OPENROUTER_API_KEY")
# Chat-completions endpoint of the OpenRouter API.
OPENROUTER_API_URL = "https://openrouter.ai/api/v1/chat/completions"

def call_openrouter(model: str, messages: list):
    """Send `messages` to OpenRouter's chat-completions API and return the reply text.

    Args:
        model: OpenRouter model identifier (e.g. "mistralai/mistral-7b-instruct").
        messages: list of {"role": ..., "content": ...} dicts, oldest first.

    Returns:
        The assistant's reply text (choices[0].message.content).

    Raises:
        ValueError: if no API key is available.
        requests.exceptions.HTTPError: on a non-2xx response.
        requests.exceptions.Timeout: if OpenRouter does not answer in time.
    """
    # Re-read the key at call time: the module-level constant is captured at
    # import, which happens before main.py's load_dotenv() and so may be None.
    api_key = os.getenv("OPENROUTER_API_KEY") or OPENROUTER_API_KEY
    if not api_key:
        raise ValueError("OPENROUTER_API_KEY not found in environment variables")

    headers = {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json",
        "HTTP-Referer": "http://localhost:3000",  # Optional: your site URL
        "X-Title": "LLM Chat App"  # Optional: your app name
    }
    data = {
        "model": model,
        "messages": messages
    }

    try:
        # A timeout keeps a hung upstream request from blocking the worker forever.
        response = requests.post(OPENROUTER_API_URL, headers=headers, json=data, timeout=60)
        print(f"OpenRouter response status: {response.status_code}")
        print(f"OpenRouter response: {response.text}")
        response.raise_for_status()
        return response.json()["choices"][0]["message"]["content"]
    except requests.exceptions.HTTPError as e:
        print(f"OpenRouter API Error: {e}")
        print(f"Request data: {json.dumps(data, indent=2)}")
        raise e
Binary file added AKSHAT_YADAV/CHAT-APP/BACKEND/requirements.txt
Binary file not shown.
Empty file.
152 changes: 152 additions & 0 deletions AKSHAT_YADAV/CHAT-APP/FRONTEND/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,152 @@
# LLM Chat App - Frontend

A modern web-based chat interface for interacting with Large Language Models through OpenRouter API.

## Features

- **Modern UI**: Clean, responsive design inspired by ChatGPT
- **Multiple Models**: Support for 5 different LLM models
- **Session Management**: Create new chats and browse chat history
- **Real-time Chat**: Instant messaging with typing indicators
- **Responsive Design**: Works on desktop and mobile devices
- **Auto-save**: All conversations are automatically saved

## How to Run

### Prerequisites
- Backend server running on `http://localhost:8000`
- Modern web browser (Chrome, Firefox, Safari, Edge)

### Running the Frontend

1. **Simple Method**: Just open `index.html` in your web browser
- Double-click the `index.html` file
- Or right-click and select "Open with browser"

2. **Local Server Method** (recommended for development):
```bash
# Using Python (if you have Python installed)
python -m http.server 8080

# Or using Node.js live-server (if you have Node.js)
npx live-server --port=8080

# Then open http://localhost:8080 in your browser
```

3. **VS Code Live Server Extension**:
- Install "Live Server" extension in VS Code
- Right-click on `index.html` and select "Open with Live Server"

### Backend Setup
Make sure your FastAPI backend is running:
```bash
cd ../BACKEND
uvicorn main:app --reload
```

## File Structure

```
FRONTEND/
├── index.html # Main HTML structure
├── styles.css # All CSS styling
├── script.js # JavaScript functionality
└── README.md # This file
```

## Features Overview

### Chat Interface
- Clean, modern design with user and AI message bubbles
- Typing indicators when AI is responding
- Auto-scroll to latest messages
- Timestamp for each message

### Model Selection
- Dropdown to choose between 5 LLM models:
- Mistral 7B Instruct
- OpenChat 3.5
- NeuralBeagle 7B
- Meta LLaMA 3 8B Instruct
- HuggingFace Zephyr 7B Beta

### Session Management
- Create new chat sessions
- View all previous sessions in sidebar
- Click on any session to load its history
- Sessions are automatically saved in the database

### User Experience
- **Enter key** to send messages
- **Shift+Enter** for new lines
- Auto-resizing text input
- Loading states and error handling
- Connection status indicator
- Mobile-responsive design

## Browser Compatibility

- Chrome 60+
- Firefox 55+
- Safari 12+
- Edge 79+

## Troubleshooting

### Common Issues

1. **"Failed to connect to backend"**
- Make sure the backend server is running on `http://localhost:8000`
- Check if there are any CORS issues in the browser console

2. **Messages not sending**
- Verify your OpenRouter API key is set in the backend `.env` file
- Check browser console for JavaScript errors

3. **Sessions not loading**
- Ensure PostgreSQL database is running
- Check if the database tables exist

4. **Styling issues**
- Make sure all three files (HTML, CSS, JS) are in the same directory
- Check browser console for any file loading errors

### Development Tips

- Open browser Developer Tools (F12) to debug issues
- Check the Network tab for API call failures
- Console tab will show JavaScript errors
- Use the responsive design mode to test mobile layout

## API Endpoints Used

The frontend communicates with these backend endpoints:

- `GET /` - Health check
- `POST /chat/session` - Create new session
- `GET /chat/sessions` - Get all sessions
- `POST /chat` - Send message
- `GET /chat/history` - Get chat history

## Customization

### Styling
Edit `styles.css` to customize:
- Colors and themes
- Layout and spacing
- Animation effects
- Mobile responsiveness

### Functionality
Edit `script.js` to modify:
- API endpoints
- Message formatting
- User interactions
- Error handling

### Layout
Edit `index.html` to change:
- Page structure
- UI components
- Meta tags and title
Loading