diff --git a/CONVERSATION_HISTORY.md b/CONVERSATION_HISTORY.md
new file mode 100644
index 00000000..5bc5f11e
--- /dev/null
+++ b/CONVERSATION_HISTORY.md
@@ -0,0 +1,170 @@
+# Conversation History Implementation
+
+This document describes the conversation history feature that has been added to the application.
+
+## Overview
+
+The application now supports persistent conversation history using SQLite database. Users can:
+- Save conversations automatically
+- View all previous conversations
+- Resume any previous conversation
+- Delete conversations
+- Start new conversations
+
+## Backend Changes
+
+### 1. Database Layer (`backend/src/agent/database.py`)
+
+Created a `ConversationDatabase` class that manages:
+- **Conversations table**: Stores conversation metadata (id, title, timestamps)
+- **Messages table**: Stores individual messages with role (human/ai) and content
+- SQLite database with proper indexing for performance
+
+Key methods:
+- `create_conversation()`: Create a new conversation
+- `get_conversation()`: Get conversation by ID
+- `get_all_conversations()`: List all conversations
+- `add_message()`: Add a message to a conversation
+- `get_messages()`: Get all messages for a conversation
+- `delete_conversation()`: Delete a conversation
+- `update_conversation_title()`: Update conversation title
+
+### 2. API Endpoints (`backend/src/agent/app.py`)
+
+Added REST API endpoints:
+
+```
+GET /api/conversations - List all conversations
+GET /api/conversation/{id} - Get specific conversation
+POST /api/conversation - Create new conversation
+POST /api/conversation/{id}/message - Add message to conversation
+GET /api/conversation/{id}/messages - Get all messages
+DELETE /api/conversation/{id} - Delete conversation
+PATCH /api/conversation/{id}/title - Update title
+```
+
+### 3. State Management (`backend/src/agent/state.py`)
+
+Added `conversation_id` field to `OverallState` to track which conversation the agent is processing.
+
+### 4. Utilities (`backend/src/agent/utils.py`)
+
+Added `load_conversation_history()` function to load previous messages from database and convert them to LangChain message format.
+
+## Frontend Changes
+
+### 1. ConversationHistory Component (`frontend/src/components/ConversationHistory.tsx`)
+
+A modal component that displays:
+- List of all conversations with titles and timestamps
+- Message count for each conversation
+- Delete button for each conversation
+- Click to resume any conversation
+- Beautiful space-themed UI matching the app design
+
+Features:
+- Formatted timestamps (Today, Yesterday, date)
+- Current conversation highlighting
+- Confirmation before deletion
+- Loading and error states
+
+### 2. App.tsx Updates
+
+Integrated conversation history with:
+- **History Button**: Fixed position button to open history modal
+- **New Chat Button**: Start a fresh conversation
+- **Auto-save**: Messages are automatically saved to database
+- **Resume**: Load previous conversations with all messages
+- **State Management**: Track current conversation ID
+
+Key functions added:
+- `createNewConversation()`: Create a new conversation when user starts chatting
+- `saveMessage()`: Save each message (human and AI) to database
+- `loadConversation()`: Load a previous conversation and restore state
+
+## Usage
+
+### Starting a New Conversation
+
+1. Open the app (welcome screen)
+2. Type a message and submit
+3. A new conversation is automatically created
+4. All messages are saved as they arrive
+
+### Viewing History
+
+1. Click the "History" button (top-left)
+2. See list of all previous conversations
+3. Click any conversation to resume it
+4. Delete unwanted conversations with the trash icon
+
+### Resuming a Conversation
+
+1. Open History
+2. Click on a conversation
+3. All previous messages load
+4. Continue the conversation from where you left off
+
+### Starting Fresh
+
+Click "New Chat" button to start a new conversation while keeping the previous one saved.
+
+## Database Schema
+
+### Conversations Table
+```sql
+CREATE TABLE conversations (
+ id TEXT PRIMARY KEY,
+ title TEXT,
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ metadata TEXT
+)
+```
+
+### Messages Table
+```sql
+CREATE TABLE messages (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ conversation_id TEXT,
+ role TEXT,
+ content TEXT,
+ timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ metadata TEXT,
+ FOREIGN KEY (conversation_id) REFERENCES conversations(id) ON DELETE CASCADE
+)
+```
+
+## File Structure
+
+```
+backend/src/agent/
+├── database.py # New: SQLite database manager
+├── app.py # Updated: Added API endpoints
+├── state.py # Updated: Added conversation_id field
+└── utils.py # Updated: Added load_conversation_history()
+
+frontend/src/
+├── components/
+│ ├── ConversationHistory.tsx # New: History modal component
+│ └── ...
+└── App.tsx # Updated: Integrated conversation history
+```
+
+## Future Enhancements
+
+Potential improvements:
+1. Search conversations by content
+2. Rename conversations with custom titles (frontend UI for the existing PATCH title endpoint)
+3. Export conversations
+4. Share conversations
+5. Conversation folders/tags
+6. Automatic title generation from first message
+7. Conversation analytics
+
+## Notes
+
+- Database file is created at `backend/conversations.db`
+- Messages are saved immediately after being sent/received
+- Deleting a conversation cascades to delete all its messages
+- The database is persistent across app restarts
diff --git a/GETTING_STARTED.md b/GETTING_STARTED.md
new file mode 100644
index 00000000..cb02f623
--- /dev/null
+++ b/GETTING_STARTED.md
@@ -0,0 +1,121 @@
+# Starting the Application with Conversation History
+
+## Quick Start
+
+### 1. Start the Backend Server
+
+Open a terminal in the `backend` directory and run:
+
+```bash
+cd backend
+langgraph dev
+```
+
+This will start the LangGraph server on `http://127.0.0.1:2024` with the new conversation history API endpoints.
+
+**Note:** Make sure you have:
+- Installed backend dependencies: `pip install .`
+- Set up your `.env` file with `GEMINI_API_KEY`
+
+### 2. Start the Frontend Development Server
+
+Open another terminal in the `frontend` directory and run:
+
+```bash
+cd frontend
+npm run dev
+```
+
+This will start the Vite dev server on `http://localhost:5173/app/`
+
+The frontend is now configured to proxy API requests to the backend server on port 2024.
+
+### 3. Access the Application
+
+Open your browser and navigate to: `http://localhost:5173/app/`
+
+## Features You Can Now Use
+
+### History Button
+- Click the "History" button (top-left) to view all your saved conversations
+- Each conversation shows:
+ - Title
+ - Timestamp (formatted as "Today", "Yesterday", or date)
+ - Message count
+ - Current conversation highlighted
+
+### Starting a Conversation
+- Type your question in the welcome screen
+- A new conversation is automatically created
+- All messages are saved to the SQLite database (`backend/conversations.db`)
+
+### Resuming a Conversation
+1. Click "History" button
+2. Click on any conversation from the list
+3. All previous messages load automatically
+4. Continue the conversation where you left off
+
+### Starting a New Chat
+- Click "New Chat" button (appears when you're in a conversation)
+- Starts a fresh conversation
+- Previous conversation remains saved
+
+### Deleting Conversations
+- Hover over any conversation in the history
+- Click the trash icon that appears
+- Confirm deletion
+
+## Troubleshooting
+
+### "Failed to create conversation" Error
+
+This error means the frontend can't reach the backend API. Check:
+
+1. **Backend is running**: Make sure `langgraph dev` is running in the backend directory
+2. **Correct port**: Backend should be on port 2024 (check terminal output)
+3. **Vite proxy**: The `frontend/vite.config.ts` should proxy `/api` to `http://127.0.0.1:2024`
+
+### WebSocket Connection Errors
+
+These are Vite HMR warnings and won't affect the application functionality. They occur because the app is served under `/app/` path.
+
+### Database Errors
+
+If you see SQLite errors:
+1. Make sure the `backend` directory is writable
+2. The database file `conversations.db` will be created automatically
+3. Delete `conversations.db` to reset all conversations
+
+## Database Location
+
+Conversations are stored in: `backend/conversations.db`
+
+You can:
+- Inspect the database with any SQLite browser
+- Delete it to start fresh
+- Back it up to preserve conversations
+
+## API Endpoints
+
+The backend now serves these additional endpoints:
+
+```
+GET /api/conversations - List all conversations
+GET /api/conversation/{id} - Get specific conversation
+POST /api/conversation - Create new conversation
+POST /api/conversation/{id}/message - Add message to conversation
+GET /api/conversation/{id}/messages - Get all messages
+DELETE /api/conversation/{id} - Delete conversation
+PATCH /api/conversation/{id}/title - Update title
+```
+
+## Testing the Database
+
+Run the test script to verify the database is working:
+
+```bash
+cd backend
+python test_database.py
+```
+
+You should see: `🎉 All tests passed!`
diff --git a/backend/conversations.db b/backend/conversations.db
new file mode 100644
index 00000000..f099fddb
Binary files /dev/null and b/backend/conversations.db differ
diff --git a/backend/src/agent/app.py b/backend/src/agent/app.py
index f20f6ed3..14c6d724 100644
--- a/backend/src/agent/app.py
+++ b/backend/src/agent/app.py
@@ -1,10 +1,143 @@
# mypy: disable - error - code = "no-untyped-def,misc"
import pathlib
-from fastapi import FastAPI, Response
+import uuid
+from typing import List, Optional
+from contextlib import asynccontextmanager
+from fastapi import FastAPI, Response, HTTPException
from fastapi.staticfiles import StaticFiles
+from pydantic import BaseModel
-# Define the FastAPI app
-app = FastAPI()
+from agent.database import ConversationDatabase
+
+# Initialize database globally
+db = None
+
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+ """Lifespan context manager for startup and shutdown events."""
+ global db
+ # Startup: Initialize database
+ # Use absolute path to backend directory for the database
+ backend_dir = pathlib.Path(__file__).parent.parent.parent
+ db_path = backend_dir / "conversations.db"
+ db = ConversationDatabase(db_path=str(db_path))
+ await db._init_db()
+ print(f"Database initialized at: {db_path}")
+ yield
+ # Shutdown: cleanup if needed
+ pass
+
+
+# Define the FastAPI app with lifespan
+app = FastAPI(lifespan=lifespan)
+
+
+# Pydantic models for request/response
+class CreateConversationRequest(BaseModel):
+ title: Optional[str] = "New Conversation"
+ metadata: Optional[dict] = None
+
+
+class AddMessageRequest(BaseModel):
+ role: str # "human" or "ai"
+ content: str
+ metadata: Optional[dict] = None
+
+
+class ConversationResponse(BaseModel):
+ id: str
+ title: str
+ created_at: str
+ updated_at: str
+ message_count: Optional[int] = None
+ metadata: dict
+
+
+class MessageResponse(BaseModel):
+ id: int
+ conversation_id: str
+ role: str
+ content: str
+ timestamp: str
+ metadata: dict
+
+
+# API Endpoints
+@app.get("/api/conversations", response_model=List[ConversationResponse])
+async def get_conversations(limit: int = 50):
+ """Get all conversations."""
+ conversations = await db.get_all_conversations(limit=limit)
+ return conversations
+
+
+@app.get("/api/conversation/{conversation_id}", response_model=ConversationResponse)
+async def get_conversation(conversation_id: str):
+ """Get a specific conversation."""
+ conversation = await db.get_conversation(conversation_id)
+ if not conversation:
+ raise HTTPException(status_code=404, detail="Conversation not found")
+ return conversation
+
+
+@app.post("/api/conversation", response_model=ConversationResponse)
+async def create_conversation(request: CreateConversationRequest):
+ """Create a new conversation."""
+ conversation_id = str(uuid.uuid4())
+ conversation = await db.create_conversation(
+ conversation_id=conversation_id,
+ title=request.title or "New Conversation",
+ metadata=request.metadata
+ )
+ return conversation
+
+
+@app.post("/api/conversation/{conversation_id}/message", response_model=MessageResponse)
+async def add_message_to_conversation(conversation_id: str, request: AddMessageRequest):
+ """Add a message to a conversation."""
+ # Check if conversation exists
+ conversation = await db.get_conversation(conversation_id)
+ if not conversation:
+ raise HTTPException(status_code=404, detail="Conversation not found")
+
+ # Add message
+ message = await db.add_message(
+ conversation_id=conversation_id,
+ role=request.role,
+ content=request.content,
+ metadata=request.metadata
+ )
+ return message
+
+
+@app.get("/api/conversation/{conversation_id}/messages", response_model=List[MessageResponse])
+async def get_conversation_messages(conversation_id: str):
+ """Get all messages for a conversation."""
+ # Check if conversation exists
+ conversation = await db.get_conversation(conversation_id)
+ if not conversation:
+ raise HTTPException(status_code=404, detail="Conversation not found")
+
+ messages = await db.get_messages(conversation_id)
+ return messages
+
+
+@app.delete("/api/conversation/{conversation_id}")
+async def delete_conversation(conversation_id: str):
+ """Delete a conversation."""
+ deleted = await db.delete_conversation(conversation_id)
+ if not deleted:
+ raise HTTPException(status_code=404, detail="Conversation not found")
+ return {"success": True, "message": "Conversation deleted"}
+
+
+@app.patch("/api/conversation/{conversation_id}/title")
+async def update_conversation_title(conversation_id: str, title: str):
+ """Update conversation title."""
+ updated = await db.update_conversation_title(conversation_id, title)
+ if not updated:
+ raise HTTPException(status_code=404, detail="Conversation not found")
+ return {"success": True, "message": "Title updated"}
def create_frontend_router(build_dir="../frontend/dist"):
diff --git a/backend/src/agent/database.py b/backend/src/agent/database.py
new file mode 100644
index 00000000..e8226120
--- /dev/null
+++ b/backend/src/agent/database.py
@@ -0,0 +1,217 @@
+import sqlite3
+import json
+import asyncio
+from datetime import datetime
+from typing import List, Dict, Optional
+from pathlib import Path
+
+
+class ConversationDatabase:
+ """Manages conversation history using SQLite."""
+
+ def __init__(self, db_path: str = "conversations.db"):
+ """Initialize database connection. Call _init_db() separately to create tables."""
+ self.db_path = db_path
+
+ async def _init_db(self):
+ """Create tables if they don't exist."""
+ def _create_tables():
+ with sqlite3.connect(self.db_path) as conn:
+ cursor = conn.cursor()
+
+ # Create conversations table
+ cursor.execute("""
+ CREATE TABLE IF NOT EXISTS conversations (
+ id TEXT PRIMARY KEY,
+ title TEXT,
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ metadata TEXT
+ )
+ """)
+
+ # Create messages table
+ cursor.execute("""
+ CREATE TABLE IF NOT EXISTS messages (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ conversation_id TEXT,
+ role TEXT,
+ content TEXT,
+ timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ metadata TEXT,
+ FOREIGN KEY (conversation_id) REFERENCES conversations(id) ON DELETE CASCADE
+ )
+ """)
+
+ # Create index for faster queries
+ cursor.execute("""
+ CREATE INDEX IF NOT EXISTS idx_conversation_id
+ ON messages(conversation_id)
+ """)
+
+ conn.commit()
+
+ await asyncio.to_thread(_create_tables)
+
+ async def create_conversation(self, conversation_id: str, title: str = "New Conversation", metadata: Optional[Dict] = None) -> Dict:
+ """Create a new conversation."""
+ def _create():
+ with sqlite3.connect(self.db_path) as conn:
+ cursor = conn.cursor()
+ cursor.execute("""
+ INSERT INTO conversations (id, title, metadata)
+ VALUES (?, ?, ?)
+ """, (conversation_id, title, json.dumps(metadata or {})))
+ conn.commit()
+
+ return {
+ "id": conversation_id,
+ "title": title,
+ "created_at": datetime.now().isoformat(),
+ "updated_at": datetime.now().isoformat(),
+ "metadata": metadata or {}
+ }
+
+ return await asyncio.to_thread(_create)
+
+ async def get_conversation(self, conversation_id: str) -> Optional[Dict]:
+ """Get a specific conversation by ID."""
+ def _get():
+ with sqlite3.connect(self.db_path) as conn:
+ conn.row_factory = sqlite3.Row
+ cursor = conn.cursor()
+ cursor.execute("""
+ SELECT * FROM conversations WHERE id = ?
+ """, (conversation_id,))
+ row = cursor.fetchone()
+
+ if row:
+ return {
+ "id": row["id"],
+ "title": row["title"],
+ "created_at": row["created_at"],
+ "updated_at": row["updated_at"],
+ "metadata": json.loads(row["metadata"]) if row["metadata"] else {}
+ }
+ return None
+
+ return await asyncio.to_thread(_get)
+
+ async def get_all_conversations(self, limit: int = 50) -> List[Dict]:
+ """Get all conversations, ordered by most recent."""
+ def _get_all():
+ with sqlite3.connect(self.db_path) as conn:
+ conn.row_factory = sqlite3.Row
+ cursor = conn.cursor()
+ cursor.execute("""
+ SELECT c.*, COUNT(m.id) as message_count
+ FROM conversations c
+ LEFT JOIN messages m ON c.id = m.conversation_id
+ GROUP BY c.id
+ ORDER BY c.updated_at DESC
+ LIMIT ?
+ """, (limit,))
+ rows = cursor.fetchall()
+
+ return [
+ {
+ "id": row["id"],
+ "title": row["title"],
+ "created_at": row["created_at"],
+ "updated_at": row["updated_at"],
+ "message_count": row["message_count"],
+ "metadata": json.loads(row["metadata"]) if row["metadata"] else {}
+ }
+ for row in rows
+ ]
+
+ return await asyncio.to_thread(_get_all)
+
+ async def add_message(self, conversation_id: str, role: str, content: str, metadata: Optional[Dict] = None) -> Dict:
+ """Add a message to a conversation."""
+ def _add():
+ with sqlite3.connect(self.db_path) as conn:
+ cursor = conn.cursor()
+
+ # Add message
+ cursor.execute("""
+ INSERT INTO messages (conversation_id, role, content, metadata)
+ VALUES (?, ?, ?, ?)
+ """, (conversation_id, role, content, json.dumps(metadata or {})))
+
+ message_id = cursor.lastrowid
+
+ # Update conversation's updated_at timestamp
+ cursor.execute("""
+ UPDATE conversations
+ SET updated_at = CURRENT_TIMESTAMP
+ WHERE id = ?
+ """, (conversation_id,))
+
+ conn.commit()
+
+ return {
+ "id": message_id,
+ "conversation_id": conversation_id,
+ "role": role,
+ "content": content,
+ "timestamp": datetime.now().isoformat(),
+ "metadata": metadata or {}
+ }
+
+ return await asyncio.to_thread(_add)
+
+ async def get_messages(self, conversation_id: str) -> List[Dict]:
+ """Get all messages for a conversation."""
+ def _get_msgs():
+ with sqlite3.connect(self.db_path) as conn:
+ conn.row_factory = sqlite3.Row
+ cursor = conn.cursor()
+ cursor.execute("""
+ SELECT * FROM messages
+ WHERE conversation_id = ?
+ ORDER BY timestamp ASC
+ """, (conversation_id,))
+ rows = cursor.fetchall()
+
+ return [
+ {
+ "id": row["id"],
+ "conversation_id": row["conversation_id"],
+ "role": row["role"],
+ "content": row["content"],
+ "timestamp": row["timestamp"],
+ "metadata": json.loads(row["metadata"]) if row["metadata"] else {}
+ }
+ for row in rows
+ ]
+
+ return await asyncio.to_thread(_get_msgs)
+
+ async def delete_conversation(self, conversation_id: str) -> bool:
+ """Delete a conversation and all its messages."""
+ def _delete():
+ with sqlite3.connect(self.db_path) as conn:
+ cursor = conn.cursor()
+ cursor.execute("DELETE FROM conversations WHERE id = ?", (conversation_id,))
+ deleted = cursor.rowcount > 0
+ conn.commit()
+ return deleted
+
+ return await asyncio.to_thread(_delete)
+
+ async def update_conversation_title(self, conversation_id: str, title: str) -> bool:
+ """Update the title of a conversation."""
+ def _update():
+ with sqlite3.connect(self.db_path) as conn:
+ cursor = conn.cursor()
+ cursor.execute("""
+ UPDATE conversations
+ SET title = ?, updated_at = CURRENT_TIMESTAMP
+ WHERE id = ?
+ """, (title, conversation_id))
+ updated = cursor.rowcount > 0
+ conn.commit()
+ return updated
+
+ return await asyncio.to_thread(_update)
diff --git a/backend/src/agent/graph.py b/backend/src/agent/graph.py
index 0f19c3f2..aef54fb5 100644
--- a/backend/src/agent/graph.py
+++ b/backend/src/agent/graph.py
@@ -1,4 +1,5 @@
import os
+import re
from agent.tools_and_schemas import SearchQueryList, Reflection
from dotenv import load_dotenv
@@ -40,6 +41,77 @@
genai_client = Client(api_key=os.getenv("GEMINI_API_KEY"))
+def clean_markdown_formatting(text: str) -> str:
+ """Post-process the generated answer to clean up markdown formatting.
+
+ This function performs the following cleanup operations:
+ - Fixes malformed markdown tables (ensures proper spacing and alignment)
+ - Cleans up citation placement (removes duplicate spaces around citations)
+ - Ensures proper line breaks around tables
+ - Normalizes whitespace
+
+ Args:
+ text: The raw markdown text generated by the model
+
+ Returns:
+ Cleaned and properly formatted markdown text
+ """
+ if not text:
+ return text
+
+ # Fix table formatting: ensure proper spacing around pipes
+ # Match markdown tables and normalize spacing
+ def fix_table_row(match):
+ row = match.group(0)
+ # Split by pipes and clean each cell
+ cells = [cell.strip() for cell in row.split('|')]
+ # Rejoin with proper spacing
+ return '| ' + ' | '.join(filter(None, cells)) + ' |'
+
+ # Fix table rows (lines with pipes)
+ lines = text.split('\n')
+ cleaned_lines = []
+ in_table = False
+
+ for line in lines:
+ # Detect table rows (lines with multiple pipes)
+ if '|' in line and line.count('|') >= 2:
+ in_table = True
+ # Fix spacing around pipes
+ cells = [cell.strip() for cell in line.split('|')]
+ cells = [cell for cell in cells if cell] # Remove empty cells
+ if cells:
+ cleaned_line = '| ' + ' | '.join(cells) + ' |'
+ cleaned_lines.append(cleaned_line)
+ else:
+ cleaned_lines.append(line)
+ else:
+ # Add blank line after table ends
+ if in_table and line.strip():
+ cleaned_lines.append('')
+ in_table = False
+ cleaned_lines.append(line)
+
+ text = '\n'.join(cleaned_lines)
+
+ # Clean up citation formatting
+    # Collapse runs of whitespace before a citation to a single space: "text   [source](url)" -> "text [source](url)"
+ text = re.sub(r'\s+(\[[\w\s]+\]\([^)]+\))', r' \1', text)
+
+    # Ensure exactly one space between the text and its citation
+    # "text [source]" is already correct; "text   [source]" gets collapsed to "text [source]"
+ text = re.sub(r'([^\s])\s{2,}(\[[\w\s]+\]\([^)]+\))', r'\1 \2', text)
+
+ # Clean up multiple consecutive blank lines
+ text = re.sub(r'\n{3,}', '\n\n', text)
+
+ # Ensure blank line before and after tables
+ text = re.sub(r'([^\n])\n(\|)', r'\1\n\n\2', text)
+ text = re.sub(r'(\|[^\n]+)\n([^\n|])', r'\1\n\n\2', text)
+
+ return text.strip()
+
+
# Nodes
def generate_query(state: OverallState, config: RunnableConfig) -> QueryGenerationState:
"""LangGraph node that generates search queries based on the User's question.
@@ -259,8 +331,11 @@ def finalize_answer(state: OverallState, config: RunnableConfig):
)
unique_sources.append(source)
+ # Post-process the generated answer to clean up markdown formatting
+ cleaned_content = clean_markdown_formatting(result.content)
+
return {
- "messages": [AIMessage(content=result.content)],
+ "messages": [AIMessage(content=cleaned_content)],
"sources_gathered": unique_sources,
}
diff --git a/backend/src/agent/prompts.py b/backend/src/agent/prompts.py
index 8963f6a6..39883a50 100644
--- a/backend/src/agent/prompts.py
+++ b/backend/src/agent/prompts.py
@@ -87,6 +87,22 @@ def get_current_date():
- You have access to all the information gathered from the previous steps.
- You have access to the user's question.
- Generate a high-quality answer to the user's question based on the provided summaries and the user's question.
+
+Markdown Formatting Requirements:
+- Use proper markdown syntax for all formatting elements.
+- Format any tables using standard markdown table syntax with pipes (|) and hyphens (-).
+- Ensure tables are properly aligned with header separators (e.g., | Column 1 | Column 2 |).
+- Example table format:
+ | Header 1 | Header 2 | Header 3 |
+ |----------|----------|----------|
+ | Data 1 | Data 2 | Data 3 |
+
+Citation Requirements (CRITICAL):
+- Place citations as markdown links immediately after each fact, claim, or data point: [source](url)
+- Citations must be placed right next to the relevant information, not at the end of paragraphs.
+- Use the exact URLs from the Summaries - do not modify them.
+- Every factual statement MUST include a citation.
+- Format: "The company's revenue grew by 25% [reuters](https://example.com/article)."
- Include the sources you used from the Summaries in the answer correctly, use markdown format (e.g. [apnews](https://vertexaisearch.cloud.google.com/id/1-0)). THIS IS A MUST.
User Context:
diff --git a/backend/src/agent/state.py b/backend/src/agent/state.py
index d5ad4dcd..cd9af7ec 100644
--- a/backend/src/agent/state.py
+++ b/backend/src/agent/state.py
@@ -19,6 +19,7 @@ class OverallState(TypedDict):
max_research_loops: int
research_loop_count: int
reasoning_model: str
+ conversation_id: str # Track which conversation this state belongs to
class ReflectionState(TypedDict):
diff --git a/backend/src/agent/utils.py b/backend/src/agent/utils.py
index d02c8d91..1fa2176a 100644
--- a/backend/src/agent/utils.py
+++ b/backend/src/agent/utils.py
@@ -2,6 +2,31 @@
from langchain_core.messages import AnyMessage, AIMessage, HumanMessage
+def load_conversation_history(conversation_id: str) -> List[AnyMessage]:
+ """
+ Load conversation history from database and convert to LangChain messages.
+
+ Args:
+ conversation_id: The ID of the conversation to load
+
+ Returns:
+ List of LangChain messages (HumanMessage or AIMessage)
+ """
+ from agent.database import ConversationDatabase
+
+ db = ConversationDatabase()
+ messages_data = db.get_messages(conversation_id)
+
+ langchain_messages = []
+ for msg in messages_data:
+ if msg["role"] == "human":
+ langchain_messages.append(HumanMessage(content=msg["content"]))
+ elif msg["role"] == "ai":
+ langchain_messages.append(AIMessage(content=msg["content"]))
+
+ return langchain_messages
+
+
def get_research_topic(messages: List[AnyMessage]) -> str:
"""
Get the research topic from the messages.
diff --git a/backend/test_conversations.db b/backend/test_conversations.db
new file mode 100644
index 00000000..50b432bb
Binary files /dev/null and b/backend/test_conversations.db differ
diff --git a/backend/test_database.py b/backend/test_database.py
new file mode 100644
index 00000000..a9427b95
--- /dev/null
+++ b/backend/test_database.py
@@ -0,0 +1,79 @@
+"""
+Test script for conversation database functionality.
+Run this to verify the database is working correctly.
+"""
+
+import sys
+from pathlib import Path
+
+# Add the src directory to the path
+sys.path.insert(0, str(Path(__file__).parent / "src"))
+
+from agent.database import ConversationDatabase
+
+
+def test_database():
+ """Test basic database operations."""
+ print("🧪 Testing Conversation Database...\n")
+
+ # Initialize database
+ db = ConversationDatabase("test_conversations.db")
+ print("✅ Database initialized")
+
+ # Create a conversation
+ conv = db.create_conversation(
+ conversation_id="test-123",
+ title="Test Conversation",
+ metadata={"test": True}
+ )
+ print(f"✅ Created conversation: {conv['id']}")
+
+ # Add messages
+ msg1 = db.add_message(
+ conversation_id="test-123",
+ role="human",
+ content="Hello, how are you?"
+ )
+ print(f"✅ Added human message: {msg1['id']}")
+
+ msg2 = db.add_message(
+ conversation_id="test-123",
+ role="ai",
+ content="I'm doing great! How can I help you today?"
+ )
+ print(f"✅ Added AI message: {msg2['id']}")
+
+ # Get conversation
+ retrieved = db.get_conversation("test-123")
+ print(f"✅ Retrieved conversation: {retrieved['title']}")
+
+ # Get messages
+ messages = db.get_messages("test-123")
+ print(f"✅ Retrieved {len(messages)} messages")
+
+ # Get all conversations
+ all_convs = db.get_all_conversations()
+ print(f"✅ Found {len(all_convs)} total conversations")
+
+ # Update title
+ db.update_conversation_title("test-123", "Updated Test Title")
+ updated = db.get_conversation("test-123")
+ print(f"✅ Updated title to: {updated['title']}")
+
+ # Delete conversation
+ deleted = db.delete_conversation("test-123")
+ print(f"✅ Deleted conversation: {deleted}")
+
+ # Verify deletion
+ after_delete = db.get_all_conversations()
+ print(f"✅ Conversations after delete: {len(after_delete)}")
+
+ print("\n🎉 All tests passed!")
+
+ # Cleanup
+ Path("test_conversations.db").unlink(missing_ok=True)
+ print("🧹 Cleaned up test database")
+
+
+if __name__ == "__main__":
+ test_database()
diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx
index d06d4021..f6f967f7 100644
--- a/frontend/src/App.tsx
+++ b/frontend/src/App.tsx
@@ -4,9 +4,15 @@ import { useState, useEffect, useRef, useCallback } from "react";
import { ProcessedEvent } from "@/components/ActivityTimeline";
import { WelcomeScreen } from "@/components/WelcomeScreen";
import { ChatMessagesView } from "@/components/ChatMessagesView";
+import { ConversationHistory } from "@/components/ConversationHistory";
import { Button } from "@/components/ui/button";
+import { History } from "lucide-react";
export default function App() {
+ const [showHistory, setShowHistory] = useState(false);
+ const [currentConversationId, setCurrentConversationId] = useState<
+ string | null
+ >(null);
const [processedEventsTimeline, setProcessedEventsTimeline] = useState<
ProcessedEvent[]
>([]);
@@ -99,16 +105,104 @@ export default function App() {
}
}, [thread.messages, thread.isLoading, processedEventsTimeline]);
+ const createNewConversation = useCallback(async () => {
+ try {
+ const response = await fetch("/api/conversation", {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ },
+ body: JSON.stringify({
+ title: "New Conversation",
+ }),
+ });
+
+ if (!response.ok) {
+ throw new Error("Failed to create conversation");
+ }
+
+ const data = await response.json();
+ setCurrentConversationId(data.id);
+ return data.id;
+ } catch (error) {
+ console.error("Error creating conversation:", error);
+ return null;
+ }
+ }, []);
+
+ const saveMessage = useCallback(
+ async (role: string, content: string, conversationId: string) => {
+ try {
+ await fetch(`/api/conversation/${conversationId}/message`, {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ },
+ body: JSON.stringify({
+ role,
+ content,
+ }),
+ });
+ } catch (error) {
+ console.error("Error saving message:", error);
+ }
+ },
+ []
+ );
+
+ const loadConversation = useCallback(
+ async (conversationId: string) => {
+ try {
+ const response = await fetch(
+ `/api/conversation/${conversationId}/messages`
+ );
+ if (!response.ok) {
+ throw new Error("Failed to load conversation");
+ }
+
+ const messages = await response.json();
+ const langchainMessages: Message[] = messages.map((msg: any) => ({
+ type: msg.role === "human" ? "human" : "ai",
+ content: msg.content,
+ id: msg.id.toString(),
+ }));
+
+ setCurrentConversationId(conversationId);
+ thread.submit({
+ messages: langchainMessages,
+ initial_search_query_count: 3,
+ max_research_loops: 3,
+ reasoning_model: "gemini-2.0-flash-thinking-exp-01-21",
+ });
+ setShowHistory(false);
+ } catch (error) {
+ console.error("Error loading conversation:", error);
+ alert("Failed to load conversation");
+ }
+ },
+ [thread]
+ );
+
const handleSubmit = useCallback(
- (submittedInputValue: string, effort: string, model: string) => {
+ async (submittedInputValue: string, effort: string, model: string) => {
if (!submittedInputValue.trim()) return;
setProcessedEventsTimeline([]);
hasFinalizeEventOccurredRef.current = false;
+ // Create a new conversation if we don't have one
+ let conversationId = currentConversationId;
+ if (!conversationId) {
+ conversationId = await createNewConversation();
+ if (!conversationId) {
+ alert("Failed to create conversation");
+ return;
+ }
+ }
+
+ // Save the user message
+ await saveMessage("human", submittedInputValue, conversationId);
+
// convert effort to, initial_search_query_count and max_research_loops
- // low means max 1 loop and 1 query
- // medium means max 3 loops and 3 queries
- // high means max 10 loops and 5 queries
let initial_search_query_count = 0;
let max_research_loops = 0;
switch (effort) {
@@ -141,16 +235,65 @@ export default function App() {
reasoning_model: model,
});
},
- [thread]
+ [thread, currentConversationId, createNewConversation, saveMessage]
);
+ // Save AI messages when they arrive
+ useEffect(() => {
+ if (
+ currentConversationId &&
+ thread.messages.length > 0 &&
+ !thread.isLoading
+ ) {
+ const lastMessage = thread.messages[thread.messages.length - 1];
+ if (lastMessage && lastMessage.type === "ai") {
+ saveMessage("ai", lastMessage.content as string, currentConversationId);
+ }
+ }
+ }, [thread.messages, thread.isLoading, currentConversationId, saveMessage]);
+
const handleCancel = useCallback(() => {
thread.stop();
window.location.reload();
}, [thread]);
+ const handleNewConversation = useCallback(() => {
+ setCurrentConversationId(null);
+ window.location.reload();
+ }, []);
+
return (
+ {/* History Button - Fixed Position */}
+
+
+ {/* New Conversation Button - Show when in conversation */}
+ {thread.messages.length > 0 && (
+
+ )}
+
+ {/* Conversation History Modal */}
+ {showHistory && (
+
setShowHistory(false)}
+ currentConversationId={currentConversationId || undefined}
+ />
+ )}
+
{thread.messages.length === 0 ? (
(false);
const getEventIcon = (title: string, index: number) => {
if (index === 0 && isLoading && processedEvents.length === 0) {
- return ;
+ return ;
}
if (title.toLowerCase().includes("generating")) {
- return ;
+ return ;
} else if (title.toLowerCase().includes("thinking")) {
- return ;
+ return ;
} else if (title.toLowerCase().includes("reflection")) {
- return ;
+ return ;
} else if (title.toLowerCase().includes("research")) {
- return ;
+ return ;
} else if (title.toLowerCase().includes("finalizing")) {
- return ;
+ return ;
}
- return ;
+ return ;
};
useEffect(() => {
@@ -59,18 +59,18 @@ export function ActivityTimeline({
}, [isLoading, processedEvents]);
return (
-
-
+
+
setIsTimelineCollapsed(!isTimelineCollapsed)}
>
Research
{isTimelineCollapsed ? (
-
+
) : (
-
+
)}
@@ -80,12 +80,12 @@ export function ActivityTimeline({
{isLoading && processedEvents.length === 0 && (
-
-
-
+
+
+
@@ -97,16 +97,16 @@ export function ActivityTimeline({
{index < processedEvents.length - 1 ||
(isLoading && index === processedEvents.length - 1) ? (
-
+
) : null}
-
+
{getEventIcon(eventItem.title, index)}
-
+
{eventItem.title}
-
+
{typeof eventItem.data === "string"
? eventItem.data
: Array.isArray(eventItem.data)
@@ -118,11 +118,11 @@ export function ActivityTimeline({
))}
{isLoading && processedEvents.length > 0 && (
-
-
+
+
@@ -130,10 +130,10 @@ export function ActivityTimeline({
)}
) : !isLoading ? ( // Only show "No activity" if not loading and no events
-
-
-
No activity to display.
-
+
+
+
No activity to display.
+
Timeline will update during processing.
diff --git a/frontend/src/components/ChatMessagesView.tsx b/frontend/src/components/ChatMessagesView.tsx
index 1a245d88..4638f441 100644
--- a/frontend/src/components/ChatMessagesView.tsx
+++ b/frontend/src/components/ChatMessagesView.tsx
@@ -4,7 +4,7 @@ import { ScrollArea } from "@/components/ui/scroll-area";
import { Loader2, Copy, CopyCheck } from "lucide-react";
import { InputForm } from "@/components/InputForm";
import { Button } from "@/components/ui/button";
-import { useState, ReactNode } from "react";
+import { useState, ReactNode, useEffect } from "react";
import ReactMarkdown from "react-markdown";
import { cn } from "@/lib/utils";
import { Badge } from "@/components/ui/badge";
@@ -13,6 +13,40 @@ import {
ProcessedEvent,
} from "@/components/ActivityTimeline"; // Assuming ActivityTimeline is in the same dir or adjust path
+const Star = ({ delay }: { delay: number }) => (
+
+);
+
+const ShootingStar = ({ delay }: { delay: number }) => (
+
+);
+
+const Sunray = ({ delay, angle }: { delay: number; angle: number }) => (
+
+);
+
// Markdown component props type from former ReportView
type MdComponentProps = {
className?: string;
@@ -38,7 +72,7 @@ const mdComponents = {
),
p: ({ className, children, ...props }: MdComponentProps) => (
-
+
{children}
),
@@ -147,7 +181,8 @@ const HumanMessageBubble: React.FC
= ({
}) => {
return (
{typeof message.content === "string"
@@ -187,7 +222,7 @@ const AiMessageBubble: React.FC = ({
const isLiveActivityForThisBubble = isLastMessage && isOverallLoading;
return (
-
+
{activityForThisBubble && activityForThisBubble.length > 0 && (
(null);
+ const [stars, setStars] = useState([]);
+
+ useEffect(() => {
+ // Generate 50 random stars for background twinkling/falling
+ const starDelays = Array.from({ length: 50 }, (_, i) => i * 0.1);
+ setStars(starDelays);
+ }, []);
+
+ const sunrays = Array.from({ length: 12 }, (_, i) => ({
+ angle: (i * 30) - 165,
+ delay: i * 0.5,
+ }));
const handleCopy = async (text: string, messageId: string) => {
try {
@@ -253,9 +300,49 @@ export function ChatMessagesView({
}
};
return (
-
-
-
+
+ {/* Space Background with Darker Nebula Gradient */}
+
+ {/* Twinkling Stars */}
+ {stars.map((delay, i) => (
+
+ ))}
+
+ {/* Falling Stars */}
+ {stars.slice(0, 20).map((delay, i) => (
+
+ ))}
+
+ {/* Shooting Stars */}
+ {stars.slice(0, 5).map((delay, i) => (
+
+ ))}
+
+ {/* Sunrays */}
+
+ {sunrays.map((ray, i) => (
+
+ ))}
+
+
+ {/* Subtle Solar System Orb - Central Glow */}
+
+
+ {/* Orbiting Planet (subtle) */}
+
+
+
+
{messages.map((message, index) => {
const isLast = index === messages.length - 1;
return (
@@ -292,7 +379,7 @@ export function ChatMessagesView({
{" "}
{/* AI message row structure */}
-
+
{liveActivityEvents.length > 0 ? (
- 0}
- />
+
+ 0}
+ />
+
+
+
);
}
diff --git a/frontend/src/components/ConversationHistory.tsx b/frontend/src/components/ConversationHistory.tsx
new file mode 100644
index 00000000..6b04f3e7
--- /dev/null
+++ b/frontend/src/components/ConversationHistory.tsx
@@ -0,0 +1,187 @@
+import { useEffect, useState } from "react";
+import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
+import { ScrollArea } from "@/components/ui/scroll-area";
+import { Button } from "@/components/ui/button";
+import { MessageSquare, Trash2, X, Clock, Loader2 } from "lucide-react";
+import { Badge } from "@/components/ui/badge";
+
+interface Conversation {
+ id: string;
+ title: string;
+ created_at: string;
+ updated_at: string;
+ message_count: number;
+ metadata: Record
;
+}
+
+interface ConversationHistoryProps {
+ onSelectConversation: (conversationId: string) => void;
+ onClose: () => void;
+ currentConversationId?: string;
+}
+
+export function ConversationHistory({
+ onSelectConversation,
+ onClose,
+ currentConversationId,
+}: ConversationHistoryProps) {
+ const [conversations, setConversations] = useState([]);
+ const [loading, setLoading] = useState(true);
+ const [error, setError] = useState(null);
+
+ useEffect(() => {
+ fetchConversations();
+ }, []);
+
+ const fetchConversations = async () => {
+ try {
+ setLoading(true);
+ const response = await fetch("/api/conversations");
+ if (!response.ok) {
+ throw new Error("Failed to fetch conversations");
+ }
+ const data = await response.json();
+ setConversations(data);
+ setError(null);
+ } catch (err) {
+ setError(err instanceof Error ? err.message : "Failed to load conversations");
+ } finally {
+ setLoading(false);
+ }
+ };
+
+ const deleteConversation = async (conversationId: string, event: React.MouseEvent) => {
+ event.stopPropagation();
+
+ if (!confirm("Are you sure you want to delete this conversation?")) {
+ return;
+ }
+
+ try {
+ const response = await fetch(`/api/conversation/${conversationId}`, {
+ method: "DELETE",
+ });
+
+ if (!response.ok) {
+ throw new Error("Failed to delete conversation");
+ }
+
+ // Refresh the list
+ fetchConversations();
+ } catch (err) {
+ console.error("Error deleting conversation:", err);
+ alert("Failed to delete conversation");
+ }
+ };
+
+ const formatDate = (dateString: string) => {
+ const date = new Date(dateString);
+ const now = new Date();
+ const diffInHours = (now.getTime() - date.getTime()) / (1000 * 60 * 60);
+
+ if (diffInHours < 24) {
+ return date.toLocaleTimeString([], { hour: "2-digit", minute: "2-digit" });
+ } else if (diffInHours < 48) {
+ return "Yesterday";
+ } else {
+ return date.toLocaleDateString([], { month: "short", day: "numeric" });
+ }
+ };
+
+ return (
+
+
+
+
+
+
+ Conversation History
+
+
+ Resume or delete previous conversations
+
+
+
+
+
+
+
+ {loading ? (
+
+
+
+ ) : error ? (
+
+ ) : conversations.length === 0 ? (
+
+
+
No conversations yet
+
Start a new conversation to see it here
+
+ ) : (
+
+ {conversations.map((conversation) => (
+
onSelectConversation(conversation.id)}
+ className={`group relative p-4 rounded-lg border transition-all cursor-pointer ${
+ currentConversationId === conversation.id
+ ? "bg-indigo-500/20 border-indigo-500/50 shadow-md"
+ : "bg-neutral-800/40 border-neutral-700/50 hover:bg-neutral-800/60 hover:border-indigo-500/30"
+ }`}
+ >
+
+
+
+
+
+ {conversation.title}
+
+ {currentConversationId === conversation.id && (
+
+ Current
+
+ )}
+
+
+
+
+ {formatDate(conversation.updated_at)}
+
+
+ {conversation.message_count}{" "}
+ {conversation.message_count === 1 ? "message" : "messages"}
+
+
+
+
+
+
+ ))}
+
+ )}
+
+
+
+
+ );
+}
diff --git a/frontend/src/components/InputForm.tsx b/frontend/src/components/InputForm.tsx
index 97aa5c67..feecdf3c 100644
--- a/frontend/src/components/InputForm.tsx
+++ b/frontend/src/components/InputForm.tsx
@@ -26,7 +26,7 @@ export const InputForm: React.FC = ({
}) => {
const [internalInputValue, setInternalInputValue] = useState("");
const [effort, setEffort] = useState("medium");
- const [model, setModel] = useState("gemini-2.5-flash-preview-04-17");
+ const [model, setModel] = useState("gemini-2.5-flash-preview-09-2025");
const handleInternalSubmit = (e?: React.FormEvent) => {
if (e) e.preventDefault();
@@ -144,7 +144,7 @@ export const InputForm: React.FC = ({
diff --git a/frontend/src/components/WelcomeScreen.tsx b/frontend/src/components/WelcomeScreen.tsx
index b1015aa8..bfbe14be 100644
--- a/frontend/src/components/WelcomeScreen.tsx
+++ b/frontend/src/components/WelcomeScreen.tsx
@@ -1,4 +1,5 @@
import { InputForm } from "./InputForm";
+import { useEffect, useState } from "react";
interface WelcomeScreenProps {
handleSubmit: (
@@ -10,30 +11,220 @@ interface WelcomeScreenProps {
isLoading: boolean;
}
+const Star = ({ delay }: { delay: number }) => (
+
+);
+
+const ShootingStar = ({ delay }: { delay: number }) => (
+
+);
+
+const Sunray = ({ delay, angle }: { delay: number; angle: number }) => (
+
+);
+
export const WelcomeScreen: React.FC
= ({
handleSubmit,
onCancel,
isLoading,
-}) => (
-
-
-
- Welcome.
-
-
- How can I help you today?
-
-
-
-
+}) => {
+ const [stars, setStars] = useState
([]);
+
+ useEffect(() => {
+ // Generate 50 random stars for background twinkling/falling
+ const starDelays = Array.from({ length: 50 }, (_, i) => i * 0.1);
+ setStars(starDelays);
+ }, []);
+
+ const sunrays = Array.from({ length: 12 }, (_, i) => ({
+ angle: (i * 30) - 165,
+ delay: i * 0.5,
+ }));
+
+ return (
+
+ {/* Space Background with Darker Nebula Gradient */}
+
+ {/* Twinkling Stars */}
+ {stars.map((delay, i) => (
+
+ ))}
+
+ {/* Falling Stars */}
+ {stars.slice(0, 20).map((delay, i) => (
+
+ ))}
+
+ {/* Shooting Stars */}
+ {stars.slice(0, 5).map((delay, i) => (
+
+ ))}
+
+ {/* Sunrays */}
+
+ {sunrays.map((ray, i) => (
+
+ ))}
+
+
+ {/* Subtle Solar System Orb - Central Glow */}
+
+
+ {/* Orbiting Planet (subtle) */}
+
+
+
+ {/* Content Overlay */}
+
+
+
+ Welcome.
+
+
+ How can I help you today?
+
+
+
+
+
+
+ Powered by Google Gemini and LangChain LangGraph.
+
+
+
+
-
- Powered by Google Gemini and LangChain LangGraph.
-
-
-);
+ );
+};
\ No newline at end of file
diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts
index 8abaa377..763ea282 100644
--- a/frontend/vite.config.ts
+++ b/frontend/vite.config.ts
@@ -14,12 +14,11 @@ export default defineConfig({
},
server: {
proxy: {
- // Proxy API requests to the backend server
+ // Proxy API requests to the LangGraph backend server
"/api": {
- target: "http://127.0.0.1:8000", // Default backend address
+ target: "http://127.0.0.1:2024", // LangGraph dev server port
changeOrigin: true,
- // Optionally rewrite path if needed (e.g., remove /api prefix if backend doesn't expect it)
- // rewrite: (path) => path.replace(/^\/api/, ''),
+ secure: false,
},
},
},