diff --git a/custom-guidelines-pseudocode.txt b/custom-guidelines-pseudocode.txt
new file mode 100644
index 0000000..bbd1d45
--- /dev/null
+++ b/custom-guidelines-pseudocode.txt
@@ -0,0 +1,11 @@
+# Define get_cg before first use, and build the result from its own
+# parameters instead of referencing the global it is supposed to produce.
+get_cg(general_guidelines, session_notes = "") -> str:
+    return f"{general_guidelines}\n{session_notes}".strip()
+
+if session_notes:
+    custom_guidelines = get_cg("aklsdjfkljasdf", session_notes)
+else:
+    custom_guidelines = get_cg("aklsdjfkljasdf")
+
+# was: f"{custom-guidelines}" -- a hyphen is not valid in an identifier
+system_prompt = f"{custom_guidelines}"
diff --git a/src/client/app/api/journal/save/route.js b/src/client/app/api/journal/save/route.js
new file mode 100644
index 0000000..8e5e911
--- /dev/null
+++ b/src/client/app/api/journal/save/route.js
@@ -0,0 +1,22 @@
+import { NextResponse } from 'next/server';
+
+// Proxy a raw journal entry to the Python bridge, which owns
+// vectorization and storage. No embedding work happens in Node.
+export async function POST(req) {
+  try {
+    const { userId, text, id } = await req.json();
+
+    // Reject obviously malformed requests before hitting the bridge.
+    if (!userId || !text) {
+      return NextResponse.json({ error: 'userId and text are required' }, { status: 400 });
+    }
+
+    // No vectorization here! Just send the raw text to Python.
+    const res = await fetch('http://localhost:5001/upsert', {
+      method: 'POST',
+      headers: { 'Content-Type': 'application/json' },
+      body: JSON.stringify({
+        user_id: userId,
+        text: text,
+        id: id
+      })
+    });
+
+    // Surface bridge failures instead of parsing an error page as JSON.
+    if (!res.ok) {
+      return NextResponse.json({ error: `Bridge responded ${res.status}` }, { status: 502 });
+    }
+
+    return NextResponse.json(await res.json());
+  } catch (err) {
+    return NextResponse.json({ error: err.message }, { status: 500 });
+  }
+}
diff --git a/src/client/app/api/journal/summary/route.js b/src/client/app/api/journal/summary/route.js
new file mode 100644
index 0000000..e36e5b3
--- /dev/null
+++ b/src/client/app/api/journal/summary/route.js
@@ -0,0 +1,50 @@
+import { NextResponse } from 'next/server';
+import { pipeline } from '@xenova/transformers';
+
+// Cache the embedding pipeline across requests: re-creating it on every
+// POST reloads the whole model and dominates request latency.
+let embedderPromise = null;
+
+export async function POST(req) {
+  try {
+    const { userId, topic } = await req.json();
+
+    // 1. Generate the vector locally on the CPU (pipeline cached after first call)
+    embedderPromise ??= pipeline('feature-extraction', 'Xenova/all-MiniLM-L6-v2');
+    const pipe = await embedderPromise;
+    const output = await pipe(topic, { pooling: 'mean', normalize: true });
+    const vector = Array.from(output.data);
+
+    // 2. 
Fetch relevant entries from your Python Bridge
+    const actianRes = await fetch('http://localhost:5001/search', {
+      method: 'POST',
+      headers: { 'Content-Type': 'application/json' },
+      body: JSON.stringify({
+        vector: vector,
+        user_id: userId
+      }),
+    });
+
+    // Fail loudly if the bridge is down rather than parsing an error body.
+    if (!actianRes.ok) {
+      throw new Error(`Bridge search failed with status ${actianRes.status}`);
+    }
+
+    const entries = await actianRes.json();
+
+    // Combine the retrieved logs into one "context" string; guard against
+    // the bridge returning an error object instead of an array.
+    const context = (Array.isArray(entries) ? entries : [])
+      .map(e => e.text)
+      .join("\n---\n");
+
+    // 3. Send to Ollama for the final summary
+    const ollamaRes = await fetch('http://localhost:11434/api/generate', {
+      method: 'POST',
+      headers: { 'Content-Type': 'application/json' },
+      body: JSON.stringify({
+        model: "llama3",
+        prompt: `Based on these journal entries:\n${context}\n\nSummarize the user's day regarding "${topic}" in one concise sentence.`,
+        stream: false
+      }),
+    });
+
+    if (!ollamaRes.ok) {
+      throw new Error(`Ollama failed with status ${ollamaRes.status}`);
+    }
+
+    const final = await ollamaRes.json();
+
+    return NextResponse.json({
+      summary: final.response,
+      rawContext: context
+    });
+
+  } catch (error) {
+    console.error("Pipeline Error:", error);
+    return NextResponse.json({ error: "Check if Bridge and Ollama are running" }, { status: 500 });
+  }
+}
diff --git a/src/client/app/api/therapy/chat/route.js b/src/client/app/api/therapy/chat/route.js
new file mode 100644
index 0000000..11eb7b6
--- /dev/null
+++ b/src/client/app/api/therapy/chat/route.js
@@ -0,0 +1,23 @@
+// Non-streaming chat proxy: forwards one user turn (plus the accumulated
+// session state) to the Python agent and returns its reply.
+export async function POST(req) {
+  try {
+    const { userId, message, transcript, evidence, agenda } = await req.json();
+
+    const res = await fetch('http://localhost:5001/agent/run_session', {
+      method: 'POST',
+      headers: { 'Content-Type': 'application/json' },
+      body: JSON.stringify({
+        user_id: userId,
+        message: message,
+        transcript: transcript, // Pass history back to Python
+        evidence: evidence,     // Pass evidence back to Python
+        agenda: agenda
+      })
+    });
+
+    if (!res.ok) throw new Error("Bridge failed");
+
+    const data = await res.json();
+    return Response.json({
+      reply: data.therapy_response,
+      fullTranscript: data.full_transcript
+    });
+  } catch (err) {
+    // Without this catch, any failure escapes the handler as an opaque 500.
+    return Response.json({ error: err.message }, { status: 500 });
+  }
+}
diff --git 
a/src/client/app/api/therapy/chat_stream/route.js b/src/client/app/api/therapy/chat_stream/route.js
new file mode 100644
index 0000000..a3314e5
--- /dev/null
+++ b/src/client/app/api/therapy/chat_stream/route.js
@@ -0,0 +1,19 @@
+// Streaming chat proxy: pipes the bridge's SSE body straight through.
+export async function POST(req) {
+  const body = await req.json();
+
+  const res = await fetch('http://localhost:5001/agent/chat_stream', {
+    method: 'POST',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify(body),
+  });
+
+  // Propagate the upstream status instead of collapsing everything to 500.
+  if (!res.ok) return new Response("Bridge Connection Failed", { status: res.status || 500 });
+
+  return new Response(res.body, {
+    headers: {
+      'Content-Type': 'text/event-stream',
+      'Cache-Control': 'no-cache',
+      'Connection': 'keep-alive',
+    },
+  });
+}
diff --git a/src/client/app/api/therapy/end/route.js b/src/client/app/api/therapy/end/route.js
new file mode 100644
index 0000000..975334b
--- /dev/null
+++ b/src/client/app/api/therapy/end/route.js
@@ -0,0 +1,23 @@
+// Ends the session: the Python agent distills the transcript into exercises.
+export async function POST(req) {
+  try {
+    const body = await req.json();
+
+    const res = await fetch('http://localhost:5001/agent/end_session', {
+      method: 'POST',
+      headers: { 'Content-Type': 'application/json' },
+      body: JSON.stringify({
+        user_id: body.userId,
+        transcript: body.transcript,
+        evidence: body.evidence,
+        agenda: body.agenda
+      })
+    });
+
+    if (!res.ok) throw new Error("Bridge failed");
+
+    const data = await res.json();
+    return Response.json(data);
+  } catch (err) {
+    return Response.json({ error: err.message }, { status: 500 });
+  }
+}
diff --git a/src/client/app/api/therapy/start/route.js b/src/client/app/api/therapy/start/route.js
new file mode 100644
index 0000000..d1d1c53
--- /dev/null
+++ b/src/client/app/api/therapy/start/route.js
@@ -0,0 +1,21 @@
+import { NextResponse } from 'next/server';
+
+export async function POST(req) {
+  const { userId, userNotes } = await req.json(); // Capture userNotes
+
+  const res = await fetch('http://localhost:5001/agent/start', {
+    method: 'POST',
+    headers: { 'Content-Type': 
'application/json' },
+    body: JSON.stringify({
+      user_id: userId,
+      user_notes: userNotes // Pass it to Flask
+    })
+  });
+
+  // Surface bridge failures; otherwise every data.* field below is
+  // silently undefined and the frontend shows an empty session.
+  if (!res.ok) {
+    return NextResponse.json({ error: 'Bridge failed' }, { status: 502 });
+  }
+
+  const data = await res.json();
+  return NextResponse.json({
+    openingMessage: data.food_for_thought,
+    evidenceFound: data.evidence,
+    agenda: data.agenda // Pass the generated agenda back to the frontend
+  });
+}
diff --git a/src/client/app/components/ExerciseList.js b/src/client/app/components/ExerciseList.js
new file mode 100644
index 0000000..05926dd
--- /dev/null
+++ b/src/client/app/components/ExerciseList.js
@@ -0,0 +1,42 @@
+"use client";
+import { useState } from 'react';
+import GuidedExercise from './GuidedExercise';
+
+// Shows the list of generated exercises; clicking one switches to the
+// full-screen GuidedExercise player.
+export default function ExerciseList({ exercises }) {
+  const [activeExercise, setActiveExercise] = useState(null);
+
+  if (activeExercise) {
+    return (
+      {/* NOTE(review): the <GuidedExercise .../> opening tag appears to
+          have been stripped during extraction — the onExit handler below
+          belongs to it; reconstruct from version control. */}
+      setActiveExercise(null)}
+      />
+    );
+  }
+
+  return (
+
{/* NOTE(review): the JSX markup in this hunk was mangled during
    extraction — element tags and className attributes are missing
    (e.g. the card containers and the action buttons around {ex.type},
    {ex.title}, {ex.content}). Recover the original ExerciseList card
    markup from version control before editing this region. */}
+ {exercises.map((ex, i) => ( +
+
+ + {ex.type} + +

{ex.title}

+

{ex.content}

+
+ + +
+ ))} +
+ ); +}
diff --git a/src/client/app/components/GuidedExercise.js b/src/client/app/components/GuidedExercise.js
new file mode 100644
index 0000000..88be238
--- /dev/null
+++ b/src/client/app/components/GuidedExercise.js
@@ -0,0 +1,68 @@
+"use client";
+import { useState, useEffect } from 'react';
+
+// Karaoke-style guided exercise: reads the script sentence by sentence
+// with the Web Speech API and highlights the current sentence.
+export default function GuidedExercise({ title, script, onExit }) {
+  // Split the script by our special token
+  const sentences = script.split("[BREAK]").map(s => s.trim()).filter(s => s);
+  const [index, setIndex] = useState(0);
+  const [isPlaying, setIsPlaying] = useState(false);
+
+  const speak = (text) => {
+    // Stop any existing speech
+    window.speechSynthesis.cancel();
+
+    const utterance = new SpeechSynthesisUtterance(text);
+    utterance.rate = 0.9; // Slightly slower for therapy
+
+    utterance.onend = () => {
+      setIsPlaying(false);
+      // Auto-advance the index so the UI highlights the next sentence
+      if (index < sentences.length - 1) {
+        setIndex(prev => prev + 1);
+      }
+    };
+
+    // FIX: without an error handler, a synthesis failure leaves the UI
+    // stuck in the "playing" state because onend never fires.
+    utterance.onerror = () => setIsPlaying(false);
+
+    window.speechSynthesis.speak(utterance);
+    setIsPlaying(true);
+  };
+
+  return (
+
{/* NOTE(review): the JSX for this return block was mangled during
    extraction — element tags, classNames, and the play/exit button
    markup are missing (only text nodes and embedded expressions
    survive). Restore the original markup from version control before
    editing this region. */}
+ + +
+ Guided Session +

{title}

+ + {/* The Karaoke Display */} +
+ {sentences.map((s, i) => ( +

+ {s} +

+ ))} +
+ + {/* Control Button */} + + +

+ {index === sentences.length - 1 && !isPlaying ? "Exercise Complete" : "Press play to continue"} +

+
+
+ ); +}
diff --git a/src/client/app/therapy/page.js b/src/client/app/therapy/page.js
new file mode 100644
index 0000000..6a91962
--- /dev/null
+++ b/src/client/app/therapy/page.js
@@ -0,0 +1,211 @@
+"use client";
+import { useState, useEffect, useRef } from 'react';
+import ExerciseList from '../components/ExerciseList';
+
+export default function TherapyPage() {
+  const [session, setSession] = useState(null);
+  const [chat, setChat] = useState([]);
+  const [input, setInput] = useState("");
+  const [loading, setLoading] = useState(false);
+  const [exercises, setExercises] = useState(null);
+  const [userNotes, setUserNotes] = useState(""); // NEW
+
+  // Ref for auto-scrolling the chat window
+  const chatEndRef = useRef(null);
+
+  const scrollToBottom = () => {
+    chatEndRef.current?.scrollIntoView({ behavior: "smooth" });
+  };
+
+  useEffect(() => {
+    scrollToBottom();
+  }, [chat, loading]);
+
+  // STEP 2 & 3: Start session and get "Food for Thought" from Gemini via Research Node
+  const startSession = async () => {
+    setLoading(true);
+    try {
+      const res = await fetch('/api/therapy/start', {
+        method: 'POST',
+        headers: { 'Content-Type': 'application/json' },
+        body: JSON.stringify({ userId: 'horyzon', userNotes: userNotes })
+      });
+      const data = await res.json();
+
+      setSession({
+        evidence: data.evidenceFound, // The clinical themes found in Actian
+        user_id: 'horyzon',
+        agenda: data.agenda
+      });
+
+      // The opening message generated in agents.py
+      setChat([{ role: 'assistant', content: data.openingMessage }]);
+    } catch (err) {
+      console.error("Failed to start session:", err);
+    } finally {
+      setLoading(false);
+    }
+  };
+
+  const sendMessage = async () => {
+    if (!input.trim() || loading) return;
+
+    const userMsg = { role: 'user', content: input };
+    const currentChat = [...chat, userMsg];
+    setChat(currentChat);
+    setInput("");
+    setLoading(true);
+
+    // Add an empty assistant message we will fill up
+    setChat(prev => [...prev, { role: 'assistant', content: "" }]);
+
+    // FIX: try/finally so a network failure can no longer leave `loading`
+    // stuck at true, which permanently blocked further sends.
+    try {
+      const response = await fetch('/api/therapy/chat_stream', { // Hit a new streaming proxy
+        method: 'POST',
+        body: JSON.stringify({ userId: 'horyzon', message: input, transcript: chat, evidence: session.evidence, agenda: session.agenda })
+      });
+
+      // FIX: don't call getReader() on an error response (body may be null).
+      if (!response.ok || !response.body) {
+        throw new Error(`Stream failed with status ${response.status}`);
+      }
+
+      const reader = response.body.getReader();
+      const decoder = new TextDecoder();
+      let fullReply = "";
+
+      while (true) {
+        const { done, value } = await reader.read();
+        if (done) break;
+
+        const chunk = decoder.decode(value);
+        fullReply += chunk;
+
+        // Update the LAST message in the chat array with the new chunk.
+        // FIX: replace the message object instead of mutating state in place.
+        setChat(prev => {
+          const newChat = [...prev];
+          newChat[newChat.length - 1] = { ...newChat[newChat.length - 1], content: fullReply };
+          return newChat;
+        });
+      }
+    } catch (err) {
+      console.error("Streaming error:", err);
+      setChat(prev => {
+        const newChat = [...prev];
+        newChat[newChat.length - 1] = { ...newChat[newChat.length - 1], content: "Sorry — connection lost. Please try again." };
+        return newChat;
+      });
+    } finally {
+      setLoading(false);
+    }
+  };
+
+  const finishSession = async () => {
+    setLoading(true);
+    try {
+      // Create a proxy for this in /api/therapy/end/route.js or hit bridge via proxy
+      const res = await fetch('/api/therapy/end', {
+        method: 'POST',
+        headers: { 'Content-Type': 'application/json' },
+        body: JSON.stringify({
+          userId: 'horyzon',
+          transcript: chat,
+          evidence: session.evidence,
+          agenda: session.agenda
+        })
+      });
+      const data = await res.json();
+      setExercises(data.exercises);
+    } catch (err) {
+      console.error("Wrap up error:", err);
+    } finally {
+      setLoading(false);
+    }
+  };
+
+  // Render Exercise Results View
+  if (exercises) {
+    return (
+
{/* NOTE(review): the render JSX below was mangled during extraction
    (element tags and classNames stripped) AND the patch is truncated
    mid-component at the end of this chunk — the remainder of
    TherapyPage (chat view, input bar, closing braces) is not visible.
    Recover the full file from version control before editing. */}
+
+
+

Session Complete

+

Based on our conversation, here are three tailored tools for you:

+
+ +
+ +
+
+ ); + } + + return ( +
+ {!session ? ( +
+
+ 🌿 +
+

Start your session

+

+ I'll review your recent journal logs to help guide our conversation today. +

+