Skip to content

Commit dd50789

Browse files
author
AvatarOS Developer
committed
Fixed Planner Loop, refactor module data, context aware orchestrator implemented, added new providers
1 parent 899233f commit dd50789

File tree

11 files changed

+1564
-63
lines changed

11 files changed

+1564
-63
lines changed

server.js

Lines changed: 174 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1210,6 +1210,99 @@ app.post('/api/chat', requireAuth, async (req, res) => {
12101210
}
12111211
});
12121212

1213+
// Planner workflow generation endpoint.
// Takes a snapshot of the client's application context and asks the chat model
// (via the internal /api/chat endpoint) for a workflow graph of the shape
// { title, nodes, edges } that the Planner view can render.
// Responds 400 when context is missing, 500 on any generation failure.
app.post('/api/planner/generate-from-context', requireAuth, async (req, res) => {
  try {
    const { context } = req.body;
    if (!context) {
      return res.status(400).json({ error: 'Context is required' });
    }

    // NOTE(review): `connections` is never referenced below — confirm whether
    // the connected-provider list was meant to feed into the prompt.
    const connections = getUserConnections(req.user.email);

    // Deterministic fallback graph, returned whenever the model's reply
    // cannot be turned into a usable { nodes, edges } structure.
    const fallbackWorkflow = {
      title: `Workflow for ${context.activeApp || 'Current Context'}`,
      nodes: [
        {
          id: 'start-1',
          type: 'default',
          position: { x: 100, y: 100 },
          data: { label: 'Start', description: 'Generated workflow starting point' }
        },
        {
          id: 'context-1',
          type: 'default',
          position: { x: 300, y: 100 },
          data: { label: 'Current Context', description: `Working with ${context.activeApp}` }
        }
      ],
      edges: [
        { id: 'e1-2', source: 'start-1', target: 'context-1' }
      ]
    };

    // Create a workflow generation prompt
    const systemPrompt = `You are a workflow generation assistant. Based on the provided application context, generate a workflow graph with nodes and connections that would be useful for the user's current situation.

Return a JSON object with:
- title: A descriptive title for the workflow
- nodes: Array of node objects with id, type, position {x, y}, and data {label, description}
- edges: Array of edge objects with id, source, target

Available node types: module, assistant, task, tool, workflow, connector, source, model-provider

Focus on creating practical, actionable workflows that relate to the user's current context.`;

    const userPrompt = `Current Application Context:
- Active App: ${context.activeApp}
- Active Module: ${context.activeModule ? `${context.activeModule.id} (${context.activeModule['Module Title']})` : 'None'}
- Connected Services: ${context.connectedServices?.join(', ') || 'None'}
- Recent Chat History: ${context.orchestratorHistory?.map(h => `${h.role}: ${h.parts[0]?.text}`).slice(-3).join('\n') || 'No recent messages'}

Generate a workflow that would be helpful for this context.`;

    // Use the chat endpoint internally to generate the workflow; the session
    // cookie is forwarded so the internal call passes requireAuth.
    const chatResponse = await fetch(`${req.protocol}://${req.get('host')}/api/chat`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Cookie': req.headers.cookie || ''
      },
      body: JSON.stringify({
        model: 'gemini-2.5-flash',
        messages: [
          { role: 'system', content: systemPrompt },
          { role: 'user', content: userPrompt }
        ]
      })
    });

    if (!chatResponse.ok) {
      throw new Error('Failed to generate workflow via AI');
    }

    const aiResponse = await chatResponse.text();

    // Try to extract JSON from the response: prefer a fenced ```json block,
    // otherwise fall back to the first-brace-to-last-brace span.
    let workflowData;
    try {
      const jsonMatch = aiResponse.match(/```json\n([\s\S]*?)\n```/) || aiResponse.match(/\{[\s\S]*\}/);
      if (jsonMatch) {
        workflowData = JSON.parse(jsonMatch[1] || jsonMatch[0]);
      } else {
        throw new Error('No JSON found in AI response');
      }
    } catch (parseError) {
      // Fallback: create a simple default workflow
      workflowData = fallbackWorkflow;
    }

    // FIX: the model can return syntactically valid JSON that is still not a
    // workflow (missing or non-array nodes/edges). The client destructures
    // { nodes, edges } and reads .length, so guarantee both are arrays before
    // responding.
    if (!workflowData || !Array.isArray(workflowData.nodes) || !Array.isArray(workflowData.edges)) {
      workflowData = fallbackWorkflow;
    }

    res.json(workflowData);

  } catch (error) {
    logger.error('Workflow generation failed:', error);
    res.status(500).json({ error: 'Failed to generate workflow' });
  }
});
1305+
12131306
// Universal endpoint to get available models from all connected services
12141307
app.get('/api/models', requireAuth, async (req, res) => {
12151308
try {
@@ -1277,6 +1370,87 @@ app.get('/api/models', requireAuth, async (req, res) => {
12771370
}
12781371
}
12791372

1373+
// Hugging Face models (if connected)
1374+
if (connections.huggingface?.apiKey) {
1375+
availableModels.push(
1376+
{ id: 'microsoft/DialoGPT-medium', name: 'DialoGPT Medium', provider: 'Hugging Face', category: 'text', available: true },
1377+
{ id: 'facebook/blenderbot-400M-distill', name: 'BlenderBot 400M', provider: 'Hugging Face', category: 'text', available: true },
1378+
{ id: 'microsoft/CodeBERT-base', name: 'CodeBERT Base', provider: 'Hugging Face', category: 'code', available: true }
1379+
);
1380+
}
1381+
1382+
// Replicate models (if connected)
1383+
if (connections.replicate?.apiKey) {
1384+
availableModels.push(
1385+
{ id: 'meta/llama-2-70b-chat', name: 'Llama 2 70B Chat', provider: 'Replicate', category: 'text', available: true },
1386+
{ id: 'stability-ai/stable-diffusion', name: 'Stable Diffusion', provider: 'Replicate', category: 'image', available: true },
1387+
{ id: 'replicate/musicgen', name: 'MusicGen', provider: 'Replicate', category: 'audio', available: true }
1388+
);
1389+
}
1390+
1391+
// Together AI models (if connected)
1392+
if (connections.together?.apiKey) {
1393+
availableModels.push(
1394+
{ id: 'togethercomputer/llama-2-70b-chat', name: 'Llama 2 70B Chat', provider: 'Together AI', category: 'text', available: true },
1395+
{ id: 'togethercomputer/falcon-40b-instruct', name: 'Falcon 40B Instruct', provider: 'Together AI', category: 'text', available: true },
1396+
{ id: 'togethercomputer/RedPajama-INCITE-7B-Chat', name: 'RedPajama 7B Chat', provider: 'Together AI', category: 'text', available: true }
1397+
);
1398+
}
1399+
1400+
// Mistral AI models (if connected)
1401+
if (connections.mistral?.apiKey) {
1402+
availableModels.push(
1403+
{ id: 'mistral-large-latest', name: 'Mistral Large', provider: 'Mistral AI', category: 'text', available: true },
1404+
{ id: 'mistral-medium-latest', name: 'Mistral Medium', provider: 'Mistral AI', category: 'text', available: true },
1405+
{ id: 'mistral-small-latest', name: 'Mistral Small', provider: 'Mistral AI', category: 'text', available: true }
1406+
);
1407+
}
1408+
1409+
// Cohere models (if connected)
1410+
if (connections.cohere?.apiKey) {
1411+
availableModels.push(
1412+
{ id: 'command', name: 'Command', provider: 'Cohere', category: 'text', available: true },
1413+
{ id: 'command-light', name: 'Command Light', provider: 'Cohere', category: 'text', available: true },
1414+
{ id: 'summarize-xlarge', name: 'Summarize XLarge', provider: 'Cohere', category: 'text', available: true }
1415+
);
1416+
}
1417+
1418+
// vLLM models (if connected)
1419+
if (connections.vllm?.url) {
1420+
availableModels.push(
1421+
{ id: 'vllm-hosted-model', name: 'vLLM Hosted Model', provider: 'vLLM', category: 'text', available: true }
1422+
);
1423+
}
1424+
1425+
// LocalAI models (if connected)
1426+
if (connections.localai?.url) {
1427+
availableModels.push(
1428+
{ id: 'localai-model', name: 'LocalAI Model', provider: 'LocalAI', category: 'text', available: true }
1429+
);
1430+
}
1431+
1432+
// Stability AI models (if connected)
1433+
if (connections.stability?.apiKey) {
1434+
availableModels.push(
1435+
{ id: 'stable-diffusion-xl-1024-v1-0', name: 'SDXL 1.0', provider: 'Stability AI', category: 'image', available: true },
1436+
{ id: 'stable-diffusion-v1-6', name: 'Stable Diffusion 1.6', provider: 'Stability AI', category: 'image', available: true }
1437+
);
1438+
}
1439+
1440+
// Midjourney models (if connected)
1441+
if (connections.midjourney?.apiKey) {
1442+
availableModels.push(
1443+
{ id: 'midjourney-v6', name: 'Midjourney v6', provider: 'Midjourney', category: 'image', available: true }
1444+
);
1445+
}
1446+
1447+
// Runway models (if connected)
1448+
if (connections.runway?.apiKey) {
1449+
availableModels.push(
1450+
{ id: 'runway-gen3', name: 'Runway Gen-3', provider: 'Runway ML', category: 'video', available: true }
1451+
);
1452+
}
1453+
12801454
res.json({ models: availableModels });
12811455
} catch (error) {
12821456
logger.error('Error fetching available models:', { errorMessage: error.message });

src/components/FloatingOrchestrator.jsx

Lines changed: 85 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ import useStore from '../lib/store';
88
import { sendMessageToOrchestrator } from '../lib/actions';
99
import { personalities } from '../lib/assistant/personalities';
1010
import { modulesBySemester } from '../lib/modules';
11+
import { extractWorkflowContext } from '../lib/contextExtraction';
1112
import '../styles/components/floating-orchestrator.css';
1213

1314
const AgentTask = ({ message }) => (
@@ -48,6 +49,7 @@ export default function FloatingOrchestrator() {
4849

4950
const [input, setInput] = useState('');
5051
const [isMinimized, setIsMinimized] = useState(false);
52+
const [autoMinimize, setAutoMinimize] = useState(true);
5153
const [isDragging, setIsDragging] = useState(false);
5254
const [isResizing, setIsResizing] = useState(false);
5355
const [dragStart, setDragStart] = useState({ x: 0, y: 0 });
@@ -178,6 +180,70 @@ User Message: ${message}`;
178180
sendMessageToOrchestrator(enhancedMessage, { enableThinking, thinkingBudget });
179181
}, [isContextAware, gatherAppContext, enableThinking, thinkingBudget]);
180182

183+
// Generate workflow from current application context.
// Posts the extracted context to the planner endpoint, installs the returned
// graph into the Planner store, switches to the Planner view, and narrates
// progress/success/failure into the orchestrator chat history.
const generateWorkflowFromContext = useCallback(async () => {
  try {
    // Use the new context extraction utility
    const context = extractWorkflowContext();

    // Add to chat history to show user what's happening
    useStore.setState(state => {
      state.orchestratorHistory.push({
        role: 'user',
        parts: [{ text: '/generate_workflow' }]
      }, {
        role: 'assistant',
        parts: [{ text: '🔄 Generating workflow from current application context...' }]
      });
    });

    // Call the backend to generate workflow
    const response = await fetch('/api/planner/generate-from-context', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Accept': 'application/json'
      },
      credentials: 'include',
      body: JSON.stringify({ context })
    });

    if (!response.ok) {
      // FIX: the server responds with JSON ({ error: '...' }); surface the
      // error message field rather than the raw JSON body when possible.
      const errorText = await response.text();
      let message = errorText;
      try {
        message = JSON.parse(errorText).error || errorText;
      } catch (ignored) {
        // body was not JSON — keep the raw text
      }
      throw new Error(message);
    }

    const { nodes, edges, title } = await response.json();

    // FIX: guard against a malformed payload — setPlannerGraph and the
    // success message below both assume nodes/edges are arrays (.length).
    const safeNodes = Array.isArray(nodes) ? nodes : [];
    const safeEdges = Array.isArray(edges) ? edges : [];

    // Set the generated workflow in the planner
    const actions = useStore.getState().actions;
    if (actions?.setPlannerGraph) {
      actions.setPlannerGraph({ nodes: safeNodes, edges: safeEdges, title: title || 'AI Generated Workflow' });
    }

    // Switch to planner view
    if (actions?.setActiveApp) {
      actions.setActiveApp('planner');
    }

    // Update chat history with success
    useStore.setState(state => {
      state.orchestratorHistory.push({
        role: 'assistant',
        parts: [{ text: `✅ Generated workflow: "${title || 'AI Generated Workflow'}" with ${safeNodes.length} nodes and ${safeEdges.length} connections. Switching to Planner view.` }]
      });
    });
  } catch (error) {
    console.error('Failed to generate workflow:', error);
    useStore.setState(state => {
      state.orchestratorHistory.push({
        role: 'assistant',
        parts: [{ text: `❌ Failed to generate workflow: ${error.message}` }]
      });
    });
  }
}, []);
246+
181247
// Function to start a new chat
182248
const handleNewChat = useCallback(() => {
183249
if (window.confirm('Start a new chat? This will clear the current conversation.')) {
@@ -203,6 +269,17 @@ User Message: ${message}`;
203269
}
204270
}, [history, isLoading, isMinimized]);
205271

272+
// Auto-minimize after inactivity: collapse the panel once 30 seconds pass
// with no dragging, typing, or new messages. Any change to the dependency
// list re-runs the effect, which cancels and restarts the countdown.
useEffect(() => {
  if (!autoMinimize || isMinimized || isDragging) return;

  const idleTimer = setTimeout(() => setIsMinimized(true), 30000); // 30 seconds of inactivity
  return () => clearTimeout(idleTimer);
}, [autoMinimize, isMinimized, isDragging, history, input]); // Reset timer on any activity
282+
206283
// Close dropdowns when clicking outside
207284
useEffect(() => {
208285
const handleClickOutside = (event) => {
@@ -338,6 +415,14 @@ User Message: ${message}`;
338415
// Submit handler for the chat input. Slash-commands are intercepted here
// before the message is forwarded to the orchestrator.
const handleSubmit = (e) => {
  e.preventDefault();
  const trimmed = input.trim();
  if (!trimmed) return;

  // Handle special commands.
  // FIX: match on the trimmed input — previously leading whitespace passed
  // the emptiness check but silently hid the /generate_workflow command.
  if (trimmed.startsWith('/generate_workflow')) {
    generateWorkflowFromContext();
    setInput('');
    return;
  }

  // Regular message path sends the raw input, preserving prior behavior.
  sendContextualMessage(input);
  setInput('');
};

0 commit comments

Comments
 (0)