diff --git a/frontend/src/pages/GeneralSettings/ChatEmbedWidgets/EmbedConfigs/EmbedRow/EditEmbedModal/index.jsx b/frontend/src/pages/GeneralSettings/ChatEmbedWidgets/EmbedConfigs/EmbedRow/EditEmbedModal/index.jsx
index ef615ab10dd..29d865fca62 100644
--- a/frontend/src/pages/GeneralSettings/ChatEmbedWidgets/EmbedConfigs/EmbedRow/EditEmbedModal/index.jsx
+++ b/frontend/src/pages/GeneralSettings/ChatEmbedWidgets/EmbedConfigs/EmbedRow/EditEmbedModal/index.jsx
@@ -94,6 +94,12 @@ export default function EditEmbedModal({ embed, closeModal }) {
              hint="Allow setting of the system prompt to override the workspace default."
              defaultValue={embed.allow_prompt_override}
            />
+
Error: {error}
}
diff --git a/frontend/src/pages/GeneralSettings/ChatEmbedWidgets/EmbedConfigs/EmbedRow/index.jsx b/frontend/src/pages/GeneralSettings/ChatEmbedWidgets/EmbedConfigs/EmbedRow/index.jsx
index 10fa42db1ac..703f4d9d94d 100644
--- a/frontend/src/pages/GeneralSettings/ChatEmbedWidgets/EmbedConfigs/EmbedRow/index.jsx
+++ b/frontend/src/pages/GeneralSettings/ChatEmbedWidgets/EmbedConfigs/EmbedRow/index.jsx
@@ -74,9 +74,17 @@ export default function EmbedRow({ embed }) {
           href={paths.workspace.chat(embed.workspace.slug)}
           target="_blank"
           rel="noreferrer"
-          className="text-white flex items-center hover:underline"
+          className="text-white flex items-center hover:underline gap-x-2"
         >
           {embed.workspace.name}
+          {embed.allow_agent && (
+
+            @
+
+          )}
Error: {error}
}
@@ -354,7 +361,7 @@ export const BooleanInput = ({ name, title, hint, defaultValue = null }) => {
-          onClick={() => setStatus(!status)}
+          onChange={() => setStatus(!status)}
           checked={status}
           className="peer sr-only pointer-events-none"
         />
diff --git a/server/__tests__/utils/chats/embed.test.js b/server/__tests__/utils/chats/embed.test.js
new file mode 100644
index 00000000000..e32337f2fb5
--- /dev/null
+++ b/server/__tests__/utils/chats/embed.test.js
@@ -0,0 +1,422 @@
+const { streamChatWithForEmbed } = require("../../../utils/chats/embed");
+const { EphemeralAgentHandler, EphemeralEventListener } = require("../../../utils/agents/ephemeral");
+const { EmbedChats } = require("../../../models/embedChats");
+const { Telemetry } = require("../../../models/telemetry");
+const { writeResponseChunk } = require("../../../utils/helpers/chat/responses");
+
+// Mock all external dependencies
+jest.mock("../../../utils/agents/ephemeral");
+jest.mock("../../../models/embedChats");
+jest.mock("../../../models/telemetry");
+jest.mock("../../../utils/helpers/chat/responses");
+jest.mock("../../../utils/helpers", () => ({
+  getVectorDbClass: jest.fn(),
+  getLLMProvider: jest.fn(),
+}));
+jest.mock("../../../utils/DocumentManager", () => ({
+  DocumentManager: jest.fn().mockImplementation(() => ({
+    pinnedDocs: jest.fn().mockResolvedValue([]),
+  })),
+}));
+jest.mock("uuid", () => ({
+  v4: jest.fn(() => "test-uuid-123"),
+}));
+
+// Mock environment variables
+process.env.STORAGE_DIR = "/tmp/test-storage";
+
+describe("streamChatWithForEmbed", () => {
+  let mockResponse;
+  let mockEmbed;
+  let mockEventListener;
+  let mockAgentHandler;
+
+  beforeEach(() => {
+    // Reset all mocks
+    jest.clearAllMocks();
+
+    // Mock response object
+    mockResponse = {
+      locals: {
+        connection: {
+          host: "test.com",
+          ip: "127.0.0.1",
+        },
+      },
+    };
+
+    // Mock embed config
+    mockEmbed = {
+      id: 1,
+      chat_mode: "chat",
+      allow_model_override: false,
+      allow_temperature_override: false,
+      allow_prompt_override: false,
+      allow_agent: false,
+      workspace: {
+        id: 1,
+        slug: "test-workspace",
+        agentProvider: "openai",
+        agentModel: "gpt-4",
+        openAiPrompt: "Default prompt",
+        openAiTemp: 0.7,
+      },
+    };
+
+    // Mock agent handler and event listener
+    mockAgentHandler = {
+      init: jest.fn().mockResolvedValue({}),
+      createAIbitat: jest.fn().mockResolvedValue({}),
+      startAgentCluster: jest.fn().mockReturnValue({}),
+    };
+
+    mockEventListener = {
+      streamAgentEvents: jest.fn().mockResolvedValue({
+        thoughts: ["thinking about the problem"],
+        textResponse: "Agent response",
+      }),
+    };
+
+    EphemeralAgentHandler.mockImplementation(() => mockAgentHandler);
+    EphemeralEventListener.mockImplementation(() => mockEventListener);
+    EphemeralAgentHandler.isAgentInvocation = jest.fn();
+
+    // Mock other dependencies
+    EmbedChats.new = jest.fn().mockResolvedValue({ id: 1 });
+    Telemetry.sendTelemetry = jest.fn().mockResolvedValue({});
+    writeResponseChunk.mockImplementation(() => {});
+  });
+
+  describe("Agent Detection and Handling", () => {
+    test("should detect agent invocation and handle agent request", async () => {
+      // Arrange
+      EphemeralAgentHandler.isAgentInvocation.mockReturnValue(true);
+      mockEmbed.workspace.agentProvider = "openai";
+
+      // Act
+      await streamChatWithForEmbed(
+        mockResponse,
+        mockEmbed,
+        "@agent help me",
+        "session-123",
+        { username: "testuser" }
+      );
+
+      // Assert
+      expect(EphemeralAgentHandler.isAgentInvocation).toHaveBeenCalledWith({
+        message: "@agent help me",
+      });
+      expect(Telemetry.sendTelemetry).toHaveBeenCalledWith("agent_chat_started");
+      expect(EphemeralAgentHandler).toHaveBeenCalledWith({
+        uuid: expect.any(String),
+        workspace: mockEmbed.workspace,
+        prompt: "@agent help me",
+        userId: null,
+        threadId: null,
+        sessionId: "session-123",
+      });
+    });
+
+    test("should initialize agent handler correctly", async () => {
+      // Arrange
+      EphemeralAgentHandler.isAgentInvocation.mockReturnValue(true);
+
+      // Act
+      await streamChatWithForEmbed(
+        mockResponse,
+        mockEmbed,
+        "@agent test",
+        "session-123",
+        {}
+      );
+
+      // Assert
+      expect(mockAgentHandler.init).toHaveBeenCalled();
+      expect(mockAgentHandler.createAIbitat).toHaveBeenCalledWith({
+        handler: expect.any(Object),
+      });
+      expect(mockAgentHandler.startAgentCluster).toHaveBeenCalled();
+    });
+
+    test("should stream agent events and save to embed chats", async () => {
+      // Arrange
+      EphemeralAgentHandler.isAgentInvocation.mockReturnValue(true);
+      const mockUuid = "test-uuid";
+      require("uuid").v4 = jest.fn().mockReturnValue(mockUuid);
+
+      // Act
+      await streamChatWithForEmbed(
+        mockResponse,
+        mockEmbed,
+        "@agent help",
+        "session-123",
+        { username: "testuser" }
+      );
+
+      // Assert
+      expect(mockEventListener.streamAgentEvents).toHaveBeenCalledWith(
+        mockResponse,
+        mockUuid
+      );
+      expect(EmbedChats.new).toHaveBeenCalledWith({
+        embedId: mockEmbed.id,
+        prompt: "@agent help",
+        response: {
+          text: "Agent response",
+          type: "chat",
+          sources: [],
+          thoughts: ["thinking about the problem"],
+        },
+        connection_information: {
+          host: "test.com",
+          ip: "127.0.0.1",
+          username: "testuser",
+        },
+        sessionId: "session-123",
+      });
+    });
+
+    test("should not trigger agent flow for non-agent messages", async () => {
+      // Arrange
+      EphemeralAgentHandler.isAgentInvocation.mockReturnValue(false);
+
+      // Act - This would normally continue to regular chat flow, but we'll just test the agent check
+      try {
+        await streamChatWithForEmbed(
+          mockResponse,
+          mockEmbed,
+          "regular message",
+          "session-123",
+          {}
+        );
+      } catch (error) {
+        // Expected to fail since we haven't mocked the full chat flow
+      }
+
+      // Assert
+      expect(EphemeralAgentHandler.isAgentInvocation).toHaveBeenCalledWith({
+        message: "regular message",
+      });
+      expect(Telemetry.sendTelemetry).not.toHaveBeenCalledWith("agent_chat_started");
+      expect(EphemeralAgentHandler).not.toHaveBeenCalled();
+    });
+  });
+
+  describe("Other Configuration Overrides", () => {
+    test("should apply prompt override when allowed", async () => {
+      // Arrange
+      EphemeralAgentHandler.isAgentInvocation.mockReturnValue(true);
+      mockEmbed.allow_prompt_override = true;
+
+      // Act
+      await streamChatWithForEmbed(
+        mockResponse,
+        mockEmbed,
+        "@agent test",
+        "session-123",
+        {
+          promptOverride: "Custom prompt",
+        }
+      );
+
+      // Assert
+      expect(mockEmbed.workspace.openAiPrompt).toBe("Custom prompt");
+    });
+
+    test("should apply temperature override when allowed", async () => {
+      // Arrange
+      EphemeralAgentHandler.isAgentInvocation.mockReturnValue(true);
+      mockEmbed.allow_temperature_override = true;
+
+      // Act
+      await streamChatWithForEmbed(
+        mockResponse,
+        mockEmbed,
+        "@agent test",
+        "session-123",
+        {
+          temperatureOverride: "0.9",
+        }
+      );
+
+      // Assert
+      expect(mockEmbed.workspace.openAiTemp).toBe(0.9);
+    });
+
+    test("should apply model override when allowed", async () => {
+      // Arrange
+      EphemeralAgentHandler.isAgentInvocation.mockReturnValue(true);
+      mockEmbed.allow_model_override = true;
+
+      // This test would need to mock the LLMProvider logic which happens later in the flow
+      // For now, we just test that the override parameter is accepted
+      await streamChatWithForEmbed(
+        mockResponse,
+        mockEmbed,
+        "@agent test",
+        "session-123",
+        {
+          modelOverride: "gpt-4-turbo",
+        }
+      );
+
+      // Assert - The modelOverride is used later in LLM provider setup
+      expect(EphemeralAgentHandler).toHaveBeenCalled();
+    });
+  });
+
+  describe("Connection Information Handling", () => {
+    test("should save connection info with username when provided", async () => {
+      // Arrange
+      EphemeralAgentHandler.isAgentInvocation.mockReturnValue(true);
+
+      // Act
+      await streamChatWithForEmbed(
+        mockResponse,
+        mockEmbed,
+        "@agent test",
+        "session-123",
+        { username: "john_doe" }
+      );
+
+      // Assert
+      expect(EmbedChats.new).toHaveBeenCalledWith({
+        embedId: mockEmbed.id,
+        prompt: "@agent test",
+        response: expect.any(Object),
+        connection_information: {
+          host: "test.com",
+          ip: "127.0.0.1",
+          username: "john_doe",
+        },
+        sessionId: "session-123",
+      });
+    });
+
+    test("should save connection info without username when not provided", async () => {
+      // Arrange
+      EphemeralAgentHandler.isAgentInvocation.mockReturnValue(true);
+
+      // Act
+      await streamChatWithForEmbed(
+        mockResponse,
+        mockEmbed,
+        "@agent test",
+        "session-123",
+        {}
+      );
+
+      // Assert
+      expect(EmbedChats.new).toHaveBeenCalledWith({
+        embedId: mockEmbed.id,
+        prompt: "@agent test",
+        response: expect.any(Object),
+        connection_information: {
+          host: "test.com",
+          ip: "127.0.0.1",
+          username: null,
+        },
+        sessionId: "session-123",
+      });
+    });
+
+    test("should handle missing connection info gracefully", async () => {
+      // Arrange
+      EphemeralAgentHandler.isAgentInvocation.mockReturnValue(true);
+      mockResponse.locals.connection = null;
+
+      // Act
+      await streamChatWithForEmbed(
+        mockResponse,
+        mockEmbed,
+        "@agent test",
+        "session-123",
+        { username: "testuser" }
+      );
+
+      // Assert
+      expect(EmbedChats.new).toHaveBeenCalledWith({
+        embedId: mockEmbed.id,
+        prompt: "@agent test",
+        response: expect.any(Object),
+        connection_information: { username: "testuser" },
+        sessionId: "session-123",
+      });
+    });
+  });
+
+  describe("Error Handling", () => {
+    test("should handle agent handler initialization failure", async () => {
+      // Arrange
+      EphemeralAgentHandler.isAgentInvocation.mockReturnValue(true);
+      mockAgentHandler.init.mockRejectedValue(new Error("Init failed"));
+
+      // Act & Assert
+      await expect(
+        streamChatWithForEmbed(
+          mockResponse,
+          mockEmbed,
+          "@agent test",
+          "session-123",
+          {}
+        )
+      ).rejects.toThrow("Init failed");
+    });
+
+    test("should handle event listener streaming failure", async () => {
+      // Arrange
+      EphemeralAgentHandler.isAgentInvocation.mockReturnValue(true);
+      mockEventListener.streamAgentEvents.mockRejectedValue(
+        new Error("Streaming failed")
+      );
+
+      // Act & Assert
+      await expect(
+        streamChatWithForEmbed(
+          mockResponse,
+          mockEmbed,
+          "@agent test",
+          "session-123",
+          {}
+        )
+      ).rejects.toThrow("Streaming failed");
+    });
+
+    test("should handle embed chat save failure", async () => {
+      // Arrange
+      EphemeralAgentHandler.isAgentInvocation.mockReturnValue(true);
+      EmbedChats.new.mockRejectedValue(new Error("DB save failed"));
+
+      // Act & Assert
+      await expect(
+        streamChatWithForEmbed(
+          mockResponse,
+          mockEmbed,
+          "@agent test",
+          "session-123",
+          {}
+        )
+      ).rejects.toThrow("DB save failed");
+    });
+  });
+});
+
+describe("Agent Detection Utility Tests", () => {
+  beforeEach(() => {
+    jest.clearAllMocks();
+  });
+
+  test("should correctly identify agent messages", () => {
+    // These tests would normally test the actual WorkspaceAgentInvocation.parseAgents
+    // but since we're mocking EphemeralAgentHandler.isAgentInvocation, we test our usage
+
+    EphemeralAgentHandler.isAgentInvocation.mockReturnValue(true);
+    const result = EphemeralAgentHandler.isAgentInvocation({ message: "@agent hello" });
+    expect(result).toBe(true);
+
+    EphemeralAgentHandler.isAgentInvocation.mockReturnValue(false);
+    const result2 = EphemeralAgentHandler.isAgentInvocation({ message: "hello world" });
+    expect(result2).toBe(false);
+  });
+});
diff --git a/server/models/embedConfig.js b/server/models/embedConfig.js
index 202c5a68fbd..94bfc7a6f91 100644
--- a/server/models/embedConfig.js
+++ b/server/models/embedConfig.js
@@ -10,6 +10,7 @@ const EmbedConfig = {
     "allow_model_override",
     "allow_temperature_override",
     "allow_prompt_override",
+    "allow_agent",
     "max_chats_per_day",
     "max_chats_per_session",
     "chat_mode",
@@ -40,6 +41,7 @@ const EmbedConfig = {
         data?.allow_prompt_override,
         "allow_prompt_override"
       ),
+      allow_agent: validatedCreationData(data?.allow_agent, "allow_agent"),
       max_chats_per_day: validatedCreationData(
         data?.max_chats_per_day,
         "max_chats_per_day"
@@ -188,6 +190,7 @@ const BOOLEAN_KEYS = [
   "allow_model_override",
   "allow_temperature_override",
   "allow_prompt_override",
+  "allow_agent",
   "enabled",
 ];
diff --git a/server/prisma/migrations/20250929000000_init/migration.sql b/server/prisma/migrations/20250929000000_init/migration.sql
new file mode 100644
index 00000000000..8be0fa7dc54
--- /dev/null
+++ b/server/prisma/migrations/20250929000000_init/migration.sql
@@ -0,0 +1,2 @@
+-- AlterTable
+ALTER TABLE "embed_configs" ADD COLUMN "allow_agent" BOOLEAN NOT NULL DEFAULT false;
diff --git a/server/prisma/schema.prisma b/server/prisma/schema.prisma
index a3db69f1e2b..2e3d4699668 100644
--- a/server/prisma/schema.prisma
+++ b/server/prisma/schema.prisma
@@ -242,6 +242,7 @@ model embed_configs {
   allow_model_override       Boolean  @default(false)
   allow_temperature_override Boolean  @default(false)
   allow_prompt_override      Boolean  @default(false)
+  allow_agent                Boolean  @default(false)
   max_chats_per_day          Int?
   max_chats_per_session      Int?
  message_limit              Int?     @default(20)
diff --git a/server/utils/chats/embed.js b/server/utils/chats/embed.js
index d9320241b35..af9fa86fa0c 100644
--- a/server/utils/chats/embed.js
+++ b/server/utils/chats/embed.js
@@ -7,6 +7,11 @@ const {
   writeResponseChunk,
 } = require("../helpers/chat/responses");
 const { DocumentManager } = require("../DocumentManager");
+const {
+  EphemeralAgentHandler,
+  EphemeralEventListener,
+} = require("../agents/ephemeral");
+const { Telemetry } = require("../../models/telemetry");
 
 async function streamChatWithForEmbed(
   response,
@@ -28,6 +33,51 @@ async function streamChatWithForEmbed(
     embed.workspace.openAiTemp = parseFloat(temperatureOverride);
 
   const uuid = uuidv4();
+
+  // Check for agent invocation - if message starts with @agent, handle as agent request
+  if (EphemeralAgentHandler.isAgentInvocation({ message })) {
+    await Telemetry.sendTelemetry("agent_chat_started");
+
+    // Initialize the EphemeralAgentHandler to handle agent conversation over HTTP
+    const agentHandler = new EphemeralAgentHandler({
+      uuid,
+      workspace: embed.workspace,
+      prompt: message,
+      userId: null, // Embed users are anonymous
+      threadId: null, // Embeds don't support threads
+      sessionId,
+    });
+
+    // Establish event listener that emulates websocket calls in Aibitat
+    // so that we can keep the same interface but use HTTP for embed
+    const eventListener = new EphemeralEventListener();
+    await agentHandler.init();
+    await agentHandler.createAIbitat({ handler: eventListener });
+    agentHandler.startAgentCluster();
+
+    // Stream back agent events and save to embed chat history
+    return eventListener
+      .streamAgentEvents(response, uuid)
+      .then(async ({ thoughts, textResponse }) => {
+        await EmbedChats.new({
+          embedId: embed.id,
+          prompt: message,
+          response: {
+            text: textResponse,
+            type: chatMode,
+            sources: [],
+            thoughts,
+          },
+          connection_information: response.locals.connection
+            ? {
+                ...response.locals.connection,
+                username: username ? String(username) : null,
+              }
+            : { username: username ? String(username) : null },
+          sessionId,
+        });
+      });
+  }
 
   const LLMConnector = getLLMProvider({
     provider: embed?.workspace?.chatProvider,
     model: chatModel ?? embed.workspace?.chatModel,
diff --git a/server/utils/middleware/embedMiddleware.js b/server/utils/middleware/embedMiddleware.js
index b8f29992998..d6a63f61baa 100644
--- a/server/utils/middleware/embedMiddleware.js
+++ b/server/utils/middleware/embedMiddleware.js
@@ -3,6 +3,7 @@ const { VALID_CHAT_MODE } = require("../chats/stream");
 const { EmbedChats } = require("../../models/embedChats");
 const { EmbedConfig } = require("../../models/embedConfig");
 const { reqBody } = require("../http");
+const { EphemeralAgentHandler } = require("../agents/ephemeral");
 
 // Finds or Aborts request for a /:embedId/ url. This should always
 // be the first middleware and the :embedID should be in the URL.
@@ -104,6 +105,23 @@ async function canRespond(request, response, next) {
     return;
   }
 
+  // Check if this is an agent request and if the workspace has agent configuration
+  if (EphemeralAgentHandler.isAgentInvocation({ message })) {
+    if (!embed.allow_agent) {
+      response.status(400).json({
+        id: uuidv4(),
+        type: "abort",
+        textResponse: null,
+        sources: [],
+        close: true,
+        error: true,
+        errorMsg:
+          "Agent mode is not enabled for this chat. Contact the system administrator.",
+      });
+      return;
+    }
+  }
+
   if (
     !isNaN(embed.max_chats_per_day) &&
     Number(embed.max_chats_per_day) > 0