Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 34 additions & 1 deletion src/ChatView.tsx
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
import { ItemView, WorkspaceLeaf } from 'obsidian'
import { ItemView, TFile, WorkspaceLeaf } from 'obsidian'
import React from 'react'
import { Root, createRoot } from 'react-dom/client'

Expand Down Expand Up @@ -28,6 +28,8 @@ export class ChatView extends ItemView {
) {
super(leaf)
this.initialChatProps = plugin.initialChatProps
// Store reference to this view in the plugin
plugin.chatView = this
}

getViewType() {
Expand All @@ -50,6 +52,9 @@ export class ChatView extends ItemView {
}

/**
 * Lifecycle hook invoked by Obsidian when this view is closed.
 * Clears the plugin's back-reference to this view and unmounts the
 * React root so its resources are released.
 */
async onClose() {
  // Only null out the reference if it still points at this instance;
  // a newer ChatView may already have registered itself on the plugin.
  if (this.plugin.chatView === this) {
    this.plugin.chatView = null
  }
  this.root?.unmount()
}

Expand Down Expand Up @@ -126,4 +131,32 @@ export class ChatView extends ItemView {
/**
 * Moves keyboard focus to the chat message input by delegating to the
 * Chat component through its imperative ref (no-op while the ref is
 * not yet attached).
 */
focusMessage() {
  this.chatRef.current?.focusMessage()
}

/**
 * Loads a specific conversation by ID.
 *
 * Delegates to the Chat component through its imperative ref.
 *
 * @param conversationId Identifier of the conversation to load
 * @returns The Chat component's load promise, or undefined when the
 *          ref is not yet attached
 */
loadConversation(conversationId: string) {
  return this.chatRef.current?.loadConversation(conversationId)
}

/**
 * Creates a new chat seeded with a text block and submits it immediately.
 *
 * @param blockData The text block to add to the chat
 * @returns Promise resolving to the new conversation ID, or undefined
 *          when the Chat component is not mounted
 */
async createAndSubmitChatFromBlock(blockData: {
  file: TFile
  text: string
  startLine: number
  endLine: number
}): Promise<string | undefined> {
  const { file, text, startLine, endLine } = blockData
  // Adapt the incoming shape to MentionableBlockData ('text' -> 'content')
  const mentionableBlock: MentionableBlockData = {
    file,
    content: text,
    startLine,
    endLine,
  }
  return this.chatRef.current?.createAndSubmitChat(mentionableBlock)
}
}
58 changes: 58 additions & 0 deletions src/components/chat-view/Chat.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,8 @@ export type ChatRef = {
openNewChat: (selectedBlock?: MentionableBlockData) => void
addSelectionToChat: (selectedBlock: MentionableBlockData) => void
focusMessage: () => void
loadConversation: (conversationId: string) => Promise<void>
createAndSubmitChat: (selectedBlock: MentionableBlockData) => Promise<string>
}

export type ChatProps = {
Expand Down Expand Up @@ -496,6 +498,62 @@ const Chat = forwardRef<ChatRef, ChatProps>((props, ref) => {
useImperativeHandle(ref, () => ({
openNewChat: (selectedBlock?: MentionableBlockData) =>
handleNewChat(selectedBlock),
// Delegate directly to the existing conversation loader.
loadConversation: async (conversationId: string) =>
  handleLoadConversation(conversationId),
createAndSubmitChat: async (
  selectedBlock: MentionableBlockData,
): Promise<string> => {
  // BUG FIX: generate the new conversation id HERE instead of reading
  // currentConversationId after handleNewChat(). handleNewChat updates
  // the id through setState, which is asynchronous, so reading the
  // state variable immediately afterwards yields the PREVIOUS
  // conversation's id — the method would return the wrong id and
  // submit the first message to the old conversation.
  const newConversationId = uuidv4()

  // Reset the chat UI for a fresh conversation with the selected block.
  handleNewChat(selectedBlock)

  // Override the id generated inside handleNewChat; React applies
  // queued state updates in call order, so this value wins.
  setCurrentConversationId(newConversationId)

  // Build the first user message, mentioning both the active file and
  // the selected block.
  const newMessage: ChatUserMessage = {
    role: 'user',
    content: null,
    promptContent: null,
    id: uuidv4(),
    mentionables: [
      {
        type: 'current-file',
        file: app.workspace.getActiveFile(),
      },
      {
        type: 'block',
        ...selectedBlock,
      },
    ],
  }

  // Compile the message into a prompt (may run vault search and report
  // progress through setQueryProgress).
  const compiledMessage = await promptGenerator.compileUserMessagePrompt({
    message: newMessage,
    useVaultSearch: true,
    onQueryProgressChange: setQueryProgress,
  })

  const compiledMessages = [
    {
      ...newMessage,
      promptContent: compiledMessage.promptContent,
      similaritySearchResults: compiledMessage.similaritySearchResults,
    },
  ]

  // Show the compiled message in the UI and submit it for a response,
  // using the same id we will return to the caller.
  setChatMessages(compiledMessages)
  submitChatMutation.mutate({
    chatMessages: compiledMessages,
    conversationId: newConversationId,
  })

  return newConversationId
},
Comment on lines +505 to +556
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🛠️ Refactor suggestion

⚠️ Potential issue

Conversation ID captured before state update – returned ID may be stale

handleNewChat(selectedBlock) triggers setCurrentConversationId(uuidv4()), but setState is asynchronous.
Immediately after the call you read currentConversationId into newConversationId, which still contains the old conversation id, so the method is very likely to return the wrong id and to submit the first message to the previous conversation.

-  // Create a new chat
-  handleNewChat(selectedBlock)
-
-  // Get the new conversation ID
-  const newConversationId = currentConversationId
+  // Create a new chat **and capture the newly-generated id**.
+  const newConversationId = uuidv4()
+  setCurrentConversationId(newConversationId)
+
+  // Reset chat state
+  handleNewChat(selectedBlock)   // adapt handleNewChat to accept an id or remove its own uuid generation

Consider returning the id from handleNewChat, or accepting the generated id as an argument, so that the value used everywhere is guaranteed to be the same.

Committable suggestion skipped: line range outside the PR's diff.

addSelectionToChat: (selectedBlock: MentionableBlockData) => {
const mentionable: Omit<MentionableBlock, 'id'> = {
type: 'block',
Expand Down
204 changes: 204 additions & 0 deletions src/core/chat/ChatService.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,204 @@
import { App, TFile } from 'obsidian'
import { v4 as uuidv4 } from 'uuid'

import { getChatModelClient } from '../../core/llm/manager'
import { McpManager } from '../../core/mcp/mcpManager'
import { RAGEngine } from '../../core/rag/ragEngine'
import { ChatManager } from '../../database/json/chat/ChatManager'
import { serializeChatMessage } from '../../hooks/useChatHistory'
import { SmartComposerSettings } from '../../settings/schema/setting.types'
import { ChatMessage, ChatUserMessage } from '../../types/chat'
import { MentionableBlockData } from '../../types/mentionable'
import { plainTextToEditorState } from '../../utils/chat/plain-text-to-editor-state'
import { PromptGenerator } from '../../utils/chat/promptGenerator'
import { ResponseGenerator } from '../../utils/chat/responseGenerator'

export type CreateChatOptions = {
blocks?: MentionableBlockData[]
file?: TFile | null
}

export class ChatService {
private readonly app: App
private readonly settings: SmartComposerSettings
private readonly chatManager: ChatManager
private readonly getRAGEngine: () => Promise<RAGEngine>
private readonly getMcpManager: () => Promise<McpManager>

/** Track when each conversation's initial streaming finishes. */
private readonly streamDoneMap = new Map<string, Promise<void>>()

/**
 * @param params Collaborators injected by the plugin: the Obsidian app,
 *   plugin settings, the chat persistence manager, and lazy getters for
 *   the RAG engine and MCP manager.
 */
constructor(params: {
  app: App
  settings: SmartComposerSettings
  chatManager: ChatManager
  getRAGEngine: () => Promise<RAGEngine>
  getMcpManager: () => Promise<McpManager>
}) {
  const { app, settings, chatManager, getRAGEngine, getMcpManager } = params
  this.app = app
  this.settings = settings
  this.chatManager = chatManager
  this.getRAGEngine = getRAGEngine
  this.getMcpManager = getMcpManager
}

/**
* Create a chat in the background, submit the first user message, and start
* streaming a response. Returns the new conversation id immediately.
*/
/**
 * Create a chat in the background, submit the first user message, and start
 * streaming a response. Returns the new conversation id immediately.
 *
 * @param initialText Plain-text content of the first user message
 * @param opts Optional mentionable blocks and the file attached as the
 *   'current-file' mentionable (defaults to the active file)
 * @returns The id of the newly created conversation
 */
public async createChat(
  initialText: string,
  opts: CreateChatOptions = {},
): Promise<string> {
  const { blocks = [], file = this.app.workspace.getActiveFile() } = opts

  // 1. Build the first user message, mentioning the file and any blocks.
  const firstMessage: ChatUserMessage = {
    role: 'user',
    content: plainTextToEditorState(initialText),
    promptContent: null,
    id: uuidv4(),
    mentionables: [
      { type: 'current-file', file },
      ...blocks.map((b) => ({ type: 'block' as const, ...b })),
    ],
  }

  // 2. Compile the message into a prompt.
  const promptGenerator = new PromptGenerator(
    this.getRAGEngine,
    this.app,
    this.settings,
  )

  type CompilePromptResult = Awaited<
    ReturnType<PromptGenerator['compileUserMessagePrompt']>
  >

  let compiled: CompilePromptResult
  try {
    compiled = await promptGenerator.compileUserMessagePrompt({
      message: firstMessage,
      useVaultSearch: false, // avoid RAG unless user explicitly adds vault mentionable
    })
  } catch (error) {
    // If RAG/embeddings fail due to a missing or invalid key / base URL,
    // fall back to a plain-text prompt rather than failing chat creation.
    const {
      LLMAPIKeyNotSetException,
      LLMAPIKeyInvalidException,
      LLMBaseUrlNotSetException,
    } = await import('../../core/llm/exception')

    if (
      error instanceof LLMAPIKeyNotSetException ||
      error instanceof LLMAPIKeyInvalidException ||
      error instanceof LLMBaseUrlNotSetException
    ) {
      console.warn('Embeddings unavailable. Falling back to simple prompt.')
      compiled = {
        promptContent: [
          {
            type: 'text' as const,
            // content is always set above, so the fallback prompt is
            // simply the raw initial text
            text: initialText,
          },
        ],
        shouldUseRAG: false,
      } as CompilePromptResult
    } else {
      throw error
    }
  }

  const compiledMessages: ChatMessage[] = [
    {
      ...firstMessage,
      promptContent: compiled.promptContent,
      similaritySearchResults: compiled.similaritySearchResults,
    },
  ]

  // 3. Persist the chat to disk so it appears in history immediately.
  const title = initialText.trim().substring(0, 50) || 'New chat'
  const chat = await this.chatManager.createChat({
    title,
    messages: compiledMessages.map(serializeChatMessage),
  })

  // 4. Kick off streaming in the background (fire-and-forget). Track the
  // completion promise so waitUntilFinished() can observe it, and remove
  // the entry once the stream settles so the map does not grow without
  // bound over a long-running session (previously entries were never
  // deleted, leaking one promise per conversation).
  const donePromise = this.startStreamingResponse({
    conversationId: chat.id,
    initialMessages: compiledMessages,
    promptGenerator,
  })

  this.streamDoneMap.set(chat.id, donePromise)
  void donePromise.finally(() => {
    this.streamDoneMap.delete(chat.id)
  })

  return chat.id
}

/**
* Returns a promise that resolves when the assistant response for the
* given conversation finishes streaming. If no stream is running it
* resolves immediately.
*/
/**
 * Resolves once the assistant response for the given conversation has
 * finished streaming; resolves immediately when no stream is being
 * tracked for that conversation.
 */
public waitUntilFinished(conversationId: string): Promise<void> {
  const pending = this.streamDoneMap.get(conversationId)
  if (pending === undefined) {
    return Promise.resolve()
  }
  return pending
}

/**
 * Streams the assistant response for a conversation and persists every
 * intermediate update to disk via the ChatManager, so the UI can show
 * progress by re-reading the stored chat.
 *
 * Returns a promise that settles when the stream finishes. Errors from
 * the stream itself are caught and logged (never rethrown), so callers
 * may safely fire-and-forget this method.
 */
private async startStreamingResponse({
  conversationId,
  initialMessages,
  promptGenerator,
}: {
  conversationId: string
  initialMessages: ChatMessage[]
  promptGenerator: PromptGenerator
}) {
  try {
    // Resolve the provider client and model configured in settings.
    const { providerClient, model } = getChatModelClient({
      settings: this.settings,
      modelId: this.settings.chatModelId,
    })

    const mcpManager = await this.getMcpManager()

    const responseGenerator = new ResponseGenerator({
      providerClient,
      model,
      messages: initialMessages,
      conversationId,
      enableTools: this.settings.chatOptions.enableTools,
      maxAutoIterations: this.settings.chatOptions.maxAutoIterations,
      promptGenerator,
      mcpManager,
    })

    // Full transcript so far: the initial messages plus whatever the
    // generator has produced in its latest update.
    let latestMessages: ChatMessage[] = [...initialMessages]

    responseGenerator.subscribe(async (messages) => {
      latestMessages = [...initialMessages, ...messages]
      // Persist each update so UI sees progress
      try {
        await this.chatManager.updateChat(conversationId, {
          messages: latestMessages.map(serializeChatMessage),
        })
      } catch (err) {
        const typedErr = err as { code?: string } | undefined
        // Rapid successive writes can make the previous file already
        // gone when the internal delete runs; ignore that benign case.
        // NOTE(review): a non-ENOENT error rethrown here surfaces as a
        // rejected promise inside an async subscriber — confirm that
        // ResponseGenerator.subscribe handles async callback rejections.
        if (typedErr?.code !== 'ENOENT') {
          throw err
        }
      }
    })

    // Run without awaiting – but ensure we handle errors
    return responseGenerator.run().catch((err) => {
      console.error('ChatService stream error', err)
    })
  } catch (error) {
    // Setup failures (model resolution, MCP manager) are logged and
    // swallowed so chat creation itself does not fail.
    console.error('Failed to stream chat response', error)
    return
  }
}
}
4 changes: 3 additions & 1 deletion src/hooks/useChatHistory.ts
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,9 @@ export function useChatHistory(): UseChatHistory {
}
}

const serializeChatMessage = (message: ChatMessage): SerializedChatMessage => {
export const serializeChatMessage = (
message: ChatMessage,
): SerializedChatMessage => {
switch (message.role) {
case 'user':
return {
Expand Down
Loading