-
Notifications
You must be signed in to change notification settings - Fork 171
Expose API methods to create and open chats from other plugins #382
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Open
taydr
wants to merge
3
commits into
glowingjade:main
Choose a base branch
from
taydr:feature/obsidian-chat-api
base: main
Could not load branches
Branch not found: {{ refName }}
Loading
Could not load tags
Nothing to show
Loading
Are you sure you want to change the base?
Some commits from the old base branch may be removed from the timeline,
and old review comments may become outdated.
Open
Changes from all commits
Commits
File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,204 @@ | ||
| import { App, TFile } from 'obsidian' | ||
| import { v4 as uuidv4 } from 'uuid' | ||
|
|
||
| import { getChatModelClient } from '../../core/llm/manager' | ||
| import { McpManager } from '../../core/mcp/mcpManager' | ||
| import { RAGEngine } from '../../core/rag/ragEngine' | ||
| import { ChatManager } from '../../database/json/chat/ChatManager' | ||
| import { serializeChatMessage } from '../../hooks/useChatHistory' | ||
| import { SmartComposerSettings } from '../../settings/schema/setting.types' | ||
| import { ChatMessage, ChatUserMessage } from '../../types/chat' | ||
| import { MentionableBlockData } from '../../types/mentionable' | ||
| import { plainTextToEditorState } from '../../utils/chat/plain-text-to-editor-state' | ||
| import { PromptGenerator } from '../../utils/chat/promptGenerator' | ||
| import { ResponseGenerator } from '../../utils/chat/responseGenerator' | ||
|
|
||
/** Options accepted by `ChatService.createChat`. */
export type CreateChatOptions = {
  /** Block-level mentionables attached to the first user message. */
  blocks?: MentionableBlockData[]
  /** File attached as the `current-file` mentionable; when omitted, the workspace's active file is used (may be null). */
  file?: TFile | null
}
|
|
||
| export class ChatService { | ||
| private readonly app: App | ||
| private readonly settings: SmartComposerSettings | ||
| private readonly chatManager: ChatManager | ||
| private readonly getRAGEngine: () => Promise<RAGEngine> | ||
| private readonly getMcpManager: () => Promise<McpManager> | ||
|
|
||
| /** Track when each conversation's initial streaming finishes. */ | ||
| private readonly streamDoneMap = new Map<string, Promise<void>>() | ||
|
|
||
| constructor(params: { | ||
| app: App | ||
| settings: SmartComposerSettings | ||
| chatManager: ChatManager | ||
| getRAGEngine: () => Promise<RAGEngine> | ||
| getMcpManager: () => Promise<McpManager> | ||
| }) { | ||
| this.app = params.app | ||
| this.settings = params.settings | ||
| this.chatManager = params.chatManager | ||
| this.getRAGEngine = params.getRAGEngine | ||
| this.getMcpManager = params.getMcpManager | ||
| } | ||
|
|
||
| /** | ||
| * Create a chat in the background, submit the first user message, and start | ||
| * streaming a response. Returns the new conversation id immediately. | ||
| */ | ||
| public async createChat( | ||
| initialText: string, | ||
| opts: CreateChatOptions = {}, | ||
| ): Promise<string> { | ||
| const { blocks = [], file = this.app.workspace.getActiveFile() } = opts | ||
|
|
||
| // 1. Build first user message | ||
| const firstMessage: ChatUserMessage = { | ||
| role: 'user', | ||
| content: plainTextToEditorState(initialText), | ||
| promptContent: null, | ||
| id: uuidv4(), | ||
| mentionables: [ | ||
| { type: 'current-file', file }, | ||
| ...blocks.map((b) => ({ type: 'block' as const, ...b })), | ||
| ], | ||
| } | ||
|
|
||
| // 2. Build PromptGenerator | ||
| const promptGenerator = new PromptGenerator( | ||
| this.getRAGEngine, | ||
| this.app, | ||
| this.settings, | ||
| ) | ||
|
|
||
| type CompilePromptResult = Awaited< | ||
| ReturnType<PromptGenerator['compileUserMessagePrompt']> | ||
| > | ||
|
|
||
| let compiled: CompilePromptResult | ||
| try { | ||
| compiled = await promptGenerator.compileUserMessagePrompt({ | ||
| message: firstMessage, | ||
| useVaultSearch: false, // avoid RAG unless user explicitly adds vault mentionable | ||
| }) | ||
| } catch (error) { | ||
| // If RAG/embeddings fail due to missing API key, fall back to plain text prompt | ||
| const { | ||
| LLMAPIKeyNotSetException, | ||
| LLMAPIKeyInvalidException, | ||
| LLMBaseUrlNotSetException, | ||
| } = await import('../../core/llm/exception') | ||
|
|
||
| if ( | ||
| error instanceof LLMAPIKeyNotSetException || | ||
| error instanceof LLMAPIKeyInvalidException || | ||
| error instanceof LLMBaseUrlNotSetException | ||
| ) { | ||
| console.warn('Embeddings unavailable. Falling back to simple prompt.') | ||
| compiled = { | ||
| promptContent: [ | ||
| { | ||
| type: 'text' as const, | ||
| text: `${firstMessage.content ? initialText : ''}`, | ||
| }, | ||
| ], | ||
| shouldUseRAG: false, | ||
| } as CompilePromptResult | ||
| } else { | ||
| throw error | ||
| } | ||
| } | ||
|
|
||
| const compiledMessages: ChatMessage[] = [ | ||
| { | ||
| ...firstMessage, | ||
| promptContent: compiled.promptContent, | ||
| similaritySearchResults: compiled.similaritySearchResults, | ||
| }, | ||
| ] | ||
|
|
||
| // 3. Create chat on disk | ||
| const title = initialText.trim().substring(0, 50) || 'New chat' | ||
| const chat = await this.chatManager.createChat({ | ||
| title, | ||
| messages: compiledMessages.map(serializeChatMessage), | ||
| }) | ||
|
|
||
| // 4. Kick off streaming in background (fire-and-forget) | ||
| const donePromise = this.startStreamingResponse({ | ||
| conversationId: chat.id, | ||
| initialMessages: compiledMessages, | ||
| promptGenerator, | ||
| }) | ||
|
|
||
| this.streamDoneMap.set(chat.id, donePromise) | ||
|
|
||
|
Comment on lines
+128
to
+135
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. 🛠️ Refactor suggestion
You store every this.streamDoneMap.set(chat.id, donePromise)
donePromise.finally(() => {
this.streamDoneMap.delete(chat.id)
}) |
||
| return chat.id | ||
| } | ||
|
|
||
| /** | ||
| * Returns a promise that resolves when the assistant response for the | ||
| * given conversation finishes streaming. If no stream is running it | ||
| * resolves immediately. | ||
| */ | ||
| public waitUntilFinished(conversationId: string): Promise<void> { | ||
| return this.streamDoneMap.get(conversationId) ?? Promise.resolve() | ||
| } | ||
|
|
||
| private async startStreamingResponse({ | ||
| conversationId, | ||
| initialMessages, | ||
| promptGenerator, | ||
| }: { | ||
| conversationId: string | ||
| initialMessages: ChatMessage[] | ||
| promptGenerator: PromptGenerator | ||
| }) { | ||
| try { | ||
| const { providerClient, model } = getChatModelClient({ | ||
| settings: this.settings, | ||
| modelId: this.settings.chatModelId, | ||
| }) | ||
|
|
||
| const mcpManager = await this.getMcpManager() | ||
|
|
||
| const responseGenerator = new ResponseGenerator({ | ||
| providerClient, | ||
| model, | ||
| messages: initialMessages, | ||
| conversationId, | ||
| enableTools: this.settings.chatOptions.enableTools, | ||
| maxAutoIterations: this.settings.chatOptions.maxAutoIterations, | ||
| promptGenerator, | ||
| mcpManager, | ||
| }) | ||
|
|
||
| let latestMessages: ChatMessage[] = [...initialMessages] | ||
|
|
||
| responseGenerator.subscribe(async (messages) => { | ||
| latestMessages = [...initialMessages, ...messages] | ||
| // Persist each update so UI sees progress | ||
| try { | ||
| await this.chatManager.updateChat(conversationId, { | ||
| messages: latestMessages.map(serializeChatMessage), | ||
| }) | ||
| } catch (err) { | ||
| const typedErr = err as { code?: string } | undefined | ||
| // Rapid successive writes can make the previous file already | ||
| // gone when the internal delete runs; ignore that benign case. | ||
| if (typedErr?.code !== 'ENOENT') { | ||
| throw err | ||
| } | ||
| } | ||
| }) | ||
|
|
||
| // Run without awaiting – but ensure we handle errors | ||
| return responseGenerator.run().catch((err) => { | ||
| console.error('ChatService stream error', err) | ||
| }) | ||
| } catch (error) { | ||
| console.error('Failed to stream chat response', error) | ||
| return | ||
| } | ||
| } | ||
| } | ||
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
🛠️ Refactor suggestion
Conversation ID captured before state update – returned ID may be stale
`handleNewChat(selectedBlock)` triggers `setCurrentConversationId(uuidv4())`, but `setState` is asynchronous. Immediately after the call you read
`currentConversationId` into `newConversationId`, which still contains the old conversation id, so the method is very likely to return the wrong id and to submit the first message to the previous conversation. Consider returning the id from
`handleNewChat`, or accepting the generated id as an argument, so that the value used everywhere is guaranteed to be the same.