20 changes: 16 additions & 4 deletions webview-ui/src/components/chat/ChatView.tsx
@@ -589,13 +589,14 @@ const ChatViewComponent: React.ForwardRefRenderFunction<ChatViewRef, ChatViewPro
	 * Handles sending messages to the extension
	 * @param text - The message text to send
	 * @param images - Array of image data URLs to send with the message
+	 * @param fromQueue - Whether this message is being sent from the queue (should not clear input)
	 */
	const handleSendMessage = useCallback(
-		(text: string, images: string[]) => {
+		(text: string, images: string[], fromQueue: boolean = false) => {
			text = text.trim()

			if (text || images.length > 0) {
-				if (sendingDisabled) {
+				if (sendingDisabled && !fromQueue) {
Copilot AI Sep 10, 2025

With !fromQueue added, this condition no longer re-queues messages that are already coming from the queue; they proceed to be sent even while sending is disabled. Queued messages should be able to be sent even when sendingDisabled is true, since they were already queued when sending was enabled.

Contributor

Is this the intended behavior? The condition if (sendingDisabled && !fromQueue) means that when fromQueue is true, the message will proceed to be sent even if sendingDisabled is true. This allows queued messages to bypass the sending disabled state. Could you confirm this is the desired behavior for your use case?
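
For reference, a minimal sketch of the two paths this guard produces (names are taken from the diff; the surrounding send logic is elided):

```tsx
// Sketch only: illustrates the guard in handleSendMessage as changed by this PR.
if (sendingDisabled && !fromQueue) {
	// Typed while the agent is busy: park the message in the queue.
	vscode.postMessage({ type: "queueMessage", text, images })
} else {
	// Either sending is enabled, or this message is being replayed from the queue
	// (fromQueue === true), so it bypasses the disabled check and is sent normally.
	// ...existing newTask / askResponse send path...
}
```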

					try {
						console.log("queueMessage", text, images)
						vscode.postMessage({ type: "queueMessage", text, images })
@@ -648,7 +649,11 @@ const ChatViewComponent: React.ForwardRefRenderFunction<ChatViewRef, ChatViewPro
					vscode.postMessage({ type: "askResponse", askResponse: "messageResponse", text, images })
				}

-				handleChatReset()
+				// Only reset the chat if this is not a queued message being processed
+				// When fromQueue is true, we preserve the current input value
+				if (!fromQueue) {
+					handleChatReset()
+				}
			}
		},
		[handleChatReset, markFollowUpAsAnswered, sendingDisabled], // messagesRef and clineAskRef are stable
@@ -809,7 +814,13 @@ const ChatViewComponent: React.ForwardRefRenderFunction<ChatViewRef, ChatViewPro
					handleChatReset()
					break
				case "sendMessage":
-					handleSendMessage(message.text ?? "", message.images ?? [])
+					// Check if this message matches any queued message to determine if it's from the queue
+					const isFromQueue = messageQueue.some(
+						(queuedMsg) =>
+							queuedMsg.text === message.text &&
+							JSON.stringify(queuedMsg.images || []) === JSON.stringify(message.images || []),
Contributor

I agree with the previous comment - using JSON.stringify for array comparison is inefficient, especially for large images. Consider using a more efficient array comparison method:

Suggested change
-							JSON.stringify(queuedMsg.images || []) === JSON.stringify(message.images || []),
+					const isFromQueue = messageQueue.some(
+						(queuedMsg) =>
+							queuedMsg.text === message.text &&
+							queuedMsg.images?.length === message.images?.length &&
+							queuedMsg.images?.every((img, idx) => img === message.images?.[idx])
+					)

Or better yet, use a deep equality utility like lodash's isEqual.
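
For illustration, a sketch of that lodash variant (this assumes lodash, or lodash.isEqual, is already available to the webview bundle; the names follow the surrounding diff):

```tsx
import { isEqual } from "lodash"

// Same queue lookup, but using a structural equality check instead of JSON.stringify.
const isFromQueue = messageQueue.some(
	(queuedMsg) =>
		queuedMsg.text === message.text &&
		isEqual(queuedMsg.images ?? [], message.images ?? []),
)
```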

+					)
Comment on lines +818 to +822
Copilot AI Sep 10, 2025

Using JSON.stringify for array comparison is inefficient and could be problematic for large images. Consider using a more efficient array comparison method or a deep equality check.

+					handleSendMessage(message.text ?? "", message.images ?? [], isFromQueue)
					break
				case "setChatBoxMessage":
					handleSetChatBoxMessage(message.text ?? "", message.images ?? [])
@@ -846,6 +857,7 @@ const ChatViewComponent: React.ForwardRefRenderFunction<ChatViewRef, ChatViewPro
			handleSetChatBoxMessage,
			handlePrimaryButtonClick,
			handleSecondaryButtonClick,
+			messageQueue,
		],
	)

@@ -0,0 +1,213 @@
import React from "react"
import { render, waitFor } from "@testing-library/react"
import userEvent from "@testing-library/user-event"
import { vi } from "vitest"
import { QueryClient, QueryClientProvider } from "@tanstack/react-query"
import "@testing-library/jest-dom"

// Mock dependencies before importing components
vi.mock("@src/utils/vscode", () => ({
	vscode: {
		postMessage: vi.fn(),
	},
}))

vi.mock("use-sound", () => ({
	default: () => [vi.fn()],
}))

// Mock the extension state hook
vi.mock("@src/context/ExtensionStateContext", async () => {
	const actual = await vi.importActual("@src/context/ExtensionStateContext")
	return {
		...actual,
		useExtensionState: vi.fn(() => ({
			clineMessages: [],
			taskHistory: [],
			apiConfiguration: { apiProvider: "test" },
			messageQueue: [],
			mode: "code",
			customModes: [],
			setMode: vi.fn(),
		})),
	}
})

// Now import components after all mocks are set up
import ChatView from "../ChatView"
import { ExtensionStateContextProvider, useExtensionState } from "@src/context/ExtensionStateContext"
import { vscode } from "@src/utils/vscode"

// Set up global mock
;(global as any).acquireVsCodeApi = () => ({
	postMessage: vi.fn(),
	getState: () => ({}),
	setState: vi.fn(),
})

const queryClient = new QueryClient({
	defaultOptions: {
		queries: { retry: false },
		mutations: { retry: false },
	},
})

const renderChatView = () => {
	return render(
		<ExtensionStateContextProvider>
			<QueryClientProvider client={queryClient}>
				<ChatView isHidden={false} showAnnouncement={false} hideAnnouncement={vi.fn()} />
			</QueryClientProvider>
		</ExtensionStateContextProvider>,
	)
}

describe("ChatView - Queued Messages", () => {
	beforeEach(() => {
		vi.clearAllMocks()
	})

	it("should preserve input text when processing queued messages", async () => {
		// Mock the state with a queued message
		const mockUseExtensionState = useExtensionState as any
		mockUseExtensionState.mockReturnValue({
			clineMessages: [],
			taskHistory: [],
			apiConfiguration: { apiProvider: "test" },
			messageQueue: [
				{
					id: "queue-1",
					text: "Queued message",
					images: [],
					timestamp: Date.now(),
				},
			],
			mode: "code",
			customModes: [],
			setMode: vi.fn(),
		})

		const { container } = renderChatView()

		// Find the textarea
		const textarea = container.querySelector("textarea") as HTMLTextAreaElement
		expect(textarea).toBeTruthy()

		// User types new text while message is queued
		await userEvent.type(textarea, "New text typed by user")
		expect(textarea.value).toBe("New text typed by user")

		// Simulate backend processing the queued message by sending invoke message
		const invokeMessage = new MessageEvent("message", {
			data: {
				type: "invoke",
				invoke: "sendMessage",
				text: "Queued message",
				images: [],
			},
		})
		window.dispatchEvent(invokeMessage)

		// Wait for any async operations
		await waitFor(() => {
			// The input should still contain the user's typed text
			expect(textarea.value).toBe("New text typed by user")
		})

		// Verify the queued message was sent
		expect(vscode.postMessage).toHaveBeenCalledWith(
			expect.objectContaining({
				type: expect.stringMatching(/newTask|askResponse/),
			}),
		)
	})

	it("should clear input when sending a regular message (not from queue)", async () => {
		// Mock the state with no queued messages
		const mockUseExtensionState = useExtensionState as any
		mockUseExtensionState.mockReturnValue({
			clineMessages: [],
			taskHistory: [],
			apiConfiguration: { apiProvider: "test" },
			messageQueue: [], // No queued messages
			mode: "code",
			customModes: [],
			setMode: vi.fn(),
		})

		const { container } = renderChatView()

		// Find the textarea
		const textarea = container.querySelector("textarea") as HTMLTextAreaElement
		expect(textarea).toBeTruthy()

		// User types text
		await userEvent.type(textarea, "Regular message")
		expect(textarea.value).toBe("Regular message")

		// Simulate backend sending invoke message for a non-queued message
		const invokeMessage = new MessageEvent("message", {
			data: {
				type: "invoke",
				invoke: "sendMessage",
				text: "Different message not in queue",
				images: [],
			},
		})
		window.dispatchEvent(invokeMessage)

		// Wait for any async operations
		await waitFor(() => {
			// The input should be cleared since this is not a queued message
			expect(textarea.value).toBe("")
		})
	})

	it("should handle messages with images correctly", async () => {
		// Mock the state with a queued message with image
		const mockUseExtensionState = useExtensionState as any
		mockUseExtensionState.mockReturnValue({
			clineMessages: [],
			taskHistory: [],
			apiConfiguration: { apiProvider: "test" },
			messageQueue: [
				{
					id: "queue-2",
					text: "Message with image",
					images: ["data:image/png;base64,abc123"],
					timestamp: Date.now(),
				},
			],
			mode: "code",
			customModes: [],
			setMode: vi.fn(),
		})

		const { container } = renderChatView()

		// Find the textarea
		const textarea = container.querySelector("textarea") as HTMLTextAreaElement
		expect(textarea).toBeTruthy()

		// User types new text
		await userEvent.type(textarea, "User typing while image message queued")
		expect(textarea.value).toBe("User typing while image message queued")

		// Simulate backend processing the queued message with image
		const invokeMessage = new MessageEvent("message", {
			data: {
				type: "invoke",
				invoke: "sendMessage",
				text: "Message with image",
				images: ["data:image/png;base64,abc123"],
			},
		})
		window.dispatchEvent(invokeMessage)

		// Wait for any async operations
		await waitFor(() => {
			// The input should still contain the user's typed text
			expect(textarea.value).toBe("User typing while image message queued")
		})
	})
})
Contributor

Great test coverage! Consider adding a few more edge case tests:

  • Multiple queued messages being processed sequentially
  • Queue processing when sending becomes re-enabled
  • Race conditions between user input and queue processing

These would help ensure the feature remains robust as the codebase evolves.
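
For the first bullet, a rough sketch of what such a test could look like, reusing the mocks and helpers from this file (the queued message texts are made up for the example):

```tsx
it("should process multiple queued messages sequentially without clearing input", async () => {
	const mockUseExtensionState = useExtensionState as any
	mockUseExtensionState.mockReturnValue({
		clineMessages: [],
		taskHistory: [],
		apiConfiguration: { apiProvider: "test" },
		messageQueue: [
			{ id: "queue-1", text: "First queued message", images: [], timestamp: Date.now() },
			{ id: "queue-2", text: "Second queued message", images: [], timestamp: Date.now() },
		],
		mode: "code",
		customModes: [],
		setMode: vi.fn(),
	})

	const { container } = renderChatView()
	const textarea = container.querySelector("textarea") as HTMLTextAreaElement
	await userEvent.type(textarea, "Still typing")

	// Simulate the backend draining the queue one message at a time
	for (const text of ["First queued message", "Second queued message"]) {
		window.dispatchEvent(
			new MessageEvent("message", {
				data: { type: "invoke", invoke: "sendMessage", text, images: [] },
			}),
		)
	}

	// The user's draft should survive both queued sends
	await waitFor(() => expect(textarea.value).toBe("Still typing"))
})
```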
