Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
258 changes: 249 additions & 9 deletions webview-ui/src/components/chat/ChatView.tsx
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import { forwardRef, useCallback, useEffect, useImperativeHandle, useMemo, useRef, useState } from "react"
import { useDeepCompareEffect, useEvent, useMount } from "react-use"
import debounce from "debounce"
import { Virtuoso, type VirtuosoHandle } from "react-virtuoso"
import { Virtuoso, type VirtuosoHandle, type ListRange } from "react-virtuoso"
import removeMd from "remove-markdown"
import { VSCodeButton } from "@vscode/webview-ui-toolkit/react"
import useSound from "use-sound"
Expand Down Expand Up @@ -189,6 +189,19 @@ const ChatViewComponent: React.ForwardRefRenderFunction<ChatViewRef, ChatViewPro
const userRespondedRef = useRef<boolean>(false)
const [currentFollowUpTs, setCurrentFollowUpTs] = useState<number | null>(null)

// Pagination state - Initialize to show last 20 messages
// visibleRange is an index window { start, end } into the grouped message list;
// `end` is exclusive, matching the Array.prototype.slice usage in windowedMessages.
const [visibleRange, setVisibleRange] = useState(() => {
// Initialize with a default range that will be updated when messages load
// NOTE(review): the literal 20 here must stay in sync with VISIBLE_MESSAGE_COUNT
// below (the initial-load detection compares against that constant); consider
// deriving one from the other to avoid drift.
return { start: 0, end: 20 }
})
// Flags that drive the Virtuoso Header/Footer loading indicators while the
// window is being extended toward the top or bottom of the conversation.
const [isLoadingTop, setIsLoadingTop] = useState(false)
const [isLoadingBottom, setIsLoadingBottom] = useState(false)

// Buffer configuration
const VISIBLE_MESSAGE_COUNT = 20 // size of the core visible window
const BUFFER_SIZE = 20 // extra rows kept rendered above and below the window
const LOAD_THRESHOLD = 5 // Load more when within 5 messages of edge

const clineAskRef = useRef(clineAsk)
useEffect(() => {
clineAskRef.current = clineAsk
Expand Down Expand Up @@ -452,6 +465,15 @@ const ChatViewComponent: React.ForwardRefRenderFunction<ChatViewRef, ChatViewPro
retryCountRef.current.clear()
}, [task?.ts])

// Reset the pagination window whenever a *different* task is opened.
// Depend only on task.ts (the task's identity): the previous dependency array
// also included the `task` object itself, so any re-render that produced a new
// task reference (e.g. while messages stream in) re-ran this effect and wiped
// the user's scroll-back window mid-conversation. VISIBLE_MESSAGE_COUNT is a
// render-stable constant and needs no dependency entry.
useEffect(() => {
	if (task) {
		// The actual range is recomputed once grouped messages are processed.
		setVisibleRange({ start: 0, end: VISIBLE_MESSAGE_COUNT })
	}
	// eslint-disable-next-line react-hooks/exhaustive-deps
}, [task?.ts])

useEffect(() => {
if (isHidden) {
everVisibleMessagesTsRef.current.clear()
Expand Down Expand Up @@ -1306,6 +1328,214 @@ const ChatViewComponent: React.ForwardRefRenderFunction<ChatViewRef, ChatViewPro
return result
}, [isCondensing, visibleMessages])

// Update visible range when groupedMessages changes.
// Keeps the pagination window anchored sensibly as the message list grows:
// snap to the bottom on first load, follow the tail while the user is at the
// bottom, and otherwise leave the user's scroll-back window untouched.
useEffect(() => {
if (groupedMessages.length > 0) {
setVisibleRange((prev) => {
const totalMessages = groupedMessages.length

// If this looks like initial load (we're at default range and have many messages)
// NOTE(review): this heuristic also matches a user who legitimately scrolled to
// the very top of a conversation whose window happens to equal the default range;
// confirm that forcing them back to the bottom here is acceptable.
if (prev.start === 0 && prev.end === VISIBLE_MESSAGE_COUNT && totalMessages > VISIBLE_MESSAGE_COUNT) {
// Start at the bottom of the conversation
return {
start: Math.max(0, totalMessages - VISIBLE_MESSAGE_COUNT),
end: totalMessages,
}
}

// If we're already at the end and new messages arrived, adjust to include them
// NOTE(review): `prev.end === totalMessages - 1` only matches when exactly one
// message was appended (end is exclusive, so "at the end" is prev.end === old
// total); multi-message arrivals rely entirely on the isAtBottom fallback —
// verify this is intended.
if (prev.end === totalMessages - 1 || (isAtBottom && totalMessages > prev.end)) {
return {
start: Math.max(0, totalMessages - VISIBLE_MESSAGE_COUNT),
end: totalMessages,
}
}

// Otherwise keep current range (user has scrolled back in history).
return prev
})
}
// groupedMessages.length is read inside the functional updater, so only the
// length (not the array identity) needs to appear in the dependency list.
}, [groupedMessages.length, isAtBottom])

// Derive the slice of grouped messages actually handed to Virtuoso: the
// visible window plus BUFFER_SIZE extra rows on each side. Also reports the
// slice's position (startIndex/endIndex) and how many rows were cut off at
// each edge (startPadding/endPadding) for virtualization placeholders.
const windowedMessages = useMemo(() => {
	const total = groupedMessages.length
	const maxWindow = VISIBLE_MESSAGE_COUNT + BUFFER_SIZE * 2 // 60 rows total

	// Small conversations are rendered in full — no windowing needed.
	if (total <= maxWindow) {
		return {
			messages: groupedMessages,
			startIndex: 0,
			endIndex: total,
			totalCount: total,
			startPadding: 0,
			endPadding: 0,
		}
	}

	// Clamp the buffered window to the bounds of the message list.
	const from = Math.max(0, visibleRange.start - BUFFER_SIZE)
	const to = Math.min(total, visibleRange.end + BUFFER_SIZE)

	return {
		messages: groupedMessages.slice(from, to),
		startIndex: from,
		endIndex: to,
		totalCount: total,
		startPadding: from,
		endPadding: Math.max(0, total - to),
	}
}, [groupedMessages, visibleRange])

// Loading functions
const loadMoreMessagesTop = useCallback(() => {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

There's a potential race condition here with the loading states. If a user scrolls rapidly in both directions, multiple setTimeout callbacks could be queued, leading to inconsistent state. Consider using a ref to track the latest loading operation and cancel previous ones.

setIsLoadingTop(true)

// Simulate async loading with setTimeout (in real implementation, this would be instant)
setTimeout(() => {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is the 100ms setTimeout intentional here? This adds artificial latency to the pagination. Since the messages are already in memory, the loading should be instant. Consider removing the setTimeout or making it configurable if it's needed for UX reasons.

setVisibleRange((prev) => ({
start: Math.max(0, prev.start - VISIBLE_MESSAGE_COUNT),
end: prev.end,
}))
setIsLoadingTop(false)
}, 100)
}, [])

// Extend the visible window downward when the user nears the bottom edge.
// The messages are already in memory, so the window is extended synchronously:
// the previous implementation wrapped this in a 100ms setTimeout, which added
// artificial latency and let rapid up/down scrolling queue multiple timer
// callbacks that raced over the isLoadingBottom flag. With no async work there
// is nothing to indicate, so the loading flag is no longer toggled here.
const loadMoreMessagesBottom = useCallback(() => {
	setVisibleRange((prev) => ({
		start: prev.start,
		// Never extend past the end of the grouped message list.
		end: Math.min(groupedMessages.length, prev.end + VISIBLE_MESSAGE_COUNT),
	}))
}, [groupedMessages.length])

// Debounced range change handler to prevent excessive updates
const debouncedRangeChanged = useMemo(
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The memory leak risk here is concerning. The debouncedRangeChanged function is recreated on every render because its dependency array includes several values that change frequently. This means the cleanup in the useEffect might not properly cancel the previous debounced function.

Consider using useRef to store the debounced function or restructuring the dependencies to be more stable.

() =>
debounce((range: ListRange) => {
const { startIndex, endIndex } = range

// Check if we need to load more messages at the top
if (startIndex <= LOAD_THRESHOLD && visibleRange.start > 0 && !isLoadingTop) {
loadMoreMessagesTop()
}

// Check if we need to load more messages at the bottom
if (
endIndex >= windowedMessages.messages.length - LOAD_THRESHOLD &&
visibleRange.end < groupedMessages.length &&
!isLoadingBottom
) {
loadMoreMessagesBottom()
}
}, 100),
[
visibleRange,
groupedMessages.length,
isLoadingTop,
isLoadingBottom,
windowedMessages.messages.length,
loadMoreMessagesTop,
loadMoreMessagesBottom,
],
)

// Scroll position tracking: forward Virtuoso's rendered-range updates to the
// debounced handler that decides whether more messages must be loaded.
const handleRangeChanged = useCallback(
	(range: ListRange) => debouncedRangeChanged(range),
	[debouncedRangeChanged],
)

// Cleanup debounced function on unmount
useEffect(() => {
return () => {
if (debouncedRangeChanged && typeof (debouncedRangeChanged as any).cancel === "function") {
;(debouncedRangeChanged as any).cancel()
Comment on lines +1466 to +1467
Copy link

Copilot AI Aug 2, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The type casting to any for accessing the cancel method is not type-safe. Consider defining a proper type for the debounced function or importing the appropriate type from the debounce library.

Suggested change
if (debouncedRangeChanged && typeof (debouncedRangeChanged as any).cancel === "function") {
;(debouncedRangeChanged as any).cancel()
if (debouncedRangeChanged && typeof debouncedRangeChanged.cancel === "function") {
debouncedRangeChanged.cancel()

Copilot uses AI. Check for mistakes.
}
}
}, [debouncedRangeChanged])

// TEMPORARY DEBUGGING: Memory usage monitoring
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Could we add a TODO comment here to clarify when this temporary debugging code should be removed? Something like:

Suggested change
// TEMPORARY DEBUGGING: Memory usage monitoring
// TEMPORARY DEBUGGING: Memory usage monitoring
// TODO: Remove after verifying pagination performance in production (target: v2.x.x)

useEffect(() => {
// Only run in browsers that support performance.memory (Chrome/Edge)
if (!("memory" in performance)) {
Copy link

Copilot AI Aug 2, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The memory monitoring code should use proper type checking. Cast performance to any before accessing the memory property to avoid TypeScript issues, or use a proper type guard.

Copilot uses AI. Check for mistakes.
console.log("[ChatView Memory Monitor] performance.memory API not available in this browser")
return
}

const logMemoryUsage = () => {
const now = new Date()
const timestamp = now.toTimeString().split(" ")[0] // HH:MM:SS format

// Get memory info
const memoryInfo = (performance as any).memory
Copy link

Copilot AI Aug 2, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Consider adding a runtime check to ensure memoryInfo exists before accessing its properties, as the memory API might be undefined even when the property exists.

Suggested change
const memoryInfo = (performance as any).memory
const memoryInfo = (performance as any).memory
if (!memoryInfo) {
console.log(`[ChatView Memory Monitor - ${timestamp}] performance.memory is undefined`)
return
}

Copilot uses AI. Check for mistakes.
const heapUsedMB = (memoryInfo.usedJSHeapSize / 1048576).toFixed(1)

// Get message counts
const messagesInDOM = windowedMessages.messages.length
const totalMessages = groupedMessages.length

// Get visible range info
const visibleStart = windowedMessages.startIndex
const visibleEnd = windowedMessages.endIndex
const bufferStart = Math.max(0, visibleRange.start - BUFFER_SIZE)
const bufferEnd = Math.min(groupedMessages.length, visibleRange.end + BUFFER_SIZE)

// Check if pagination is active
const isPaginationActive = groupedMessages.length > VISIBLE_MESSAGE_COUNT

// Format and log the information
console.log(
`[ChatView Memory Monitor - ${timestamp}]\n` +
`- Heap Used: ${heapUsedMB} MB\n` +
`- Messages in DOM: ${messagesInDOM} / ${totalMessages} total\n` +
`- Visible Range: ${visibleStart}-${visibleEnd} (buffer: ${bufferStart}-${bufferEnd})\n` +
`- Pagination Active: ${isPaginationActive}`,
)
}

// Log immediately
logMemoryUsage()

// Set up interval to log every 5 seconds
const intervalId = setInterval(logMemoryUsage, 5000)

// Cleanup on unmount
return () => {
clearInterval(intervalId)
}
}, [
windowedMessages.messages.length,
windowedMessages.startIndex,
windowedMessages.endIndex,
groupedMessages.length,
visibleRange,
BUFFER_SIZE,
VISIBLE_MESSAGE_COUNT,
])
// END TEMPORARY DEBUGGING

// Loading indicator component
const LoadingIndicator = () => (
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The LoadingIndicator component could benefit from accessibility improvements. Consider adding an aria-label:

Suggested change
const LoadingIndicator = () => (
const LoadingIndicator = () => (
<div className="flex justify-center items-center py-4" role="status" aria-label="Loading more messages">
<div className="animate-spin rounded-full h-6 w-6 border-b-2 border-vscode-progressBar-background"></div>
<span className="sr-only">Loading...</span>
</div>
)

<div className="flex justify-center items-center py-4">
<div className="animate-spin rounded-full h-6 w-6 border-b-2 border-vscode-progressBar-background"></div>
</div>
)

// scrolling

const scrollToBottomSmooth = useMemo(
Expand Down Expand Up @@ -1471,12 +1701,15 @@ const ChatViewComponent: React.ForwardRefRenderFunction<ChatViewRef, ChatViewPro

const itemContent = useCallback(
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Consider adding error boundaries around the windowed message rendering to gracefully handle any rendering failures. This would prevent the entire chat from crashing if there's an issue with a specific message.

(index: number, messageOrGroup: ClineMessage | ClineMessage[]) => {
// Get the actual index in the original array
const actualIndex = windowedMessages.startIndex + index

// browser session group
if (Array.isArray(messageOrGroup)) {
return (
<BrowserSessionRow
messages={messageOrGroup}
isLast={index === groupedMessages.length - 1}
isLast={actualIndex === groupedMessages.length - 1}
lastModifiedMessage={modifiedMessages.at(-1)}
onHeightChange={handleRowHeightChange}
isStreaming={isStreaming}
Expand All @@ -1499,7 +1732,7 @@ const ChatViewComponent: React.ForwardRefRenderFunction<ChatViewRef, ChatViewPro
isExpanded={expandedRows[messageOrGroup.ts] || false}
onToggleExpand={toggleRowExpansion} // This was already stabilized
lastModifiedMessage={modifiedMessages.at(-1)} // Original direct access
isLast={index === groupedMessages.length - 1} // Original direct access
isLast={actualIndex === groupedMessages.length - 1} // Use actual index
onHeightChange={handleRowHeightChange}
isStreaming={isStreaming}
onSuggestionClick={handleSuggestionClickInRow} // This was already stabilized
Expand Down Expand Up @@ -1528,6 +1761,7 @@ const ChatViewComponent: React.ForwardRefRenderFunction<ChatViewRef, ChatViewPro
)
},
[
windowedMessages.startIndex,
expandedRows,
toggleRowExpansion,
modifiedMessages,
Expand Down Expand Up @@ -1842,21 +2076,27 @@ const ChatViewComponent: React.ForwardRefRenderFunction<ChatViewRef, ChatViewPro
<div className="grow flex" ref={scrollContainerRef}>
<Virtuoso
ref={virtuosoRef}
key={task.ts} // trick to make sure virtuoso re-renders when task changes, and we use initialTopMostItemIndex to start at the bottom
key={task.ts}
className="scrollable grow overflow-y-scroll mb-1"
// increasing top by 3_000 to prevent jumping around when user collapses a row
increaseViewportBy={{ top: 3_000, bottom: Number.MAX_SAFE_INTEGER }} // hack to make sure the last message is always rendered to get truly perfect scroll to bottom animation when new messages are added (Number.MAX_SAFE_INTEGER is safe for arithmetic operations, which is all virtuoso uses this value for in src/sizeRangeSystem.ts)
data={groupedMessages} // messages is the raw format returned by extension, modifiedMessages is the manipulated structure that combines certain messages of related type, and visibleMessages is the filtered structure that removes messages that should not be rendered
data={windowedMessages.messages}
itemContent={itemContent}
rangeChanged={handleRangeChanged}
overscan={5} // Render 5 extra items outside viewport
components={{
Header: () => (isLoadingTop ? <LoadingIndicator /> : null),
Footer: () => (isLoadingBottom ? <LoadingIndicator /> : null),
}}
atBottomStateChange={(isAtBottom) => {
setIsAtBottom(isAtBottom)
if (isAtBottom) {
disableAutoScrollRef.current = false
}
setShowScrollToBottom(disableAutoScrollRef.current && !isAtBottom)
}}
atBottomThreshold={10} // anything lower causes issues with followOutput
initialTopMostItemIndex={groupedMessages.length - 1}
atBottomThreshold={10}
initialTopMostItemIndex={
windowedMessages.messages.length > 0 ? windowedMessages.messages.length - 1 : 0
}
/>
</div>
<div className={`flex-initial min-h-0 ${!areButtonsVisible ? "mb-1" : ""}`}>
Expand Down
Loading
Loading