-
Notifications
You must be signed in to change notification settings - Fork 2.5k
feat: implement dynamic pagination for ChatView to optimize performance #6575
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change | ||||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| @@ -1,7 +1,7 @@ | ||||||||||||||||
| import { forwardRef, useCallback, useEffect, useImperativeHandle, useMemo, useRef, useState } from "react" | ||||||||||||||||
| import { useDeepCompareEffect, useEvent, useMount } from "react-use" | ||||||||||||||||
| import debounce from "debounce" | ||||||||||||||||
| import { Virtuoso, type VirtuosoHandle } from "react-virtuoso" | ||||||||||||||||
| import { Virtuoso, type VirtuosoHandle, type ListRange } from "react-virtuoso" | ||||||||||||||||
| import removeMd from "remove-markdown" | ||||||||||||||||
| import { VSCodeButton } from "@vscode/webview-ui-toolkit/react" | ||||||||||||||||
| import useSound from "use-sound" | ||||||||||||||||
|
|
@@ -189,6 +189,19 @@ const ChatViewComponent: React.ForwardRefRenderFunction<ChatViewRef, ChatViewPro | |||||||||||||||
| const userRespondedRef = useRef<boolean>(false) | ||||||||||||||||
| const [currentFollowUpTs, setCurrentFollowUpTs] = useState<number | null>(null) | ||||||||||||||||
|
|
||||||||||||||||
| // Pagination state - Initialize to show last 20 messages | ||||||||||||||||
| const [visibleRange, setVisibleRange] = useState(() => { | ||||||||||||||||
| // Initialize with a default range that will be updated when messages load | ||||||||||||||||
| return { start: 0, end: 20 } | ||||||||||||||||
| }) | ||||||||||||||||
| const [isLoadingTop, setIsLoadingTop] = useState(false) | ||||||||||||||||
| const [isLoadingBottom, setIsLoadingBottom] = useState(false) | ||||||||||||||||
|
|
||||||||||||||||
| // Buffer configuration | ||||||||||||||||
| const VISIBLE_MESSAGE_COUNT = 20 | ||||||||||||||||
| const BUFFER_SIZE = 20 | ||||||||||||||||
| const LOAD_THRESHOLD = 5 // Load more when within 5 messages of edge | ||||||||||||||||
|
|
||||||||||||||||
| const clineAskRef = useRef(clineAsk) | ||||||||||||||||
| useEffect(() => { | ||||||||||||||||
| clineAskRef.current = clineAsk | ||||||||||||||||
|
|
@@ -452,6 +465,15 @@ const ChatViewComponent: React.ForwardRefRenderFunction<ChatViewRef, ChatViewPro | |||||||||||||||
| retryCountRef.current.clear() | ||||||||||||||||
| }, [task?.ts]) | ||||||||||||||||
|
|
||||||||||||||||
| // Separate effect to handle initial pagination setup when task changes | ||||||||||||||||
| useEffect(() => { | ||||||||||||||||
| if (task) { | ||||||||||||||||
| // Reset pagination state when task changes | ||||||||||||||||
| // The actual range will be set when messages are processed | ||||||||||||||||
| setVisibleRange({ start: 0, end: VISIBLE_MESSAGE_COUNT }) | ||||||||||||||||
| } | ||||||||||||||||
| }, [task?.ts, task, VISIBLE_MESSAGE_COUNT]) | ||||||||||||||||
|
|
||||||||||||||||
| useEffect(() => { | ||||||||||||||||
| if (isHidden) { | ||||||||||||||||
| everVisibleMessagesTsRef.current.clear() | ||||||||||||||||
|
|
@@ -1306,6 +1328,214 @@ const ChatViewComponent: React.ForwardRefRenderFunction<ChatViewRef, ChatViewPro | |||||||||||||||
| return result | ||||||||||||||||
| }, [isCondensing, visibleMessages]) | ||||||||||||||||
|
|
||||||||||||||||
| // Update visible range when groupedMessages changes | ||||||||||||||||
| useEffect(() => { | ||||||||||||||||
| if (groupedMessages.length > 0) { | ||||||||||||||||
| setVisibleRange((prev) => { | ||||||||||||||||
| const totalMessages = groupedMessages.length | ||||||||||||||||
|
|
||||||||||||||||
| // If this looks like initial load (we're at default range and have many messages) | ||||||||||||||||
| if (prev.start === 0 && prev.end === VISIBLE_MESSAGE_COUNT && totalMessages > VISIBLE_MESSAGE_COUNT) { | ||||||||||||||||
| // Start at the bottom of the conversation | ||||||||||||||||
| return { | ||||||||||||||||
| start: Math.max(0, totalMessages - VISIBLE_MESSAGE_COUNT), | ||||||||||||||||
| end: totalMessages, | ||||||||||||||||
| } | ||||||||||||||||
| } | ||||||||||||||||
|
|
||||||||||||||||
| // If we're already at the end and new messages arrived, adjust to include them | ||||||||||||||||
| if (prev.end === totalMessages - 1 || (isAtBottom && totalMessages > prev.end)) { | ||||||||||||||||
| return { | ||||||||||||||||
| start: Math.max(0, totalMessages - VISIBLE_MESSAGE_COUNT), | ||||||||||||||||
| end: totalMessages, | ||||||||||||||||
| } | ||||||||||||||||
| } | ||||||||||||||||
|
|
||||||||||||||||
| // Otherwise keep current range | ||||||||||||||||
| return prev | ||||||||||||||||
| }) | ||||||||||||||||
| } | ||||||||||||||||
| }, [groupedMessages.length, isAtBottom]) | ||||||||||||||||
|
|
||||||||||||||||
| // Message windowing logic | ||||||||||||||||
| const windowedMessages = useMemo(() => { | ||||||||||||||||
| const totalWindowSize = VISIBLE_MESSAGE_COUNT + BUFFER_SIZE * 2 // 60 messages total | ||||||||||||||||
|
|
||||||||||||||||
| // Handle small conversations (less than total window size) | ||||||||||||||||
| if (groupedMessages.length <= totalWindowSize) { | ||||||||||||||||
| return { | ||||||||||||||||
| messages: groupedMessages, | ||||||||||||||||
| startIndex: 0, | ||||||||||||||||
| endIndex: groupedMessages.length, | ||||||||||||||||
| totalCount: groupedMessages.length, | ||||||||||||||||
| startPadding: 0, | ||||||||||||||||
| endPadding: 0, | ||||||||||||||||
| } | ||||||||||||||||
| } | ||||||||||||||||
|
|
||||||||||||||||
| // Calculate the window with buffers | ||||||||||||||||
| const bufferStart = Math.max(0, visibleRange.start - BUFFER_SIZE) | ||||||||||||||||
| const bufferEnd = Math.min(groupedMessages.length, visibleRange.end + BUFFER_SIZE) | ||||||||||||||||
|
|
||||||||||||||||
| // Slice the messages array | ||||||||||||||||
| const windowed = groupedMessages.slice(bufferStart, bufferEnd) | ||||||||||||||||
|
|
||||||||||||||||
| // Add placeholder items for virtualization | ||||||||||||||||
| const startPadding = bufferStart | ||||||||||||||||
| const endPadding = Math.max(0, groupedMessages.length - bufferEnd) | ||||||||||||||||
|
|
||||||||||||||||
| return { | ||||||||||||||||
| messages: windowed, | ||||||||||||||||
| startIndex: bufferStart, | ||||||||||||||||
| endIndex: bufferEnd, | ||||||||||||||||
| totalCount: groupedMessages.length, | ||||||||||||||||
| startPadding, | ||||||||||||||||
| endPadding, | ||||||||||||||||
| } | ||||||||||||||||
| }, [groupedMessages, visibleRange]) | ||||||||||||||||
|
|
||||||||||||||||
| // Loading functions | ||||||||||||||||
| const loadMoreMessagesTop = useCallback(() => { | ||||||||||||||||
| setIsLoadingTop(true) | ||||||||||||||||
|
|
||||||||||||||||
| // Simulate async loading with setTimeout (in real implementation, this would be instant) | ||||||||||||||||
| setTimeout(() => { | ||||||||||||||||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Is the 100ms setTimeout intentional here? This adds artificial latency to the pagination. Since the messages are already in memory, the loading should be instant. Consider removing the setTimeout or making it configurable if it's needed for UX reasons. |
||||||||||||||||
| setVisibleRange((prev) => ({ | ||||||||||||||||
| start: Math.max(0, prev.start - VISIBLE_MESSAGE_COUNT), | ||||||||||||||||
| end: prev.end, | ||||||||||||||||
| })) | ||||||||||||||||
| setIsLoadingTop(false) | ||||||||||||||||
| }, 100) | ||||||||||||||||
| }, []) | ||||||||||||||||
|
|
||||||||||||||||
| const loadMoreMessagesBottom = useCallback(() => { | ||||||||||||||||
| setIsLoadingBottom(true) | ||||||||||||||||
|
|
||||||||||||||||
| setTimeout(() => { | ||||||||||||||||
| setVisibleRange((prev) => ({ | ||||||||||||||||
| start: prev.start, | ||||||||||||||||
| end: Math.min(groupedMessages.length, prev.end + VISIBLE_MESSAGE_COUNT), | ||||||||||||||||
| })) | ||||||||||||||||
| setIsLoadingBottom(false) | ||||||||||||||||
| }, 100) | ||||||||||||||||
| }, [groupedMessages.length]) | ||||||||||||||||
|
|
||||||||||||||||
| // Debounced range change handler to prevent excessive updates | ||||||||||||||||
| const debouncedRangeChanged = useMemo( | ||||||||||||||||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. The memory leak risk here is concerning. Consider using |
||||||||||||||||
| () => | ||||||||||||||||
| debounce((range: ListRange) => { | ||||||||||||||||
| const { startIndex, endIndex } = range | ||||||||||||||||
|
|
||||||||||||||||
| // Check if we need to load more messages at the top | ||||||||||||||||
| if (startIndex <= LOAD_THRESHOLD && visibleRange.start > 0 && !isLoadingTop) { | ||||||||||||||||
| loadMoreMessagesTop() | ||||||||||||||||
| } | ||||||||||||||||
|
|
||||||||||||||||
| // Check if we need to load more messages at the bottom | ||||||||||||||||
| if ( | ||||||||||||||||
| endIndex >= windowedMessages.messages.length - LOAD_THRESHOLD && | ||||||||||||||||
| visibleRange.end < groupedMessages.length && | ||||||||||||||||
| !isLoadingBottom | ||||||||||||||||
| ) { | ||||||||||||||||
| loadMoreMessagesBottom() | ||||||||||||||||
| } | ||||||||||||||||
| }, 100), | ||||||||||||||||
| [ | ||||||||||||||||
| visibleRange, | ||||||||||||||||
| groupedMessages.length, | ||||||||||||||||
| isLoadingTop, | ||||||||||||||||
| isLoadingBottom, | ||||||||||||||||
| windowedMessages.messages.length, | ||||||||||||||||
| loadMoreMessagesTop, | ||||||||||||||||
| loadMoreMessagesBottom, | ||||||||||||||||
| ], | ||||||||||||||||
| ) | ||||||||||||||||
|
|
||||||||||||||||
| // Scroll position tracking | ||||||||||||||||
| const handleRangeChanged = useCallback( | ||||||||||||||||
| (range: ListRange) => { | ||||||||||||||||
| // Call the debounced function for loading more messages | ||||||||||||||||
| debouncedRangeChanged(range) | ||||||||||||||||
| }, | ||||||||||||||||
| [debouncedRangeChanged], | ||||||||||||||||
| ) | ||||||||||||||||
|
|
||||||||||||||||
| // Cleanup debounced function on unmount | ||||||||||||||||
| useEffect(() => { | ||||||||||||||||
| return () => { | ||||||||||||||||
| if (debouncedRangeChanged && typeof (debouncedRangeChanged as any).cancel === "function") { | ||||||||||||||||
| ;(debouncedRangeChanged as any).cancel() | ||||||||||||||||
|
Comment on lines +1466 to +1467
||||||||||||||||
| if (debouncedRangeChanged && typeof (debouncedRangeChanged as any).cancel === "function") { | |
| ;(debouncedRangeChanged as any).cancel() | |
| if (debouncedRangeChanged && typeof debouncedRangeChanged.cancel === "function") { | |
| debouncedRangeChanged.cancel() |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Could we add a TODO comment here to clarify when this temporary debugging code should be removed? Something like:
| // TEMPORARY DEBUGGING: Memory usage monitoring | |
| // TEMPORARY DEBUGGING: Memory usage monitoring | |
| // TODO: Remove after verifying pagination performance in production (target: v2.x.x) |
Copilot
AI
Aug 2, 2025
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The memory monitoring code should use proper type checking. Cast performance to any before accessing the memory property to avoid TypeScript issues, or use a proper type guard.
Copilot
AI
Aug 2, 2025
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Consider adding a runtime check to ensure memoryInfo exists before accessing its properties, as the memory API might be undefined even when the property exists.
| const memoryInfo = (performance as any).memory | |
| const memoryInfo = (performance as any).memory | |
| if (!memoryInfo) { | |
| console.log(`[ChatView Memory Monitor - ${timestamp}] performance.memory is undefined`) | |
| return | |
| } |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The LoadingIndicator component could benefit from accessibility improvements. Consider adding an aria-label:
| const LoadingIndicator = () => ( | |
| const LoadingIndicator = () => ( | |
| <div className="flex justify-center items-center py-4" role="status" aria-label="Loading more messages"> | |
| <div className="animate-spin rounded-full h-6 w-6 border-b-2 border-vscode-progressBar-background"></div> | |
| <span className="sr-only">Loading...</span> | |
| </div> | |
| ) |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Consider adding error boundaries around the windowed message rendering to gracefully handle any rendering failures. This would prevent the entire chat from crashing if there's an issue with a specific message.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
There's a potential race condition here with the loading states. If a user scrolls rapidly in both directions, multiple setTimeout callbacks could be queued, leading to inconsistent state. Consider using a ref to track the latest loading operation and cancel previous ones.