@@ -10,9 +10,9 @@ import Users from "@/lib/components/users";
 import { Version, getVersion } from "@/lib/components/version";
 import { Location } from "@/lib/components/location";
 import { type Location as LocationType } from "@/lib/location"
-import { parseNginxLogs } from "@/lib/parse";
 import { useCallback, useEffect, useMemo, useRef, useState, useTransition } from "react";
 import type { WorkerResult } from '@/lib/analytics.worker';
+import type { ParseWorkerResult } from '@/lib/parse.worker';
 import { Device } from "@/lib/components/device/device";
 import { type Filter, newFilter } from "@/lib/filter";
 import { Period, periodStart } from "@/lib/period";
@@ -32,13 +32,9 @@ import dynamic from "next/dynamic";
 const NetworkBackground = dynamic(() => import("./network-background"), { ssr: false });
 const FileUpload = dynamic(() => import("./file-upload"));
 
-const PARSE_CHUNK_SIZE = 5000;
-
 export default function Dashboard({ fileUpload, demo, logFormat }: { fileUpload: boolean, demo: boolean, logFormat?: string }) {
   const [accessLogs, setAccessLogs] = useState<string[]>([]);
   const [logs, setLogs] = useState<NginxLog[]>([]);
-  const parsedAccessCount = useRef(0);
-  const parseCancelRef = useRef(false);
 
   const [errorLogs, setErrorLogs] = useState<string[]>([]);
 
@@ -96,13 +92,15 @@ export default function Dashboard({ fileUpload, demo, logFormat }: { fileUpload:
     startTransition(() => setFilter((previous) => ({ ...previous, dayOfWeek })))
   }, [])
 
-  // Worker ref and result state
+  // Worker refs and result state
   const workerRef = useRef<Worker | null>(null);
   const workerSeqRef = useRef(0);
   const workerRawCountRef = useRef(0);
+  const computeDebounceRef = useRef<ReturnType<typeof setTimeout> | null>(null);
+  const parseWorkerRef = useRef<Worker | null>(null);
   const [workerResult, setWorkerResult] = useState<WorkerResult | null>(null);
 
-  // Create worker once on mount
+  // Create workers once on mount
   useEffect(() => {
     const worker = new Worker(new URL('../analytics.worker.ts', import.meta.url));
     workerRef.current = worker;
@@ -111,25 +109,49 @@ export default function Dashboard({ fileUpload, demo, logFormat }: { fileUpload:
       setWorkerResult(e.data);
     }
   };
-    return () => worker.terminate();
+
+    const parseWorker = new Worker(new URL('../parse.worker.ts', import.meta.url));
+    parseWorkerRef.current = parseWorker;
+    parseWorker.onmessage = (e: MessageEvent<ParseWorkerResult>) => {
+      const { logs: newLogs, maxTimestamp, isFirstBatch } = e.data;
+      if (newLogs.length === 0) return;
+      if (isFirstBatch && maxTimestamp !== null) {
+        const maxDate = new Date(maxTimestamp);
+        if (inPeriod(maxDate, 'week')) setPeriod('week');
+        else if (inPeriod(maxDate, 'month')) setPeriod('month');
+        else if (inPeriod(maxDate, '6 months')) setPeriod('6 months');
+        else setPeriod('all time');
+      }
+      setLogs(prev => [...prev, ...newLogs]);
+    };
+
+    return () => {
+      worker.terminate();
+      parseWorker.terminate();
+    };
   }, []);
 
-  // Send raw logs to worker for parsing (fires before compute effect)
+  // Send raw logs to both workers for parsing (fires before compute effect)
   useEffect(() => {
-    if (!workerRef.current || accessLogs.length <= workerRawCountRef.current) return;
+    if (accessLogs.length <= workerRawCountRef.current) return;
     const newRawLogs = accessLogs.slice(workerRawCountRef.current);
+    const isFirstBatch = workerRawCountRef.current === 0;
     workerRawCountRef.current = accessLogs.length;
-    workerRef.current.postMessage({ type: 'parseAndStore', rawLogs: newRawLogs, logFormat });
+    workerRef.current?.postMessage({ type: 'parseAndStore', rawLogs: newRawLogs, logFormat });
+    parseWorkerRef.current?.postMessage({ rawLogs: newRawLogs, logFormat, isFirstBatch });
   }, [accessLogs]);
 
-  // Trigger computation when inputs change
+  // Trigger computation when inputs change — debounced to avoid redundant work on rapid changes
   useEffect(() => {
     if (!workerRef.current) return;
-    const seq = ++workerSeqRef.current;
-    const locationMapEntries: [string, string][] = filter.location !== null
-      ? Array.from(locationMap.entries()).map(([ip, loc]) => [ip, loc.country])
-      : [];
-    workerRef.current.postMessage({ type: 'compute', seq, filter, settings, locationMap: locationMapEntries });
+    if (computeDebounceRef.current) clearTimeout(computeDebounceRef.current);
+    computeDebounceRef.current = setTimeout(() => {
+      const seq = ++workerSeqRef.current;
+      const locationMapEntries: [string, string][] = filter.location !== null
+        ? Array.from(locationMap.entries()).map(([ip, loc]) => [ip, loc.country])
+        : [];
+      workerRef.current?.postMessage({ type: 'compute', seq, filter, settings, locationMap: locationMapEntries });
+    }, 50);
   }, [accessLogs, filter, settings, locationMap]);
 
   useEffect(() => {
@@ -196,63 +218,6 @@ export default function Dashboard({ fileUpload, demo, logFormat }: { fileUpload:
     return url;
   }
 
-  useEffect(() => {
-    if (accessLogs.length <= parsedAccessCount.current) return;
-
-    const newRawLogs = accessLogs.slice(parsedAccessCount.current);
-    const isFirstBatch = parsedAccessCount.current === 0;
-    parsedAccessCount.current = accessLogs.length;
-
-    const initPeriod = (parsed: ReturnType<typeof parseNginxLogs>) => {
-      let maxDate = parsed[0].timestamp;
-      for (const log of parsed) {
-        if (log.timestamp && (!maxDate || log.timestamp > maxDate)) {
-          maxDate = log.timestamp;
-        }
-      }
-      if (maxDate) {
-        if (inPeriod(maxDate, 'week')) setPeriod('week');
-        else if (inPeriod(maxDate, 'month')) setPeriod('month');
-        else if (inPeriod(maxDate, '6 months')) setPeriod('6 months');
-        else setPeriod('all time');
-      }
-    };
-
-    // Small batches: parse synchronously
-    if (newRawLogs.length <= PARSE_CHUNK_SIZE) {
-      const newParsed = parseNginxLogs(newRawLogs, logFormat);
-      if (newParsed.length === 0) return;
-      if (isFirstBatch) initPeriod(newParsed);
-      setLogs(prev => [...prev, ...newParsed]);
-      return;
-    }
-
-    // Large batches: chunk with setTimeout to avoid blocking the main thread,
-    // but accumulate all results and update state only once at the end.
-    parseCancelRef.current = false;
-    let offset = 0;
-    const allParsed: ReturnType<typeof parseNginxLogs> = [];
-    const processChunk = () => {
-      if (parseCancelRef.current) return;
-      const chunk = newRawLogs.slice(offset, offset + PARSE_CHUNK_SIZE);
-      if (chunk.length === 0) return;
-      const parsed = parseNginxLogs(chunk, logFormat);
-      if (parsed.length > 0) allParsed.push(...parsed);
-      offset += PARSE_CHUNK_SIZE;
-      if (offset < newRawLogs.length) {
-        setTimeout(processChunk, 0);
-      } else {
-        // All chunks done — single state update
-        if (allParsed.length > 0) {
-          if (isFirstBatch) initPeriod(allParsed);
-          setLogs(prev => [...prev, ...allParsed]);
-        }
-      }
-    };
-    processChunk();
-
-    return () => { parseCancelRef.current = true; };
-  }, [accessLogs])
 
 
 