@@ -84,97 +84,95 @@ const MainPage = ({ queryDb }) => {
8484 notes : ''
8585 } )
8686 }
87-
/**
 * Download all CSV chunks in parallel and write them to the output file in
 * chunk order. Per-chunk failures are logged and skipped; they do not abort
 * the other chunks.
 *
 * @param {Array<{dx: string[], periods: string[], ou: string}>} chunks - analytics query chunks
 * @param {object} fileStream - writable stream for the output CSV file
 * @param {{co: string, layout: string}} downloadParams - extra query parameters
 * @param {{written: boolean}} headerState - shared flag tracking whether the CSV header was emitted
 */
const processChunks = async (chunks, fileStream, downloadParams, headerState) => {
  // Nothing to write if the query produced no chunks.
  if (chunks.length === 0) {
    dispatch(addLog({ message: t('mainPage.noDataForHeader'), type: 'error' }))
    return
  }

  // (a) Fetch every chunk in parallel. Each promise resolves to a result
  // object — failures are captured per-chunk via .catch so that one bad
  // request cannot reject the whole Promise.all.
  const fetches = chunks.map(({ dx, periods, ou }, idx) => {
    const url = generateDownloadingUrl(
      dhis2Url,
      ou,
      dx.join(';'),
      periods.join(';'),
      downloadParams.co,
      'csv',
      downloadParams.layout
    )
    return fetchCsvData(url, username, password)
      .then((blob) => blob.text())
      .then((text) => ({ idx, text, dx, periods }))
      .catch((error) => ({ idx, error, dx, periods }))
  })

  // (b) Wait for all fetches. Promise.all resolves in input order, so the
  // results are already sorted by idx — no explicit sort is needed.
  const results = await Promise.all(fetches)

  // (c) Write each chunk sequentially so rows land in the file in chunk order.
  for (const { idx, text, error, dx, periods } of results) {
    if (error) {
      dispatch(
        addLog({
          message: t('mainPage.chunkFailed', {
            index: idx + 1,
            dx: dx.join(';'),
            startPeriod: periods[0],
            endPeriod: periods[periods.length - 1],
            // Optional chaining: a rejection value is not guaranteed to be an Error.
            error: error?.message
          }),
          type: 'error'
        })
      )
      continue
    }

    writeChunkToFile(text, fileStream, headerState, idx)

    dispatch(
      addLog({
        message: t('mainPage.chunkSuccess', {
          index: idx + 1,
          dx: dx.join(';'),
          startPeriod: periods[0],
          endPeriod: periods[periods.length - 1]
        }),
        type: 'info'
      })
    )
  }
}
155149
/**
 * Append one chunk's CSV text to the output stream, emitting the shared
 * header row exactly once and tagging every data row with the download date.
 *
 * Bug fix: header emission is keyed on headerState.written, NOT on
 * chunkIndex === 0. If chunk 0 fails to download (processChunks logs and
 * skips failed chunks), the first chunk that *does* arrive must still write
 * the header — keying on the index would have produced a headerless file
 * while silently dropping every later chunk's header row.
 *
 * @param {string} text - raw CSV text for one chunk (its own header + data rows)
 * @param {object} fileStream - writable stream for the output file
 * @param {{written: boolean}} headerState - shared flag: has the header been emitted yet?
 * @param {number} chunkIndex - position of this chunk (kept for interface compatibility)
 */
const writeChunkToFile = (text, fileStream, headerState, chunkIndex) => {
  const rows = text.split('\n').filter((line) => line.trim().length > 0)
  if (rows.length === 0) return

  let out = ''

  // Every chunk's CSV starts with its own header row. Write it (with the
  // extra downloaded_date column) the first time any chunk arrives; drop
  // the duplicate header from every chunk after that.
  if (!headerState.written) {
    const header = rows.shift()
    out += `${header},downloaded_date\n`
    headerState.written = true
  } else {
    rows.shift()
  }

  if (rows.length > 0) {
    out += rows.map((row) => `${row},${currentDate}`).join('\n') + '\n'
  }

  fileStream.write(out)
}
179177
180178 const handleDownloadClick = ( ) => {
@@ -196,7 +194,11 @@ const MainPage = ({ queryDb }) => {
196194 const saveFilePath = await getSaveFilePath ( )
197195 if ( ! saveFilePath ) return
198196
199- fileStream = window . fileSystem . createWriteStream ( saveFilePath )
197+ fileStream = window . fileSystem . createWriteStream ( saveFilePath , {
198+ flags : 'w' ,
199+ encoding : 'utf8'
200+ } )
201+ fileStream . write ( '\uFEFF' )
200202
201203 const downloadParams = getDownloadParameters ( layout )
202204 console . log ( downloadParams )
0 commit comments