Skip to content

Commit 1141501

Browse files
committed
Add BOM character and change stream-writing logic
1 parent 3806c56 commit 1141501

File tree

3 files changed

+100
-64
lines changed

3 files changed

+100
-64
lines changed

src/renderer/src/pages/MainPage/index.jsx

Lines changed: 63 additions & 61 deletions
Original file line numberDiff line numberDiff line change
@@ -84,97 +84,95 @@ const MainPage = ({ queryDb }) => {
8484
notes: ''
8585
})
8686
}
87-
8887
const processChunks = async (chunks, fileStream, downloadParams, headerState) => {
89-
// nothing to do if there are no chunks
9088
if (chunks.length === 0) {
9189
dispatch(addLog({ message: t('mainPage.noDataForHeader'), type: 'error' }))
9290
return
9391
}
9492

95-
// 1) Fetch & write the very first chunk (this writes the header + its rows)
96-
await fetchAndProcessChunk(chunks[0], 0, fileStream, downloadParams, headerState)
97-
98-
// 2) Fetch all the others in parallel (they’ll all skip their own header row)
99-
const rest = chunks.slice(1)
100-
await Promise.all(
101-
rest.map((chunk, idx) =>
102-
fetchAndProcessChunk(chunk, idx + 1, fileStream, downloadParams, headerState)
93+
// (a) start all chunk fetches in parallel
94+
const fetches = chunks.map(({ dx, periods, ou }, idx) => {
95+
const url = generateDownloadingUrl(
96+
dhis2Url,
97+
ou,
98+
dx.join(';'),
99+
periods.join(';'),
100+
downloadParams.co,
101+
'csv',
102+
downloadParams.layout
103103
)
104-
)
105-
}
104+
return fetchCsvData(url, username, password)
105+
.then((blob) => blob.text())
106+
.then((text) => ({ idx, text, dx, periods }))
107+
.catch((error) => ({ idx, error, dx, periods }))
108+
})
106109

107-
const fetchAndProcessChunk = async (chunk, index, fileStream, downloadParams, headerState) => {
108-
const { dx, periods, ou } = chunk
109-
const chunkUrl = generateDownloadingUrl(
110-
dhis2Url,
111-
ou,
112-
dx.join(';'),
113-
periods.join(';'),
114-
downloadParams.co,
115-
'csv',
116-
downloadParams.layout
117-
)
118-
console.log(downloadParams.layout)
110+
// (b) wait for every fetch to settle (each has its own .catch, so none rejects)
111+
const results = await Promise.all(fetches)
112+
113+
// (c) sort results by ascending chunk index so output order is deterministic
114+
results.sort((a, b) => a.idx - b.idx)
115+
116+
// (d) write each chunk in order
117+
for (const { idx, text, error, dx, periods } of results) {
118+
if (error) {
119+
dispatch(
120+
addLog({
121+
message: t('mainPage.chunkFailed', {
122+
index: idx + 1,
123+
dx: dx.join(';'),
124+
startPeriod: periods[0],
125+
endPeriod: periods[periods.length - 1],
126+
error: error.message
127+
}),
128+
type: 'error'
129+
})
130+
)
131+
continue
132+
}
119133

120-
try {
121-
const blob = await fetchCsvData(chunkUrl, username, password)
122-
const text = await blob.text()
123-
writeChunkToFile(text, fileStream, headerState)
134+
writeChunkToFile(text, fileStream, headerState, idx)
124135

125136
dispatch(
126137
addLog({
127138
message: t('mainPage.chunkSuccess', {
128-
index: index + 1,
139+
index: idx + 1,
129140
dx: dx.join(';'),
130141
startPeriod: periods[0],
131142
endPeriod: periods[periods.length - 1]
132143
}),
133144
type: 'info'
134145
})
135146
)
136-
} catch (error) {
137-
dispatch(
138-
addLog({
139-
message: t('mainPage.chunkFailed', {
140-
index: index + 1,
141-
dx: dx.join(';'),
142-
startPeriod: periods[0],
143-
endPeriod: periods[periods.length - 1],
144-
error: error?.message
145-
}),
146-
type: 'error'
147-
})
148-
)
149-
} finally {
150-
if (window.api.triggerGarbageCollection) {
151-
window.api.triggerGarbageCollection()
152-
}
153147
}
154148
}
155149

156-
const writeChunkToFile = (text, fileStream, headerState) => {
157-
const rows = text.split('\n').filter(Boolean)
150+
const writeChunkToFile = (text, fileStream, headerState, chunkIndex) => {
151+
const rows = text.split('\n').filter((line) => line.trim().length > 0)
152+
if (rows.length === 0) return
158153

159-
if (!headerState.written) {
160-
if (rows.length === 0) return
161-
const header = rows[0] + ',downloaded_date'
162-
fileStream.write(header + '\n')
163-
headerState.written = true
164-
rows.shift()
165-
}
154+
let out = ''
166155

167-
if (rows.length > 0) {
168-
const firstRow = rows[0]
169-
if (firstRow.toLowerCase().includes('period') || firstRow.toLowerCase().includes('orgunit')) {
156+
if (chunkIndex === 0) {
157+
// First chunk: pull off the header row, emit it once
158+
if (!headerState.written) {
159+
const hdr = rows.shift()
160+
out += `${hdr},downloaded_date\n`
161+
headerState.written = true
162+
} else {
163+
// (Defensive: the header should only ever be written once for chunk 0; if it already was, drop this chunk's duplicate header row.)
170164
rows.shift()
171165
}
166+
} else {
167+
// Subsequent chunks: drop their header row
168+
rows.shift()
172169
}
173170

174171
if (rows.length > 0) {
175-
const dataWithDate = rows.map((row) => row + `,${currentDate}`).join('\n')
176-
fileStream.write(dataWithDate + '\n')
172+
out += rows.map((r) => `${r},${currentDate}`).join('\n') + '\n'
177173
}
174+
175+
fileStream.write(out)
178176
}
179177

180178
const handleDownloadClick = () => {
@@ -196,7 +194,11 @@ const MainPage = ({ queryDb }) => {
196194
const saveFilePath = await getSaveFilePath()
197195
if (!saveFilePath) return
198196

199-
fileStream = window.fileSystem.createWriteStream(saveFilePath)
197+
fileStream = window.fileSystem.createWriteStream(saveFilePath, {
198+
flags: 'w',
199+
encoding: 'utf8'
200+
})
201+
fileStream.write('\uFEFF')
200202

201203
const downloadParams = getDownloadParameters(layout)
202204
console.log(downloadParams)

src/renderer/src/service/useApi.js

Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,41 @@ export const fetchData = async (apiUrl, username, password, timeout = 3600000) =
2727
}
2828
}
2929

30+
export const fetchJsonData = async (apiUrl, username, password, timeout = 1200000) => {
31+
const controller = new AbortController()
32+
const { signal } = controller
33+
const fetchOptions = {
34+
headers: {
35+
Authorization: `Basic ${btoa(`${username}:${password}`)}`,
36+
Accept: 'application/json'
37+
},
38+
signal
39+
}
40+
const timeoutId = setTimeout(() => controller.abort(), timeout)
41+
42+
try {
43+
const response = await fetch(apiUrl, fetchOptions)
44+
clearTimeout(timeoutId)
45+
46+
if (!response.ok) {
47+
// Try to extract a helpful error message from an HTML error page
48+
const text = await response.text()
49+
const parser = new DOMParser()
50+
const doc = parser.parseFromString(text, 'text/html')
51+
const title = doc.querySelector('title')?.textContent
52+
throw new Error(title || `HTTP error! status: ${response.status}`)
53+
}
54+
55+
// Parse and return JSON
56+
return await response.json()
57+
} catch (error) {
58+
if (error.name === 'AbortError') {
59+
throw new Error(`Request timed out after ${timeout}ms`)
60+
}
61+
throw error
62+
}
63+
}
64+
3065
export const fetchCsvData = async (apiUrl, username, password, timeout = 1200000) => {
3166
const controller = new AbortController()
3267
const { signal } = controller

src/renderer/src/utils/downloadUtils.js

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,9 +4,8 @@ export const generateDownloadingUrl = (
44
dx,
55
pe,
66
co,
7-
format = 'csv',
8-
layout = { rows: ['ou', 'pe', 'dx'], columns: [] }
9-
) => {
7+
format = 'json',
8+
layout = { rows: ['ou', 'pe', 'dx'], columns: [] }) => {
109
// Base dimension URL parts
1110
let parameters = `api/analytics.${format}?dimension=ou:${ou}&dimension=pe:${pe}&dimension=dx:${dx}`
1211

0 commit comments

Comments
 (0)