From da75d27db1283ba33c8a3ba6125cf9d1e174f5ad Mon Sep 17 00:00:00 2001
From: Chirag Aggarwal
Date: Sun, 10 Aug 2025 18:07:37 +0530
Subject: [PATCH 1/5] feat: parallel chunkedUpload in web and node

---
 templates/node/src/client.ts.twig | 113 +++++++++++++++++++++++-------
 templates/web/src/client.ts.twig  | 113 +++++++++++++++++++++++-------
 2 files changed, 178 insertions(+), 48 deletions(-)

diff --git a/templates/node/src/client.ts.twig b/templates/node/src/client.ts.twig
index 7bfbb9b9d..0dd656a16 100644
--- a/templates/node/src/client.ts.twig
+++ b/templates/node/src/client.ts.twig
@@ -67,6 +67,7 @@ function getUserAgent() {
 
 class Client {
     static CHUNK_SIZE = 1024 * 1024 * 5;
+    static MAX_CONCURRENCY = 5;
 
     config = {
         endpoint: '{{ spec.endpoint }}',
@@ -211,38 +212,102 @@ class Client {
             return await this.call(method, url, headers, originalPayload);
         }
 
-        let start = 0;
-        let response = null;
+        const totalChunks = Math.ceil(file.size / Client.CHUNK_SIZE);
+        const chunks: Array<{ index: number; start: number; end: number; chunk: Blob }> = [];
 
-        while (start < file.size) {
-            let end = start + Client.CHUNK_SIZE; // Prepare end for the next chunk
-            if (end >= file.size) {
-                end = file.size; // Adjust for the last chunk to include the last byte
-            }
-
-            headers['content-range'] = `bytes ${start}-${end-1}/${file.size}`;
+        for (let i = 0; i < totalChunks; i++) {
+            const start = i * Client.CHUNK_SIZE;
+            const end = Math.min(start + Client.CHUNK_SIZE, file.size);
             const chunk = file.slice(start, end);
+            chunks.push({ index: i, start, end, chunk });
+        }
 
-            let payload = { ...originalPayload };
-            payload[fileParam] = new File([chunk], file.name);
+        const firstChunk = chunks[0];
+        const firstChunkHeaders = { ...headers };
+        firstChunkHeaders['content-range'] = `bytes ${firstChunk?.start}-${(firstChunk?.end ?? 0) - 1}/${file.size}`;
+
+        const firstPayload = { ...originalPayload };
+        firstPayload[fileParam] = new File([firstChunk?.chunk ?? new Blob([])], file.name);
+
+        const firstResponse = await this.call(method, url, firstChunkHeaders, firstPayload);
+
+        if (!firstResponse?.$id) {
+            throw new Error('First chunk upload failed - no ID returned');
+        }
 
-            response = await this.call(method, url, headers, payload);
+        let completedChunks = 1;
+        let totalUploaded = firstChunk?.end ?? 0;
+
+        if (onProgress && typeof onProgress === 'function') {
+            onProgress({
+                $id: firstResponse.$id,
+                progress: Math.round((totalUploaded / file.size) * 100),
+                sizeUploaded: totalUploaded,
+                chunksTotal: totalChunks,
+                chunksUploaded: completedChunks
+            });
+        }
 
-            if (onProgress && typeof onProgress === 'function') {
-                onProgress({
-                    $id: response.$id,
-                    progress: Math.round((end / file.size) * 100),
-                    sizeUploaded: end,
-                    chunksTotal: Math.ceil(file.size / Client.CHUNK_SIZE),
-                    chunksUploaded: Math.ceil(end / Client.CHUNK_SIZE)
-                });
-            }
+        if (totalChunks === 1) {
+            return firstResponse;
+        }
 
-            if (response && response.$id) {
-                headers['x-{{spec.title | caseLower }}-id'] = response.$id;
+        const remainingChunks = chunks.slice(1);
+        let response = firstResponse;
+
+        for (let batchStart = 0; batchStart < remainingChunks.length; batchStart += Client.MAX_CONCURRENCY) {
+            const batch = remainingChunks.slice(batchStart, batchStart + Client.MAX_CONCURRENCY);
+
+            const batchPromises = batch.map(async (chunkInfo) => {
+                const chunkHeaders = { ...headers };
+                chunkHeaders['content-range'] = `bytes ${chunkInfo?.start}-${chunkInfo?.end - 1}/${file.size}`;
+                chunkHeaders['x-appwrite-id'] = firstResponse.$id;
+
+                const payload = { ...originalPayload };
+                payload[fileParam] = new File([chunkInfo.chunk], file.name);
+
+                try {
+                    const chunkResponse = await this.call(method, url, chunkHeaders, payload);
+                    return {
+                        success: true,
+                        response: chunkResponse,
+                        chunkInfo,
+                        error: null
+                    };
+                } catch (error) {
+                    return {
+                        success: false,
+                        response: null,
+                        chunkInfo,
+                        error
+                    };
+                }
+            });
+            const batchResults = await Promise.all(batchPromises);
+
+            const failures = batchResults.filter(result => !result.success);
+            if (failures.length > 0) {
+                const errorMessages = failures.map(f => `Chunk ${f.chunkInfo.index}: ${f.error}`);
+                throw new Error(`Chunk upload failures: ${errorMessages.join(', ')}`);
             }
-            start = end;
+            for (const result of batchResults) {
+                if (result.success) {
+                    completedChunks++;
+                    totalUploaded += (result.chunkInfo.end - result.chunkInfo.start);
+                    response = result.response;
+
+                    if (onProgress && typeof onProgress === 'function') {
+                        onProgress({
+                            $id: firstResponse.$id,
+                            progress: Math.round((totalUploaded / file.size) * 100),
+                            sizeUploaded: totalUploaded,
+                            chunksTotal: totalChunks,
+                            chunksUploaded: completedChunks
+                        });
+                    }
+                }
+            }
         }
 
         return response;
diff --git a/templates/web/src/client.ts.twig b/templates/web/src/client.ts.twig
index 92b1e7b4b..354a51f37 100644
--- a/templates/web/src/client.ts.twig
+++ b/templates/web/src/client.ts.twig
@@ -296,6 +296,7 @@ class {{spec.title | caseUcfirst}}Exception extends Error {
  */
 class Client {
     static CHUNK_SIZE = 1024 * 1024 * 5;
+    static MAX_CONCURRENCY = 5;
 
     /**
      * Holds configuration such as project.
@@ -639,38 +640,102 @@ class Client {
             return await this.call(method, url, headers, originalPayload);
         }
 
-        let start = 0;
-        let response = null;
+        const totalChunks = Math.ceil(file.size / Client.CHUNK_SIZE);
+        const chunks: Array<{ index: number; start: number; end: number; chunk: Blob }> = [];
 
-        while (start < file.size) {
-            let end = start + Client.CHUNK_SIZE; // Prepare end for the next chunk
-            if (end >= file.size) {
-                end = file.size; // Adjust for the last chunk to include the last byte
-            }
-
-            headers['content-range'] = `bytes ${start}-${end-1}/${file.size}`;
+        for (let i = 0; i < totalChunks; i++) {
+            const start = i * Client.CHUNK_SIZE;
+            const end = Math.min(start + Client.CHUNK_SIZE, file.size);
             const chunk = file.slice(start, end);
+            chunks.push({ index: i, start, end, chunk });
+        }
 
-            let payload = { ...originalPayload };
-            payload[fileParam] = new File([chunk], file.name);
+        const firstChunk = chunks[0];
+        const firstChunkHeaders = { ...headers };
+        firstChunkHeaders['content-range'] = `bytes ${firstChunk?.start}-${(firstChunk?.end ?? 0) - 1}/${file.size}`;
+
+        const firstPayload = { ...originalPayload };
+        firstPayload[fileParam] = new File([firstChunk?.chunk ?? new Blob()], file.name);
+
+        const firstResponse = await this.call(method, url, firstChunkHeaders, firstPayload);
+
+        if (!firstResponse?.$id) {
+            throw new Error('First chunk upload failed - no ID returned');
+        }
 
-            response = await this.call(method, url, headers, payload);
+        let completedChunks = 1;
+        let totalUploaded = firstChunk?.end ?? 0;
+
+        if (onProgress && typeof onProgress === 'function') {
+            onProgress({
+                $id: firstResponse.$id,
+                progress: Math.round((totalUploaded / file.size) * 100),
+                sizeUploaded: totalUploaded,
+                chunksTotal: totalChunks,
+                chunksUploaded: completedChunks
+            });
+        }
 
-            if (onProgress && typeof onProgress === 'function') {
-                onProgress({
-                    $id: response.$id,
-                    progress: Math.round((end / file.size) * 100),
-                    sizeUploaded: end,
-                    chunksTotal: Math.ceil(file.size / Client.CHUNK_SIZE),
-                    chunksUploaded: Math.ceil(end / Client.CHUNK_SIZE)
-                });
-            }
+        if (totalChunks === 1) {
+            return firstResponse;
+        }
 
-            if (response && response.$id) {
-                headers['x-{{spec.title | caseLower }}-id'] = response.$id;
+        const remainingChunks = chunks.slice(1);
+        let response = firstResponse;
+
+        for (let batchStart = 0; batchStart < remainingChunks.length; batchStart += Client.MAX_CONCURRENCY) {
+            const batch = remainingChunks.slice(batchStart, batchStart + Client.MAX_CONCURRENCY);
+
+            const batchPromises = batch.map(async (chunkInfo) => {
+                const chunkHeaders = { ...headers };
+                chunkHeaders['content-range'] = `bytes ${chunkInfo?.start}-${chunkInfo?.end - 1}/${file.size}`;
+                chunkHeaders['x-appwrite-id'] = firstResponse.$id;
+
+                const payload = { ...originalPayload };
+                payload[fileParam] = new File([chunkInfo.chunk], file.name);
+
+                try {
+                    const chunkResponse = await this.call(method, url, chunkHeaders, payload);
+                    return {
+                        success: true,
+                        response: chunkResponse,
+                        chunkInfo,
+                        error: null
+                    };
+                } catch (error) {
+                    return {
+                        success: false,
+                        response: null,
+                        chunkInfo,
+                        error
+                    };
+                }
+            });
+            const batchResults = await Promise.all(batchPromises);
+
+            const failures = batchResults.filter(result => !result.success);
+            if (failures.length > 0) {
+                const errorMessages = failures.map(f => `Chunk ${f.chunkInfo.index}: ${f.error}`);
+                throw new Error(`Chunk upload failures: ${errorMessages.join(', ')}`);
             }
-            start = end;
+            for (const result of batchResults) {
+                if (result.success) {
+                    completedChunks++;
+                    totalUploaded += (result.chunkInfo.end - result.chunkInfo.start);
+                    response = result.response;
+
+                    if (onProgress && typeof onProgress === 'function') {
+                        onProgress({
+                            $id: firstResponse.$id,
+                            progress: Math.round((totalUploaded / file.size) * 100),
+                            sizeUploaded: totalUploaded,
+                            chunksTotal: totalChunks,
+                            chunksUploaded: completedChunks
+                        });
+                    }
+                }
+            }
         }
 
         return response;

From 3ee8ddf9f7079bffcd53676f5d1630ed1020f9e4 Mon Sep 17 00:00:00 2001
From: Chirag Aggarwal
Date: Sun, 10 Aug 2025 18:15:24 +0530
Subject: [PATCH 2/5] fix: some issues

---
 templates/node/src/client.ts.twig | 15 ++++++++++-----
 templates/web/src/client.ts.twig  | 15 ++++++++++-----
 2 files changed, 20 insertions(+), 10 deletions(-)

diff --git a/templates/node/src/client.ts.twig b/templates/node/src/client.ts.twig
index 0dd656a16..04f271f62 100644
--- a/templates/node/src/client.ts.twig
+++ b/templates/node/src/client.ts.twig
@@ -223,11 +223,16 @@ class Client {
         }
 
         const firstChunk = chunks[0];
+
+        if (!firstChunk) {
+            throw new Error('First chunk not found');
+        }
+
         const firstChunkHeaders = { ...headers };
-        firstChunkHeaders['content-range'] = `bytes ${firstChunk?.start}-${(firstChunk?.end ?? 0) - 1}/${file.size}`;
+        firstChunkHeaders['content-range'] = `bytes ${firstChunk.start}-${(firstChunk.end ?? 0) - 1}/${file.size}`;
 
         const firstPayload = { ...originalPayload };
-        firstPayload[fileParam] = new File([firstChunk?.chunk ?? new Blob([])], file.name);
+        firstPayload[fileParam] = new File([firstChunk.chunk], file.name);
 
         const firstResponse = await this.call(method, url, firstChunkHeaders, firstPayload);
 
@@ -236,7 +241,7 @@ class Client {
         }
 
         let completedChunks = 1;
-        let totalUploaded = firstChunk?.end ?? 0;
+        let totalUploaded = firstChunk.end ?? 0;
 
         if (onProgress && typeof onProgress === 'function') {
             onProgress({
@@ -260,8 +265,8 @@ class Client {
 
             const batchPromises = batch.map(async (chunkInfo) => {
                 const chunkHeaders = { ...headers };
-                chunkHeaders['content-range'] = `bytes ${chunkInfo?.start}-${chunkInfo?.end - 1}/${file.size}`;
-                chunkHeaders['x-appwrite-id'] = firstResponse.$id;
+                chunkHeaders['content-range'] = `bytes ${chunkInfo.start}-${chunkInfo.end - 1}/${file.size}`;
+                chunkHeaders['x-{{spec.title | caseLower}}-id'] = firstResponse.$id;
 
                 const payload = { ...originalPayload };
                 payload[fileParam] = new File([chunkInfo.chunk], file.name);
diff --git a/templates/web/src/client.ts.twig b/templates/web/src/client.ts.twig
index 354a51f37..e0a80e47c 100644
--- a/templates/web/src/client.ts.twig
+++ b/templates/web/src/client.ts.twig
@@ -651,11 +651,16 @@ class Client {
         }
 
         const firstChunk = chunks[0];
+
+        if (!firstChunk) {
+            throw new Error('First chunk not found');
+        }
+
         const firstChunkHeaders = { ...headers };
-        firstChunkHeaders['content-range'] = `bytes ${firstChunk?.start}-${(firstChunk?.end ?? 0) - 1}/${file.size}`;
+        firstChunkHeaders['content-range'] = `bytes ${firstChunk.start}-${(firstChunk.end ?? 0) - 1}/${file.size}`;
 
         const firstPayload = { ...originalPayload };
-        firstPayload[fileParam] = new File([firstChunk?.chunk ?? new Blob()], file.name);
+        firstPayload[fileParam] = new File([firstChunk.chunk], file.name);
 
         const firstResponse = await this.call(method, url, firstChunkHeaders, firstPayload);
 
@@ -664,7 +669,7 @@ class Client {
         }
 
         let completedChunks = 1;
-        let totalUploaded = firstChunk?.end ?? 0;
+        let totalUploaded = firstChunk.end ?? 0;
 
         if (onProgress && typeof onProgress === 'function') {
             onProgress({
@@ -688,8 +693,8 @@ class Client {
 
             const batchPromises = batch.map(async (chunkInfo) => {
                 const chunkHeaders = { ...headers };
-                chunkHeaders['content-range'] = `bytes ${chunkInfo?.start}-${chunkInfo?.end - 1}/${file.size}`;
-                chunkHeaders['x-appwrite-id'] = firstResponse.$id;
+                chunkHeaders['content-range'] = `bytes ${chunkInfo.start}-${chunkInfo.end - 1}/${file.size}`;
+                chunkHeaders['x-{{spec.title | caseLower }}-id'] = firstResponse.$id;
 
                 const payload = { ...originalPayload };
                 payload[fileParam] = new File([chunkInfo.chunk], file.name);

From 0246b3bfda7559e0e60867c57a2ee71f1119b1d0 Mon Sep 17 00:00:00 2001
From: Chirag Aggarwal
Date: Sun, 10 Aug 2025 18:16:32 +0530
Subject: [PATCH 3/5] update concurrency to 6

---
 templates/node/src/client.ts.twig | 2 +-
 templates/web/src/client.ts.twig  | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/templates/node/src/client.ts.twig b/templates/node/src/client.ts.twig
index 04f271f62..80cf74d47 100644
--- a/templates/node/src/client.ts.twig
+++ b/templates/node/src/client.ts.twig
@@ -67,7 +67,7 @@ function getUserAgent() {
 
 class Client {
     static CHUNK_SIZE = 1024 * 1024 * 5;
-    static MAX_CONCURRENCY = 5;
+    static MAX_CONCURRENCY = 6;
 
     config = {
         endpoint: '{{ spec.endpoint }}',
diff --git a/templates/web/src/client.ts.twig b/templates/web/src/client.ts.twig
index e0a80e47c..dc32f2f58 100644
--- a/templates/web/src/client.ts.twig
+++ b/templates/web/src/client.ts.twig
@@ -296,7 +296,7 @@ class {{spec.title | caseUcfirst}}Exception extends Error {
  */
 class Client {
     static CHUNK_SIZE = 1024 * 1024 * 5;
-    static MAX_CONCURRENCY = 5;
+    static MAX_CONCURRENCY = 6;
 
     /**
      * Holds configuration such as project.

From aa0c3d401a98aa483904c70266c3f415b682cd84 Mon Sep 17 00:00:00 2001
From: Chirag Aggarwal
Date: Sun, 10 Aug 2025 18:23:58 +0530
Subject: [PATCH 4/5] chore: create chunks on demand

---
 templates/node/src/client.ts.twig | 79 +++++++++++++++----------
 templates/web/src/client.ts.twig  | 79 +++++++++++++++----------
 2 files changed, 76 insertions(+), 82 deletions(-)

diff --git a/templates/node/src/client.ts.twig b/templates/node/src/client.ts.twig
index 80cf74d47..8096c8b82 100644
--- a/templates/node/src/client.ts.twig
+++ b/templates/node/src/client.ts.twig
@@ -213,20 +213,10 @@ class Client {
         }
 
         const totalChunks = Math.ceil(file.size / Client.CHUNK_SIZE);
-        const chunks: Array<{ index: number; start: number; end: number; chunk: Blob }> = [];
 
-        for (let i = 0; i < totalChunks; i++) {
-            const start = i * Client.CHUNK_SIZE;
-            const end = Math.min(start + Client.CHUNK_SIZE, file.size);
-            const chunk = file.slice(start, end);
-            chunks.push({ index: i, start, end, chunk });
-        }
-
-        const firstChunk = chunks[0];
-
-        if (!firstChunk) {
-            throw new Error('First chunk not found');
-        }
+        const firstChunkStart = 0;
+        const firstChunkEnd = Math.min(Client.CHUNK_SIZE, file.size);
+        const firstChunk = file.slice(firstChunkStart, firstChunkEnd);
 
         const firstChunkHeaders = { ...headers };
         firstChunkHeaders['content-range'] = `bytes ${firstChunk.start}-${(firstChunk.end ?? 0) - 1}/${file.size}`;
@@ -257,39 +247,46 @@ class Client {
             return firstResponse;
         }
 
-        const remainingChunks = chunks.slice(1);
         let response = firstResponse;
 
-        for (let batchStart = 0; batchStart < remainingChunks.length; batchStart += Client.MAX_CONCURRENCY) {
-            const batch = remainingChunks.slice(batchStart, batchStart + Client.MAX_CONCURRENCY);
+        for (let chunkIndex = 1; chunkIndex < totalChunks; chunkIndex += Client.MAX_CONCURRENCY) {
+            const batchEnd = Math.min(chunkIndex + Client.MAX_CONCURRENCY, totalChunks);
 
-            const batchPromises = batch.map(async (chunkInfo) => {
-                const chunkHeaders = { ...headers };
-                chunkHeaders['content-range'] = `bytes ${chunkInfo.start}-${chunkInfo.end - 1}/${file.size}`;
-                chunkHeaders['x-{{spec.title | caseLower}}-id'] = firstResponse.$id;
+            const batchPromises = [];
+            for (let i = chunkIndex; i < batchEnd; i++) {
+                const start = i * Client.CHUNK_SIZE;
+                const end = Math.min(start + Client.CHUNK_SIZE, file.size);
 
-                const payload = { ...originalPayload };
-                payload[fileParam] = new File([chunkInfo.chunk], file.name);
-
-                try {
-                    const chunkResponse = await this.call(method, url, chunkHeaders, payload);
-                    return {
-                        success: true,
-                        response: chunkResponse,
-                        chunkInfo,
-                        error: null
-                    };
-                } catch (error) {
-                    return {
-                        success: false,
-                        response: null,
-                        chunkInfo,
-                        error
-                    };
-                }
-            });
+                batchPromises.push((async () => {
+                    const chunk = file.slice(start, end);
+                    const chunkHeaders = { ...headers };
+                    chunkHeaders['content-range'] = `bytes ${start}-${end - 1}/${file.size}`;
+                    chunkHeaders['x-{{spec.title | caseLower}}-id'] = firstResponse.$id;
+
+                    const payload = { ...originalPayload };
+                    payload[fileParam] = new File([chunk], file.name);
+
+                    try {
+                        const chunkResponse = await this.call(method, url, chunkHeaders, payload);
+                        return {
+                            success: true,
+                            response: chunkResponse,
+                            chunkInfo: { index: i, start, end },
+                            error: null
+                        };
+                    } catch (error) {
+                        return {
+                            success: false,
+                            response: null,
+                            chunkInfo: { index: i, start, end },
+                            error
+                        };
+                    }
+                })());
+            }
+
             const batchResults = await Promise.all(batchPromises);
-            
+
             const failures = batchResults.filter(result => !result.success);
             if (failures.length > 0) {
                 const errorMessages = failures.map(f => `Chunk ${f.chunkInfo.index}: ${f.error}`);
diff --git a/templates/web/src/client.ts.twig b/templates/web/src/client.ts.twig
index dc32f2f58..19f6fc8b0 100644
--- a/templates/web/src/client.ts.twig
+++ b/templates/web/src/client.ts.twig
@@ -641,20 +641,10 @@ class Client {
         }
 
         const totalChunks = Math.ceil(file.size / Client.CHUNK_SIZE);
-        const chunks: Array<{ index: number; start: number; end: number; chunk: Blob }> = [];
 
-        for (let i = 0; i < totalChunks; i++) {
-            const start = i * Client.CHUNK_SIZE;
-            const end = Math.min(start + Client.CHUNK_SIZE, file.size);
-            const chunk = file.slice(start, end);
-            chunks.push({ index: i, start, end, chunk });
-        }
-
-        const firstChunk = chunks[0];
-
-        if (!firstChunk) {
-            throw new Error('First chunk not found');
-        }
+        const firstChunkStart = 0;
+        const firstChunkEnd = Math.min(Client.CHUNK_SIZE, file.size);
+        const firstChunk = file.slice(firstChunkStart, firstChunkEnd);
 
         const firstChunkHeaders = { ...headers };
         firstChunkHeaders['content-range'] = `bytes ${firstChunk.start}-${(firstChunk.end ?? 0) - 1}/${file.size}`;
@@ -685,39 +675,46 @@ class Client {
             return firstResponse;
         }
 
-        const remainingChunks = chunks.slice(1);
         let response = firstResponse;
 
-        for (let batchStart = 0; batchStart < remainingChunks.length; batchStart += Client.MAX_CONCURRENCY) {
-            const batch = remainingChunks.slice(batchStart, batchStart + Client.MAX_CONCURRENCY);
+        for (let chunkIndex = 1; chunkIndex < totalChunks; chunkIndex += Client.MAX_CONCURRENCY) {
+            const batchEnd = Math.min(chunkIndex + Client.MAX_CONCURRENCY, totalChunks);
 
-            const batchPromises = batch.map(async (chunkInfo) => {
-                const chunkHeaders = { ...headers };
-                chunkHeaders['content-range'] = `bytes ${chunkInfo.start}-${chunkInfo.end - 1}/${file.size}`;
-                chunkHeaders['x-{{spec.title | caseLower }}-id'] = firstResponse.$id;
+            const batchPromises = [];
+            for (let i = chunkIndex; i < batchEnd; i++) {
+                const start = i * Client.CHUNK_SIZE;
+                const end = Math.min(start + Client.CHUNK_SIZE, file.size);
 
-                const payload = { ...originalPayload };
-                payload[fileParam] = new File([chunkInfo.chunk], file.name);
-
-                try {
-                    const chunkResponse = await this.call(method, url, chunkHeaders, payload);
-                    return {
-                        success: true,
-                        response: chunkResponse,
-                        chunkInfo,
-                        error: null
-                    };
-                } catch (error) {
-                    return {
-                        success: false,
-                        response: null,
-                        chunkInfo,
-                        error
-                    };
-                }
-            });
+                batchPromises.push((async () => {
+                    const chunk = file.slice(start, end);
+                    const chunkHeaders = { ...headers };
+                    chunkHeaders['content-range'] = `bytes ${start}-${end - 1}/${file.size}`;
+                    chunkHeaders['x-{{spec.title | caseLower}}-id'] = firstResponse.$id;
+
+                    const payload = { ...originalPayload };
+                    payload[fileParam] = new File([chunk], file.name);
+
+                    try {
+                        const chunkResponse = await this.call(method, url, chunkHeaders, payload);
+                        return {
+                            success: true,
+                            response: chunkResponse,
+                            chunkInfo: { index: i, start, end },
+                            error: null
+                        };
+                    } catch (error) {
+                        return {
+                            success: false,
+                            response: null,
+                            chunkInfo: { index: i, start, end },
+                            error
+                        };
+                    }
+                })());
+            }
+
             const batchResults = await Promise.all(batchPromises);
-            
+
             const failures = batchResults.filter(result => !result.success);
             if (failures.length > 0) {
                 const errorMessages = failures.map(f => `Chunk ${f.chunkInfo.index}: ${f.error}`);

From 7540fd6e63d02b15c47039037dcfab02dcd6d18f Mon Sep 17 00:00:00 2001
From: Chirag Aggarwal
Date: Sun, 10 Aug 2025 18:37:52 +0530
Subject: [PATCH 5/5] fix: upload issue

---
 templates/node/src/client.ts.twig | 6 +++---
 templates/web/src/client.ts.twig  | 6 +++---
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/templates/node/src/client.ts.twig b/templates/node/src/client.ts.twig
index 8096c8b82..684b491ae 100644
--- a/templates/node/src/client.ts.twig
+++ b/templates/node/src/client.ts.twig
@@ -219,10 +219,10 @@ class Client {
         const firstChunk = file.slice(firstChunkStart, firstChunkEnd);
 
         const firstChunkHeaders = { ...headers };
-        firstChunkHeaders['content-range'] = `bytes ${firstChunk.start}-${(firstChunk.end ?? 0) - 1}/${file.size}`;
+        firstChunkHeaders['content-range'] = `bytes ${firstChunkStart}-${firstChunkEnd - 1}/${file.size}`;
 
         const firstPayload = { ...originalPayload };
-        firstPayload[fileParam] = new File([firstChunk.chunk], file.name);
+        firstPayload[fileParam] = new File([firstChunk], file.name);
 
         const firstResponse = await this.call(method, url, firstChunkHeaders, firstPayload);
 
@@ -231,7 +231,7 @@ class Client {
         }
 
         let completedChunks = 1;
-        let totalUploaded = firstChunk.end ?? 0;
+        let totalUploaded = firstChunkEnd;
 
         if (onProgress && typeof onProgress === 'function') {
             onProgress({
diff --git a/templates/web/src/client.ts.twig b/templates/web/src/client.ts.twig
index 19f6fc8b0..c37d0cb66 100644
--- a/templates/web/src/client.ts.twig
+++ b/templates/web/src/client.ts.twig
@@ -647,10 +647,10 @@ class Client {
         const firstChunk = file.slice(firstChunkStart, firstChunkEnd);
 
         const firstChunkHeaders = { ...headers };
-        firstChunkHeaders['content-range'] = `bytes ${firstChunk.start}-${(firstChunk.end ?? 0) - 1}/${file.size}`;
+        firstChunkHeaders['content-range'] = `bytes ${firstChunkStart}-${firstChunkEnd - 1}/${file.size}`;
 
         const firstPayload = { ...originalPayload };
-        firstPayload[fileParam] = new File([firstChunk.chunk], file.name);
+        firstPayload[fileParam] = new File([firstChunk], file.name);
 
         const firstResponse = await this.call(method, url, firstChunkHeaders, firstPayload);
 
@@ -659,7 +659,7 @@ class Client {
         }
 
         let completedChunks = 1;
-        let totalUploaded = firstChunk.end ?? 0;
+        let totalUploaded = firstChunkEnd;
 
         if (onProgress && typeof onProgress === 'function') {
             onProgress({