|
{% for parameter in method.parameters.all %}
{% if parameter.type == 'file' %}

        // Total byte size of the file being uploaded; used for the
        // content-range header and progress reporting below.
        const size = {{ parameter.name | caseCamel | escapeKeyword }}.size;

        const apiHeaders = {
{% for parameter in method.parameters.header %}
            '{{ parameter.name }}': ${{ parameter.name | caseCamel | escapeKeyword }},
|
{% endif %}
{% endfor %}

        // Chunked-upload state: chunks are numbered from 1; `currentPosition`
        // is the number of valid bytes buffered in `uploadableChunk`.
        let currentChunk = 1;
        let currentPosition = 0;
        let uploadableChunk = new Uint8Array(client.CHUNK_SIZE);

        // Upload the currently buffered chunk. `lastUpload` marks the final
        // (possibly partial) chunk; a single-request upload (first and last
        // chunk at once) omits the content-range header.
        const uploadChunk = async (lastUpload = false) => {
            // Skip chunks the server already holds (resumed upload).
            if (currentChunk <= chunksUploaded) {
                return;
            }

            const start = ((currentChunk - 1) * client.CHUNK_SIZE);
            let end = start + currentPosition - 1;

            if (!lastUpload || currentChunk !== 1) {
                apiHeaders['content-range'] = 'bytes ' + start + '-' + end + '/' + size;
            }

            let uploadableChunkTrimmed;

            // Only a completely full buffer may be sent as-is; a partial final
            // chunk must be trimmed so no stale bytes past `end` are uploaded.
            // (Was `currentPosition + 1 >= client.CHUNK_SIZE`, which shipped one
            // stale byte when the last chunk was exactly CHUNK_SIZE - 1 bytes.)
            if (currentPosition >= client.CHUNK_SIZE) {
                uploadableChunkTrimmed = uploadableChunk;
            } else {
                uploadableChunkTrimmed = new Uint8Array(currentPosition);
                // Copy exactly `currentPosition` valid bytes. (Was `i <=
                // currentPosition`, an out-of-bounds write that typed arrays
                // silently drop.)
                for (let i = 0; i < currentPosition; i++) {
                    uploadableChunkTrimmed[i] = uploadableChunk[i];
                }
            }

            // After the first response the server assigns an id; send it back
            // so subsequent chunks append to the same file.
            if (id) {
                apiHeaders['x-{{spec.title | caseLower }}-id'] = id;
            }

            payload['{{ parameter.name }}'] = { type: 'file', file: new File([uploadableChunkTrimmed], {{ parameter.name | caseCamel | escapeKeyword }}.filename), filename: {{ parameter.name | caseCamel | escapeKeyword }}.filename };

            response = await this.client.call('{{ method.method | caseLower }}', apiPath, apiHeaders, payload{% if method.type == 'location' %}, 'arraybuffer'{% endif %});

            if (!id) {
                id = response['$id'];
            }

            if (onProgress !== null) {
                onProgress({
                    $id: response['$id'],
                    progress: Math.min((currentChunk) * client.CHUNK_SIZE, size) / size * 100,
                    sizeUploaded: end + 1,
                    chunksTotal: response['chunksTotal'],
                    chunksUploaded: response['chunksUploaded']
                });
            }

            // Reset the buffer for the next chunk.
            uploadableChunk = new Uint8Array(client.CHUNK_SIZE);
            currentChunk++;
            currentPosition = 0;
        }

        // Drain the async input stream byte-by-byte into the chunk buffer,
        // flushing each time a full CHUNK_SIZE is accumulated.
        for await (const chunk of {{ parameter.name | caseCamel | escapeKeyword }}.stream) {
            for (const b of chunk) {
                uploadableChunk[currentPosition] = b;

                currentPosition++;
                if (currentPosition >= client.CHUNK_SIZE) {
                    await uploadChunk();
                    currentPosition = 0;
                }
            }
        }

        if (currentPosition > 0) { // Flush any remaining data as the last chunk
            await uploadChunk(true);
        }

        return response;
{% endif %}
{% endfor %}
|
0 commit comments