|
74 | 74 |
|
75 | 75 | return await new Promise((resolve, reject) => {
|
76 | 76 | const writeStream = new Stream.Writable();
|
77 |
// Writable-stream _write implementation: splits each incoming chunk into
// pieces of at most client.CHUNK_SIZE bytes, accumulates them in the
// enclosing-scope buffer `currentChunk`, and flushes a full buffer to the
// server via uploadChunk(). Any remainder stays in `currentChunk` for the
// next write (or for the stream's "finish" handler to flush).
// Errors from uploadChunk() are routed to callback(e) so they surface as
// stream errors instead of unhandled rejections.
writeStream._write = async (mainChunk, encoding, callback) => {
    try {
        // Segment incoming chunk into up to CHUNK_SIZE-byte pieces.
        const mainChunkSize = Buffer.byteLength(mainChunk);
        const chunksCount = Math.ceil(mainChunkSize / client.CHUNK_SIZE);
        const chunks = [];

        for (let i = 0; i < chunksCount; i++) {
            // BUG FIX: the end offset must advance with `i`. The original
            // `slice(i * CHUNK_SIZE, CHUNK_SIZE)` yielded an empty buffer for
            // every segment after the first (end <= start), silently dropping
            // all data past the first CHUNK_SIZE bytes of each write.
            const chunk = mainChunk.slice(i * client.CHUNK_SIZE, (i + 1) * client.CHUNK_SIZE);
            chunks.push(chunk);
        }

        for (const chunk of chunks) {
            const chunkSize = Buffer.byteLength(chunk);

            if (chunkSize + currentChunkSize === client.CHUNK_SIZE) {
                // Exactly fills the buffer: upload and reset.
                currentChunk = Buffer.concat([currentChunk, chunk]);
                await uploadChunk();
                currentChunk = Buffer.from('');
                currentChunkSize = 0;
            } else if (chunkSize + currentChunkSize > client.CHUNK_SIZE) {
                // Overflows the buffer: top it up to CHUNK_SIZE, upload,
                // then carry the remainder into the next buffer.
                const bytesToUpload = client.CHUNK_SIZE - currentChunkSize;
                const newChunkSection = chunk.slice(0, bytesToUpload);
                currentChunk = Buffer.concat([currentChunk, newChunkSection]);
                currentChunkSize = Buffer.byteLength(currentChunk);
                await uploadChunk();
                currentChunk = chunk.slice(bytesToUpload);
                currentChunkSize = chunkSize - bytesToUpload;
            } else {
                // Fits with room to spare: just accumulate.
                currentChunk = Buffer.concat([currentChunk, chunk]);
                currentChunkSize = chunkSize + currentChunkSize;
            }
        }

        callback();
    } catch (e) {
        // Signal the failure through the stream's error path.
        callback(e);
    }
}
|
115 | 119 |
|
116 | 120 | writeStream.on("finish", async () => {
|
117 | 121 | if(currentChunkSize > 0) {
|
118 |
| - await uploadChunk(true); |
| 122 | + try { |
| 123 | + await uploadChunk(true); |
| 124 | + } catch (e) { |
| 125 | + reject(e); |
| 126 | + } |
119 | 127 | }
|
120 | 128 |
|
121 | 129 | resolve(response);
|
|
0 commit comments