Skip to content

Commit ded1807

Browse files
authored
Merge pull request #8840 from liranmauda/liran-backport-into-5.14
[Backport into 5.14] boto fix and version bump
2 parents 96fda78 + 5f40621 commit ded1807

File tree

5 files changed

+472
-36
lines changed

5 files changed

+472
-36
lines changed

package-lock.json

Lines changed: 2 additions & 2 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "noobaa-core",
3-
"version": "5.14.16",
3+
"version": "5.14.17",
44
"license": "SEE LICENSE IN LICENSE",
55
"description": "",
66
"homepage": "https://github.com/noobaa/noobaa-core",
Lines changed: 252 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,252 @@
1+
/* Copyright (C) 2025 NooBaa */
2+
/* eslint-disable no-undef */
3+
'use strict';
4+
5+
const stream = require('stream');
6+
const assert = require('assert');
7+
const ChunkedContentDecoder = require('../../../util/chunked_content_decoder');
8+
const buffer_utils = require('../../../util/buffer_utils');
9+
10+
describe('ChunkedContentDecoder', function() {
11+
12+
// Reminder about chunk structure:
// <hex bytes of data>\r\n
// <data>
// ....
// the end of the chunk:
// 0\r\n
// \r\n
//
// The following example was copied from:
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Transfer-Encoding
// 7\r\n
// Mozilla\r\n
// 11\r\n
// Developer Network\r\n
// 0\r\n
// \r\n
28+
29+
// For easier debugging you can set the number of iterations here:
// important cases get many random-split repetitions, the rest get a quick default.
const NUMBER_OF_ITERATIONS_IMPORTANT_CASE = 100;
const NUMBER_OF_ITERATIONS_DEFAULT = 2;
32+
33+
describe('expected to parse the input', function() {

    // Each case is piped through a fresh ChunkedContentDecoder by
    // test_parse_output() and the decoded bytes are compared to `output`.
    // An optional `check` callback inspects the decoder after the pipeline.
    const success_cases = [{
        name: 'one_chunk',
        input: '3\r\n' +
            'foo\r\n' +
            '0\r\n' +
            '\r\n',
        output: 'foo',
        iterations: NUMBER_OF_ITERATIONS_DEFAULT,
    }, {
        name: 'two_chunks',
        input: '3\r\n' +
            'foo\r\n' +
            '3\r\n' +
            'bar\r\n' +
            '0\r\n' +
            '\r\n',
        output: 'foobar',
        iterations: NUMBER_OF_ITERATIONS_IMPORTANT_CASE,
    }, {
        name: 'three_chunks_with_trailers',
        input: '3\r\n' +
            'foo\r\n' +
            '6\r\n' +
            'barbaz\r\n' +
            'ff\r\n' +
            'f'.repeat(255) + '\r\n' +
            '0\r\n' +
            'x-trailer-1:value\r\n' +
            'x-trailer-2:value\r\n' +
            '\r\n',
        output: 'foobarbaz' + 'f'.repeat(255),
        iterations: NUMBER_OF_ITERATIONS_IMPORTANT_CASE,
        check: decoder => {
            assert.deepStrictEqual(decoder.trailers, [
                'x-trailer-1:value',
                'x-trailer-2:value',
            ]);
        },
    }, {
        name: 'no_chunk_with_trailers',
        input: '0\r\n' +
            'movie:trailer\r\n' +
            'semi:trailer\r\n' +
            '\r\n',
        output: '',
        iterations: NUMBER_OF_ITERATIONS_IMPORTANT_CASE,
        check: decoder => {
            assert.deepStrictEqual(decoder.trailers, [
                'movie:trailer',
                'semi:trailer',
            ]);
        },
    }, {
        name: 'one_chunk_with_extension',
        input: '3;crc=1a2b3c4d\r\n' +
            'EXT\r\n' +
            '0\r\n' +
            '\r\n',
        output: 'EXT',
        iterations: NUMBER_OF_ITERATIONS_IMPORTANT_CASE,
    }, {
        name: 'one_chunk_with_extension_and_trailer',
        input: '3;crc=1a2b3c4d\r\n' +
            'EXT\r\n' +
            '0\r\n' +
            create_trailers(1) +
            '\r\n',
        output: 'EXT',
        iterations: NUMBER_OF_ITERATIONS_IMPORTANT_CASE,
    }, {
        name: 'one_chunk_with_trailers', // lower than MAX_CHUNK_HEADER_SIZE
        input: '3\r\n' +
            'foo\r\n' +
            '0\r\n' +
            create_trailers(19) +
            '\r\n',
        output: 'foo',
        iterations: NUMBER_OF_ITERATIONS_DEFAULT,
    }];

    // registration order matches the original inline calls
    for (const test_case of success_cases) {
        test_parse_output(test_case);
    }

});
127+
128+
describe('expected to have an error on parse', function() {

    // Each case must make the decoder fail; when `error_pos` is set,
    // the decoder's stream_pos at failure time is asserted as well.
    const error_cases = [{
        name: 'chunk_size_not_hex',
        input: 'invalid\r\n\r\n',
        error_pos: 7, // end of header
        iterations: NUMBER_OF_ITERATIONS_IMPORTANT_CASE,
    }, {
        name: 'chunk_size_too_big', // according to MAX_CHUNK_SIZE
        input: '10000000001\r\n\r\n',
        error_pos: 11, // end of header
        iterations: NUMBER_OF_ITERATIONS_IMPORTANT_CASE,
    }, {
        name: 'header_too_long', // according to MAX_CHUNK_HEADER_SIZE
        input: '0' + ';'.repeat(1024) + '\r\n\r\n',
        error_pos: 1025, // end of header
        iterations: NUMBER_OF_ITERATIONS_IMPORTANT_CASE,
    }, {
        name: 'too_many_trailers', // according to MAX_CHUNK_HEADER_SIZE
        input: '3\r\n' +
            'foo\r\n' +
            '0\r\n' +
            create_trailers(21) +
            '\r\n',
        error_pos: 420, // last trailer position
        iterations: NUMBER_OF_ITERATIONS_DEFAULT,
    }];

    // registration order matches the original inline calls
    for (const test_case of error_cases) {
        test_parse_error(test_case);
    }

});
163+
164+
/**
 * Register a test that pipes `input` through a fresh ChunkedContentDecoder
 * and asserts the decoded bytes equal `output`. On every iteration the
 * input is pushed in two pieces cut at a random offset, so chunk headers
 * and data that straddle stream-buffer boundaries are also exercised.
 * @param {{
 *  name: string,
 *  input: string,
 *  output: string,
 *  iterations?: number,
 *  check?: (decoder: ChunkedContentDecoder) => void,
 * }} params
 */
function test_parse_output({ name, input, output, check, iterations = NUMBER_OF_ITERATIONS_DEFAULT }) {
    it(name, async function() {
        for (let iteration = 0; iteration < iterations; iteration += 1) {
            const decoder = new ChunkedContentDecoder();
            console.log(`test_parse_output(${name}): decoder input`, input, decoder.get_debug_info());
            const source = new stream.Readable({
                read() {
                    // split at random position
                    const split_pos = Math.floor(input.length * Math.random());
                    this.push(input.slice(0, split_pos));
                    this.push(input.slice(split_pos));
                    this.push(null);
                }
            });
            const sink = buffer_utils.write_stream();
            await stream.promises.pipeline(source, decoder, sink);
            const decoded = buffer_utils.join(sink.buffers, sink.total_length);
            console.log(`test_parse_output(${name}): decoder returned`, decoded, decoder.get_debug_info());
            assert.deepStrictEqual(decoded, Buffer.from(output));
            // optional post-pipeline inspection (e.g. collected trailers)
            check?.(decoder);
        }
    });
}
196+
197+
/**
 * Register a test that pipes `input` through a fresh ChunkedContentDecoder
 * and expects the pipeline to fail. The input is pushed in two pieces cut
 * at a random offset per iteration, so failures are also hit when the bad
 * header straddles two pushes.
 * @param {{
 *  name: string,
 *  input: string,
 *  error_pos?: number,
 *  iterations?: number,
 * }} params
 */
function test_parse_error({ name, input, error_pos, iterations = NUMBER_OF_ITERATIONS_DEFAULT }) {
    it(name, async function() {
        for (let i = 0; i < iterations; ++i) {
            const decoder = new ChunkedContentDecoder();
            console.log(`test_parse_error(${name}): decoder input`, input, decoder.get_debug_info());
            try {
                const readable = new stream.Readable({
                    read() {
                        // split at random position
                        const sp = Math.floor(input.length * Math.random());
                        this.push(input.slice(0, sp));
                        this.push(input.slice(sp));
                        this.push(null);
                    }
                });
                const writable = buffer_utils.write_stream();
                await stream.promises.pipeline(readable, decoder, writable);
                const decoded = buffer_utils.join(writable.buffers, writable.total_length);
                console.log(`test_parse_error(${name}): decoder returned`, decoded, decoder.get_debug_info());
                assert.fail('Should have failed');
            } catch (err) {
                // let the sentinel thrown by assert.fail() propagate
                if (err.message === 'Should have failed') throw err;
                console.log(`test_parse_error(${name}): decoder caught`, err, decoder.get_debug_info());
                // when provided, pin the exact byte offset the decoder reached
                if (error_pos !== undefined) {
                    assert.strictEqual(decoder.stream_pos, error_pos);
                }
            }
        }
    });
}
236+
237+
238+
/**
 * Build a single string containing `number_of_trailers` trailer lines,
 * each of the form `x-trailer-<i>:value\r\n` with 1-based indices.
 * @param {number} number_of_trailers
 * @returns {string}
 */
function create_trailers(number_of_trailers) {
    return Array.from(
        { length: number_of_trailers },
        (unused, index) => `x-trailer-${index + 1}:value\r\n`
    ).join('');
}
251+
252+
});

0 commit comments

Comments
 (0)