
Commit 7a2a71a

Merge pull request #83 from bpot/bp/chunked-clear-buffer
Fix handling of large chunks in chunked decoder
2 parents fc82282 + e9aca44

1 file changed: +31 -1
src/chunked.rs

Lines changed: 31 additions & 1 deletion
@@ -72,7 +72,11 @@ impl<R: Read + Unpin> ChunkedDecoder<R> {
             let to_read_buf = std::cmp::min(to_read, pos.len());
             buf[..to_read_buf].copy_from_slice(&buffer[new_pos.start..new_pos.start + to_read_buf]);

-            new_pos.start += to_read_buf;
+            if new_pos.start + to_read_buf == new_pos.end {
+                new_pos = 0..0
+            } else {
+                new_pos.start += to_read_buf;
+            }
             new_current += to_read_buf as u64;
             read += to_read_buf;

@@ -537,6 +541,32 @@ mod tests {
         });
     }

+    #[test]
+    fn test_chunked_big() {
+        async_std::task::block_on(async move {
+            let mut input: Vec<u8> = "800\r\n".as_bytes().to_vec();
+            input.extend(vec![b'X'; 2048]);
+            input.extend("\r\n1800\r\n".as_bytes());
+            input.extend(vec![b'Y'; 6144]);
+            input.extend("\r\n800\r\n".as_bytes());
+            input.extend(vec![b'Z'; 2048]);
+            input.extend("\r\n0\r\n\r\n".as_bytes());
+
+            let (s, _r) = async_std::sync::channel(1);
+            let sender = TrailersSender::new(s);
+            let mut decoder = ChunkedDecoder::new(async_std::io::Cursor::new(input), sender);
+
+            let mut output = String::new();
+            decoder.read_to_string(&mut output).await.unwrap();
+
+            let mut expected = vec![b'X'; 2048];
+            expected.extend(vec![b'Y'; 6144]);
+            expected.extend(vec![b'Z'; 2048]);
+            assert_eq!(output.len(), 10240);
+            assert_eq!(output.as_bytes(), expected.as_slice());
+        });
+    }
+
     #[test]
     fn test_chunked_mdn() {
         async_std::task::block_on(async move {
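A note on the new test_chunked_big test: chunk-size lines in chunked transfer encoding are hexadecimal, so the three chunks carry 0x800 = 2048 bytes of X, 0x1800 = 6144 bytes of Y, and 0x800 = 2048 bytes of Z, for a 10240-byte decoded body, which is what the two assertions check. The sizes appear chosen to be large relative to the decoder's internal buffering, the case the fix above targets. A quick standalone check of that arithmetic:

fn main() {
    // Chunk-size lines in the test are hexadecimal strings.
    let x = usize::from_str_radix("800", 16).unwrap();  // 2048 bytes of 'X'
    let y = usize::from_str_radix("1800", 16).unwrap(); // 6144 bytes of 'Y'
    let z = usize::from_str_radix("800", 16).unwrap();  // 2048 bytes of 'Z'
    assert_eq!((x, y, z), (2048, 6144, 2048));
    // Total decoded body length matches the test's assertion on output.len().
    assert_eq!(x + y + z, 10240);
}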