Commit f2fe2f8

Finish chunked
1 parent a94097c commit f2fe2f8

2 files changed: +68 -27 lines changed


examples/server.rs

Lines changed: 5 additions & 3 deletions
@@ -18,9 +18,11 @@ async fn accept(addr: String, stream: TcpStream) -> Result<(), async_h1::Excepti
         async {
             let resp = Response::new(StatusCode::Ok)
                 .set_header("Content-Type", "text/plain")?
-                .set_body(io::Cursor::new(vec![
-                    0x48u8, 0x65, 0x6C, 0x6C, 0x6F, 0x20, 0x77, 0x6F, 0x72, 0x6C, 0x64, 0x21,
-                ]));
+                .set_body_string("Hello".into())?;
+            // To try chunked encoding, replace `set_body_string` with the following method call
+            // .set_body(io::Cursor::new(vec![
+            //     0x48u8, 0x65, 0x6C, 0x6C, 0x6F, 0x20, 0x77, 0x6F, 0x72, 0x6C, 0x64, 0x21,
+            // ]));
             Ok(resp)
         }
     })
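With `set_body_string` the response has a known length, so the encoder writes a plain fixed-length body; switching back to the commented-out `io::Cursor` body leaves the length unknown and exercises the chunked path. As a rough illustration (not part of this commit), the framing for the "Hello world!" bytes in that cursor would look like this on the wire, assuming the encoder follows the usual HTTP/1.1 chunked convention of a hex chunk size, CRLF, chunk data, CRLF, and a terminating zero-length chunk; `framed` is just a name used here for the sketch:

// A minimal sketch, not code from the commit: HTTP/1.1 chunked framing of a
// single "Hello world!" chunk followed by the terminating zero-length chunk.
fn main() {
    let body = b"Hello world!";
    let mut framed = Vec::new();
    framed.extend_from_slice(format!("{:X}\r\n", body.len()).as_bytes()); // "C\r\n"
    framed.extend_from_slice(body); // the chunk's data
    framed.extend_from_slice(b"\r\n"); // chunk terminator
    framed.extend_from_slice(b"0\r\n\r\n"); // final zero-length chunk
    assert_eq!(&framed[..], &b"C\r\nHello world!\r\n0\r\n\r\n"[..]);
}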

src/server.rs

Lines changed: 63 additions & 24 deletions
@@ -101,7 +101,11 @@ enum EncoderState {
         body_bytes_read: usize,
         body_len: usize,
     },
-    Chunked,
+    UncomputedChunked,
+    ComputedChunked {
+        chunk: io::Cursor<Vec<u8>>,
+        is_last: bool,
+    },
     Done,
 }

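The hunk above splits the old `Chunked` state in two: `UncomputedChunked` frames the next chunk into its own buffer, while `ComputedChunked` drains that framed buffer into the caller's `buf`. A minimal sketch of the resulting transition order (illustrative only; the enum is copied from the diff, and `next` is a hypothetical helper, not code from the commit):

use std::io;

enum EncoderState {
    Start,
    Body { body_bytes_read: usize, body_len: usize },
    UncomputedChunked,
    ComputedChunked { chunk: io::Cursor<Vec<u8>>, is_last: bool },
    Done,
}

// Hypothetical helper spelling out the order the encoder walks through.
fn next(state: EncoderState, body_len: Option<usize>) -> EncoderState {
    match state {
        // A known response length means a plain fixed-length body.
        EncoderState::Start => match body_len {
            Some(body_len) => EncoderState::Body { body_bytes_read: 0, body_len },
            None => EncoderState::UncomputedChunked,
        },
        // The fixed-length body ends once `body_len` bytes have been read.
        EncoderState::Body { body_bytes_read, body_len } if body_bytes_read == body_len => {
            EncoderState::Done
        }
        other @ EncoderState::Body { .. } => other,
        // Frame the next chunk into its own buffer before copying it out.
        EncoderState::UncomputedChunked => EncoderState::ComputedChunked {
            chunk: io::Cursor::new(Vec::new()),
            is_last: false,
        },
        // Once the framed chunk is drained: stop after the zero-length chunk,
        // otherwise go frame another one.
        EncoderState::ComputedChunked { is_last: true, .. } => EncoderState::Done,
        EncoderState::ComputedChunked { is_last: false, .. } => EncoderState::UncomputedChunked,
        EncoderState::Done => EncoderState::Done,
    }
}

fn main() {
    // Chunked path: Start -> UncomputedChunked -> ComputedChunked -> ...
    let s = next(EncoderState::Start, None);
    assert!(matches!(s, EncoderState::UncomputedChunked));
}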
@@ -149,13 +153,12 @@ impl Read for Encoder {
     fn poll_read(
         mut self: Pin<&mut Self>,
         cx: &mut Context<'_>,
-        buf: &mut [u8],
+        mut buf: &mut [u8],
     ) -> Poll<io::Result<usize>> {
         // we must keep track how many bytes of the head and body we've read
         // in this call of `poll_read`
         let mut bytes_read = 0;
         loop {
-            println!("{:?}", self.state);
             match self.state {
                 EncoderState::Start => {
                     // Encode the headers to a buffer, the first time we poll
@@ -181,14 +184,18 @@ impl Read for Encoder {
                     // reading the head and can transition to reading the body
                     if head_bytes_read == head_len {
                         // The response length lets us know if we are encoding
-                        // our body in chunks or now
+                        // our body in chunks or not
                         self.state = match self.res.len() {
                             Some(body_len) => EncoderState::Body {
                                 body_bytes_read: 0,
                                 body_len,
                             },
-                            None => EncoderState::Chunked,
+                            None => EncoderState::UncomputedChunked,
                         };
+                    } else {
+                        // If we haven't read the entire header it means `buf` isn't
+                        // big enough. Break out of loop and return from `poll_read`
+                        break;
                     }
                 }
                 EncoderState::Body {
@@ -203,9 +210,10 @@ impl Read for Encoder {
                     if bytes_read == buf.len() {
                         break;
                     }
-                    // figure out how many bytes we can read
+
+                    // Figure out how many bytes we can read.
                     let upper_bound = (bytes_read + body_len - body_bytes_read).min(buf.len());
-                    // Read bytes, and update internal tracking stuff.
+                    // Read bytes from body
                     let new_body_bytes_read =
                         ready!(Pin::new(&mut self.res)
                             .poll_read(cx, &mut buf[bytes_read..upper_bound]))?;
@@ -220,8 +228,8 @@ impl Read for Encoder {
                         body_len,
                         body_bytes_read
                     );
-                    // If we've read the `len` number of bytes or the stream no longer gives bytes, end.
-                    self.state = if body_len == body_bytes_read || body_bytes_read == 0 {
+                    // If we've read the `len` number of bytes, end
+                    self.state = if body_len == body_bytes_read {
                         EncoderState::Done
                     } else {
                         EncoderState::Body {
@@ -230,15 +238,15 @@ impl Read for Encoder {
                         }
                     };
                 }
-                EncoderState::Chunked => {
-                    // ensure we have at least room for 1 more byte in our buffer
-                    if bytes_read == buf.len() {
-                        break;
-                    }
-
+                EncoderState::UncomputedChunked => {
                     // We can read a maximum of the buffer's total size
                     // minus what we've already filled the buffer with
                     let buffer_remaining = buf.len() - bytes_read;
+
+                    // ensure we have at least room for 1 byte in our buffer
+                    if buffer_remaining == 0 {
+                        break;
+                    }
                     // we must allocate a separate buffer for the chunk data
                     // since we first need to know its length before writing
                     // it into the actual buffer
@@ -282,22 +290,53 @@ impl Read for Encoder {
                         buf[bytes_read] = b'\r';
                         buf[bytes_read + 1] = b'\n';
                         bytes_read += 2;
+
+                        if chunk_length == 0 {
+                            self.state = EncoderState::Done;
+                        }
                     } else {
-                        unimplemented!("TODO: handle when buf isn't big enough");
-                    }
-                    const EMPTY_CHUNK: &[u8; 5] = b"0\r\n\r\n";
+                        let mut chunk = vec![0; total_chunk_size];
+                        let mut bytes_written = 0;
+                        // Write the chunk length into the buffer
+                        chunk[0..chunk_length_bytes_len].copy_from_slice(chunk_length_bytes);
+                        bytes_written += chunk_length_bytes_len;
 
-                    buf[bytes_read..bytes_read + EMPTY_CHUNK.len()].copy_from_slice(EMPTY_CHUNK);
+                        // follow chunk length with CRLF
+                        chunk[bytes_written] = b'\r';
+                        chunk[bytes_written + 1] = b'\n';
+                        bytes_written += 2;
 
-                    // if body_bytes_read == 0 {
-                    bytes_read += 7;
-                    self.state = EncoderState::Done;
-                    // }
+                        // copy chunk into buf
+                        chunk[bytes_written..bytes_written + chunk_length]
+                            .copy_from_slice(&chunk_buf[..chunk_length]);
+                        bytes_written += chunk_length;
+
+                        // follow chunk with CRLF
+                        chunk[bytes_written] = b'\r';
+                        chunk[bytes_written + 1] = b'\n';
+                        bytes_read += 2;
+                        self.state = EncoderState::ComputedChunked {
+                            chunk: io::Cursor::new(chunk),
+                            is_last: chunk_length == 0,
+                        };
+                    }
+                }
+                EncoderState::ComputedChunked {
+                    ref mut chunk,
+                    is_last,
+                } => {
+                    bytes_read += ready!(Pin::new(chunk).poll_read(cx, &mut buf))?;
+                    if bytes_read == 0 {
+                        self.state = match is_last {
+                            true => EncoderState::Done,
+                            false => EncoderState::UncomputedChunked,
+                        }
+                    }
+                    break;
                 }
                 EncoderState::Done => break,
             }
         }
-        println!("{:?}", std::str::from_utf8(&buf[0..bytes_read]));
 
         Poll::Ready(Ok(bytes_read as usize))
     }
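The new `ComputedChunked` arm leans on `io::Cursor` semantics: each poll copies as much of the framed chunk as fits into `buf`, and a read of 0 bytes signals that the cursor is exhausted, at which point the encoder either frames the next chunk (`UncomputedChunked`) or finishes (`Done`). A small synchronous sketch of that drain pattern, illustrative only; the commit's code is async and uses `poll_read`:

use std::io::{self, Read};

fn main() -> io::Result<()> {
    // A framed chunk as the encoder would build it: hex length, CRLF, data, CRLF.
    let mut chunk = io::Cursor::new(b"C\r\nHello world!\r\n".to_vec());
    let mut buf = [0u8; 8]; // deliberately smaller than the chunk
    loop {
        let n = chunk.read(&mut buf)?;
        if n == 0 {
            // Cursor exhausted: the encoder would now switch to
            // `UncomputedChunked` (or `Done` if `is_last` was set).
            break;
        }
        println!("drained {} bytes: {:?}", n, &buf[..n]);
    }
    Ok(())
}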
