Skip to content

Commit 9e0db6c

Browse files
committed
refactor: remove writer from tokenizer string parser
1 parent 0a05544 commit 9e0db6c

File tree

5 files changed

+80
-237
lines changed

5 files changed

+80
-237
lines changed

src/parsers/error.rs

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -62,20 +62,20 @@ pub enum Error {
6262
///
6363
/// The string parser found an invalid byte for the string length. The
6464
/// length can only be made of digits (0-9).
65-
#[error("Invalid string length byte, expected a digit; {0}; {1}")]
66-
InvalidStringLengthByte(ReadContext, WriteContext),
65+
#[error("Invalid string length byte, expected a digit; {0}")]
66+
InvalidStringLengthByte(ReadContext),
6767

6868
/// Unexpected end of input parsing string length.
6969
///
7070
/// The input ends before the string length ends.
71-
#[error("Unexpected end of input parsing string length; {0}; {1}")]
72-
UnexpectedEndOfInputParsingStringLength(ReadContext, WriteContext),
71+
#[error("Unexpected end of input parsing string length; {0}")]
72+
UnexpectedEndOfInputParsingStringLength(ReadContext),
7373

7474
/// Unexpected end of input parsing string value.
7575
///
7676
/// The input ends before the string value ends.
77-
#[error("Unexpected end of input parsing string value; {0}; {1}")]
78-
UnexpectedEndOfInputParsingStringValue(ReadContext, WriteContext),
77+
#[error("Unexpected end of input parsing string value; {0}")]
78+
UnexpectedEndOfInputParsingStringValue(ReadContext),
7979

8080
// Lists
8181
/// Unexpected end of input parsing list. Expecting first list item or list end.

src/parsers/mod.rs

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,9 +4,7 @@
44
//! ``BencodeParser`` is the main parser. It is generic over the type of the
55
//! input buffer.
66
pub mod error;
7-
pub mod integer;
87
pub mod stack;
9-
pub mod string;
108
pub mod tokenizer;
119

1210
/* TODO:

src/parsers/tokenizer/integer.rs

Lines changed: 13 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -134,42 +134,30 @@ fn next_byte<R: Read>(reader: &mut ByteReader<R>) -> Result<u8, Error> {
134134

135135
#[cfg(test)]
136136
mod tests {
137-
use crate::{
138-
parsers::{error::Error, integer::parse},
139-
rw::{byte_reader::ByteReader, string_writer::StringWriter},
140-
};
137+
use crate::{parsers::error::Error, rw::byte_reader::ByteReader};
141138

142-
fn bencode_to_json_unchecked(input_buffer: &[u8]) -> String {
143-
let mut output = String::new();
139+
use super::parse;
144140

145-
parse_bencode(input_buffer, &mut output).expect("Bencode to JSON conversion failed");
146-
147-
output
141+
fn bencode_to_json_unchecked(input_buffer: &[u8]) -> Vec<u8> {
142+
parse_bencode(input_buffer).expect("Bencode to JSON conversion failed")
148143
}
149144

150-
fn try_bencode_to_json(input_buffer: &[u8]) -> Result<String, Error> {
151-
let mut output = String::new();
152-
153-
match parse_bencode(input_buffer, &mut output) {
154-
Ok(_value) => Ok(output),
155-
Err(err) => Err(err),
156-
}
145+
fn try_bencode_to_json(input_buffer: &[u8]) -> Result<Vec<u8>, Error> {
146+
parse_bencode(input_buffer)
157147
}
158148

159-
fn parse_bencode(input_buffer: &[u8], output: &mut String) -> Result<Vec<u8>, Error> {
149+
fn parse_bencode(input_buffer: &[u8]) -> Result<Vec<u8>, Error> {
160150
let mut reader = ByteReader::new(input_buffer);
161151

162-
let mut writer = StringWriter::new(output);
163-
164-
parse(&mut reader, &mut writer)
152+
parse(&mut reader)
165153
}
166154

167155
mod for_helpers {
168156
use crate::parsers::tokenizer::integer::tests::try_bencode_to_json;
169157

170158
#[test]
171159
fn bencode_to_json_wrapper_succeeds() {
172-
assert_eq!(try_bencode_to_json(b"i0e").unwrap(), "0".to_string());
160+
assert_eq!(try_bencode_to_json(b"i0e").unwrap(), "0".as_bytes());
173161
}
174162

175163
#[test]
@@ -180,22 +168,22 @@ mod tests {
180168

181169
#[test]
182170
fn zero() {
183-
assert_eq!(bencode_to_json_unchecked(b"i0e"), "0".to_string());
171+
assert_eq!(bencode_to_json_unchecked(b"i0e"), "0".as_bytes());
184172
}
185173

186174
#[test]
187175
fn one_digit_integer() {
188-
assert_eq!(bencode_to_json_unchecked(b"i1e"), "1".to_string());
176+
assert_eq!(bencode_to_json_unchecked(b"i1e"), "1".as_bytes());
189177
}
190178

191179
#[test]
192180
fn two_digits_integer() {
193-
assert_eq!(bencode_to_json_unchecked(b"i42e"), "42".to_string());
181+
assert_eq!(bencode_to_json_unchecked(b"i42e"), "42".as_bytes());
194182
}
195183

196184
#[test]
197185
fn negative_integer() {
198-
assert_eq!(bencode_to_json_unchecked(b"i-1e"), "-1".to_string());
186+
assert_eq!(bencode_to_json_unchecked(b"i-1e"), "-1".as_bytes());
199187
}
200188

201189
mod it_should_fail {

src/parsers/tokenizer/mod.rs

Lines changed: 3 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -6,11 +6,10 @@ use std::io::{self, Read};
66

77
use super::error::{self, ReadContext};
88

9-
use crate::rw::{byte_reader::ByteReader, byte_writer::ByteWriter};
9+
use crate::rw::byte_reader::ByteReader;
1010

1111
/* TODO:
1212
13-
- Remove writer from tokenizer.
1413
- Implement trait Iterator for tokenizer.
1514
1615
*/
@@ -51,20 +50,15 @@ impl<R: Read> BencodeTokenizer<R> {
5150
///
5251
/// - It can't read from the input.
5352
pub fn next_token(&mut self) -> Result<Option<BencodeToken>, error::Error> {
54-
let capture_output = Vec::new();
55-
let mut null_writer = ByteWriter::new(capture_output);
56-
57-
let opt_peeked_byte = Self::peek_byte(&mut self.byte_reader)?;
58-
59-
match opt_peeked_byte {
53+
match Self::peek_byte(&mut self.byte_reader)? {
6054
Some(peeked_byte) => {
6155
match peeked_byte {
6256
BENCODE_BEGIN_INTEGER => {
6357
let value = integer::parse(&mut self.byte_reader)?;
6458
Ok(Some(BencodeToken::Integer(value)))
6559
}
6660
b'0'..=b'9' => {
67-
let value = string::parse(&mut self.byte_reader, &mut null_writer)?;
61+
let value = string::parse(&mut self.byte_reader)?;
6862
Ok(Some(BencodeToken::String(value)))
6963
}
7064
BENCODE_BEGIN_LIST => {

0 commit comments

Comments (0)