Skip to content

Commit afcc282

Browse files
committed
refactor: Remove unnecessary use of VecDeque
revert 190c29f
1 parent 5187de6 commit afcc282

File tree

1 file changed

+3
-4
lines changed

1 file changed

+3
-4
lines changed

src/parser.rs

Lines changed: 3 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -8,7 +8,6 @@ use crate::term::Context;
88
pub use crate::term::Notation::*;
99
use crate::term::Term::*;
1010
use crate::term::{abs, app, Notation, Term};
11-
use std::collections::VecDeque;
1211
use std::error::Error;
1312
use std::fmt;
1413

@@ -149,14 +148,14 @@ pub fn tokenize_cla(input: &str) -> Result<Vec<CToken>, ParseError> {
149148

150149
#[doc(hidden)]
151150
pub fn convert_classic_tokens(ctx: &Context, tokens: &[CToken]) -> Result<Vec<Token>, ParseError> {
152-
let mut stack = VecDeque::with_capacity(tokens.len());
151+
let mut stack = Vec::with_capacity(tokens.len());
153152
stack.extend(ctx.iter().rev());
154153
_convert_classic_tokens(tokens, &mut stack, &mut 0)
155154
}
156155

157156
fn _convert_classic_tokens<'t>(
158157
tokens: &'t [CToken],
159-
stack: &mut VecDeque<&'t str>,
158+
stack: &mut Vec<&'t str>,
160159
pos: &mut usize,
161160
) -> Result<Vec<Token>, ParseError> {
162161
let mut output = Vec::with_capacity(tokens.len() - *pos);
@@ -166,7 +165,7 @@ fn _convert_classic_tokens<'t>(
166165
match *token {
167166
CLambda(ref name) => {
168167
output.push(Lambda);
169-
stack.push_back(name);
168+
stack.push(name);
170169
inner_stack_count += 1;
171170
}
172171
CLparen => {

0 commit comments

Comments (0)