Skip to content

Commit ac37a11

Browse files
author
Veetaha
committed
Reimplemented lexer with vectors instead of iterators
1 parent ad24976 commit ac37a11

File tree

10 files changed

+250
-196
lines changed

10 files changed

+250
-196
lines changed

crates/ra_ide/src/references/rename.rs

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
33
use hir::ModuleSource;
44
use ra_db::{RelativePath, RelativePathBuf, SourceDatabase, SourceDatabaseExt};
5-
use ra_syntax::{algo::find_node_at_offset, ast, tokenize, AstNode, SyntaxKind, SyntaxNode};
5+
use ra_syntax::{algo::find_node_at_offset, ast, single_token, AstNode, SyntaxKind, SyntaxNode};
66
use ra_text_edit::TextEdit;
77

88
use crate::{
@@ -17,11 +17,9 @@ pub(crate) fn rename(
1717
position: FilePosition,
1818
new_name: &str,
1919
) -> Option<RangeInfo<SourceChange>> {
20-
let tokens = tokenize(new_name);
21-
if tokens.len() != 1
22-
|| (tokens[0].kind != SyntaxKind::IDENT && tokens[0].kind != SyntaxKind::UNDERSCORE)
23-
{
24-
return None;
20+
match single_token(new_name)?.token.kind {
21+
SyntaxKind::IDENT | SyntaxKind::UNDERSCORE => (),
22+
_ => return None,
2523
}
2624

2725
let parse = db.parse(position.file_id);

crates/ra_mbe/src/subtree_source.rs

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
//! FIXME: write short doc here
22
33
use ra_parser::{Token, TokenSource};
4-
use ra_syntax::{classify_literal, SmolStr, SyntaxKind, SyntaxKind::*, T};
4+
use ra_syntax::{single_token, SmolStr, SyntaxKind, SyntaxKind::*, T};
55
use std::cell::{Cell, Ref, RefCell};
66
use tt::buffer::{Cursor, TokenBuffer};
77

@@ -129,8 +129,10 @@ fn convert_delim(d: Option<tt::DelimiterKind>, closing: bool) -> TtToken {
129129
}
130130

131131
fn convert_literal(l: &tt::Literal) -> TtToken {
132-
let kind =
133-
classify_literal(&l.text).map(|tkn| tkn.kind).unwrap_or_else(|| match l.text.as_ref() {
132+
let kind = single_token(&l.text)
133+
.map(|parsed| parsed.token.kind)
134+
.filter(|kind| kind.is_literal())
135+
.unwrap_or_else(|| match l.text.as_ref() {
134136
"true" => T![true],
135137
"false" => T![false],
136138
_ => panic!("Fail to convert given literal {:#?}", &l),

crates/ra_syntax/src/lib.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ use crate::syntax_node::GreenNode;
4141
pub use crate::{
4242
algo::InsertPosition,
4343
ast::{AstNode, AstToken},
44-
parsing::{classify_literal, tokenize, Token},
44+
parsing::{first_token, single_token, tokenize, tokenize_append, Token, TokenizeError},
4545
ptr::{AstPtr, SyntaxNodePtr},
4646
syntax_error::{Location, SyntaxError, SyntaxErrorKind},
4747
syntax_node::{

crates/ra_syntax/src/parsing.rs

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -7,15 +7,17 @@ mod text_tree_sink;
77
mod reparsing;
88

99
use crate::{syntax_node::GreenNode, SyntaxError};
10+
use text_token_source::TextTokenSource;
11+
use text_tree_sink::TextTreeSink;
1012

11-
pub use self::lexer::{classify_literal, tokenize, Token};
13+
pub use lexer::*;
1214

1315
pub(crate) use self::reparsing::incremental_reparse;
1416

1517
pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
16-
let tokens = tokenize(&text);
17-
let mut token_source = text_token_source::TextTokenSource::new(text, &tokens);
18-
let mut tree_sink = text_tree_sink::TextTreeSink::new(text, &tokens);
18+
let ParsedTokens { tokens, errors } = tokenize(&text);
19+
let mut token_source = TextTokenSource::new(text, &tokens);
20+
let mut tree_sink = TextTreeSink::new(text, &tokens, errors);
1921
ra_parser::parse(&mut token_source, &mut tree_sink);
2022
tree_sink.finish()
2123
}

0 commit comments

Comments (0)