Skip to content

Commit 952687e

Browse files
committed
chore: Lexer::lex() returns Failable<TokenStream>
1 parent 566b9aa commit 952687e

File tree

4 files changed

+37
-19
lines changed

crates/els/file_cache.rs

Lines changed: 26 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@ use std::fs::File;
33
use std::io::Read;
44
use std::sync::mpsc::Sender;
55

6-
use erg_common::vfs::VFS;
76
use lsp_types::{
87
DidChangeTextDocumentParams, FileOperationFilter, FileOperationPattern,
98
FileOperationPatternKind, FileOperationRegistrationOptions, OneOf, Position, Range,
@@ -14,13 +13,14 @@ use lsp_types::{
1413
use serde_json::Value;
1514

1615
use erg_common::dict::Dict;
16+
use erg_common::lsp_log;
1717
use erg_common::set::Set;
1818
use erg_common::shared::Shared;
1919
use erg_common::traits::DequeStream;
20+
use erg_common::vfs::VFS;
2021
use erg_compiler::erg_parser::lex::Lexer;
2122
use erg_compiler::erg_parser::token::{Token, TokenCategory, TokenKind, TokenStream};
2223

23-
use crate::_log;
2424
use crate::server::{ELSResult, RedirectableStdout};
2525
use crate::util::{self, NormalizedUrl};
2626

@@ -279,7 +279,13 @@ impl FileCache {
279279
return;
280280
}
281281
}
282-
let token_stream = Lexer::from_str(code.clone()).lex().ok();
282+
let token_stream = match Lexer::from_str(code.clone()).lex() {
283+
Ok(ts) => Some(ts),
284+
Err((ts, es)) => {
285+
lsp_log!("failed to lex: {es}");
286+
Some(ts)
287+
}
288+
};
283289
let ver = ver.unwrap_or({
284290
if let Some(entry) = entry {
285291
entry.ver
@@ -308,7 +314,13 @@ impl FileCache {
308314
let start = util::pos_to_byte_index(&code, old.start);
309315
let end = util::pos_to_byte_index(&code, old.end);
310316
code.replace_range(start..end, new_code);
311-
let token_stream = Lexer::from_str(code.clone()).lex().ok();
317+
let token_stream = match Lexer::from_str(code.clone()).lex() {
318+
Ok(ts) => Some(ts),
319+
Err((ts, es)) => {
320+
lsp_log!("failed to lex: {es}");
321+
Some(ts)
322+
}
323+
};
312324
VFS.update(uri.to_file_path().unwrap(), code.clone());
313325
entry.code = code;
314326
// entry.ver += 1;
@@ -341,7 +353,13 @@ impl FileCache {
341353
code.replace_range(start..end, &change.text);
342354
}
343355
VFS.update(uri.to_file_path().unwrap(), code.clone());
344-
let token_stream = Lexer::from_str(code.clone()).lex().ok();
356+
let token_stream = match Lexer::from_str(code.clone()).lex() {
357+
Ok(ts) => Some(ts),
358+
Err((ts, es)) => {
359+
lsp_log!("failed to lex: {es}");
360+
Some(ts)
361+
}
362+
};
345363
entry.code = code;
346364
entry.ver = params.text_document.version;
347365
entry.token_stream = token_stream;
@@ -356,15 +374,15 @@ impl FileCache {
356374
pub fn rename_files(&mut self, params: &RenameFilesParams) -> ELSResult<()> {
357375
for file in &params.files {
358376
let Ok(old_uri) = NormalizedUrl::parse(&file.old_uri) else {
359-
_log!(self, "failed to parse old uri: {}", file.old_uri);
377+
lsp_log!("failed to parse old uri: {}", file.old_uri);
360378
continue;
361379
};
362380
let Ok(new_uri) = NormalizedUrl::parse(&file.new_uri) else {
363-
_log!(self, "failed to parse new uri: {}", file.new_uri);
381+
lsp_log!("failed to parse new uri: {}", file.new_uri);
364382
continue;
365383
};
366384
let Some(entry) = self.files.borrow_mut().remove(&old_uri) else {
367-
_log!(self, "failed to find old uri: {}", file.old_uri);
385+
lsp_log!("failed to find old uri: {}", file.old_uri);
368386
continue;
369387
};
370388
VFS.rename(

crates/erg_parser/lex.rs

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -16,13 +16,13 @@ use crate::token::{Token, TokenCategory, TokenKind, TokenStream};
1616
use TokenKind::*;
1717

1818
pub trait Lexable {
19-
fn lex(code: String) -> Result<TokenStream, LexErrors>;
19+
fn lex(code: String) -> Result<TokenStream, (TokenStream, LexErrors)>;
2020
}
2121

2222
pub struct SimpleLexer {}
2323

2424
impl Lexable for SimpleLexer {
25-
fn lex(code: String) -> Result<TokenStream, LexErrors> {
25+
fn lex(code: String) -> Result<TokenStream, (TokenStream, LexErrors)> {
2626
Lexer::from_str(code).lex()
2727
}
2828
}
@@ -83,7 +83,7 @@ impl Runnable for LexerRunner {
8383
let lexer = Lexer::from_str(self.cfg_mut().input.read());
8484
let ts = lexer
8585
.lex()
86-
.map_err(|errs| LexerRunnerErrors::convert(self.input(), errs))?;
86+
.map_err(|(_, errs)| LexerRunnerErrors::convert(self.input(), errs))?;
8787
if cfg!(feature = "debug") {
8888
println!("{ts:?}");
8989
} else {
@@ -97,13 +97,13 @@ impl Runnable for LexerRunner {
9797
if cfg!(feature = "debug") {
9898
let ts = lexer
9999
.lex()
100-
.map_err(|errs| LexerRunnerErrors::convert(self.input(), errs))?;
100+
.map_err(|(_, errs)| LexerRunnerErrors::convert(self.input(), errs))?;
101101
println!("{ts:?}");
102102
Ok(ts.to_string())
103103
} else {
104104
Ok(lexer
105105
.lex()
106-
.map_err(|errs| LexerRunnerErrors::convert(self.input(), errs))?
106+
.map_err(|(_, errs)| LexerRunnerErrors::convert(self.input(), errs))?
107107
.to_string())
108108
}
109109
}
@@ -213,7 +213,7 @@ impl Lexer /*<'a>*/ {
213213
}
214214
}
215215

216-
pub fn lex(self) -> Result<TokenStream, LexErrors> {
216+
pub fn lex(self) -> Result<TokenStream, (TokenStream, LexErrors)> {
217217
let mut result = TokenStream::empty();
218218
let mut errs = LexErrors::empty();
219219
for i in self {
@@ -227,7 +227,7 @@ impl Lexer /*<'a>*/ {
227227
if errs.is_empty() {
228228
Ok(result)
229229
} else {
230-
Err(errs)
230+
Err((result, errs))
231231
}
232232
}
233233

crates/erg_parser/parse.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -107,7 +107,7 @@ pub struct SimpleParser {}
107107

108108
impl Parsable for SimpleParser {
109109
fn parse(code: String) -> Result<CompleteArtifact, IncompleteArtifact> {
110-
let ts = Lexer::from_str(code).lex()?;
110+
let ts = Lexer::from_str(code).lex().map_err(|(_, es)| es)?;
111111
let mut parser = Parser::new(ts);
112112
let mut desugarer = Desugarer::new();
113113
let artifact = parser
@@ -218,7 +218,7 @@ pub struct Parser {
218218

219219
impl Parsable for Parser {
220220
fn parse(code: String) -> Result<CompleteArtifact, IncompleteArtifact<Module, ParseErrors>> {
221-
let ts = Lexer::from_str(code).lex()?;
221+
let ts = Lexer::from_str(code).lex().map_err(|(_, es)| es)?;
222222
Parser::new(ts).parse()
223223
}
224224
}
@@ -486,7 +486,7 @@ impl ParserRunner {
486486
) -> Result<CompleteArtifact, IncompleteArtifact<Module, ParserRunnerErrors>> {
487487
let ts = Lexer::new(Input::new(InputKind::Str(src), self.cfg.input.id()))
488488
.lex()
489-
.map_err(|errs| ParserRunnerErrors::convert(self.input(), errs))?;
489+
.map_err(|(_, errs)| ParserRunnerErrors::convert(self.input(), errs))?;
490490
Parser::new(ts)
491491
.parse()
492492
.map_err(|iart| iart.map_errs(|errs| ParserRunnerErrors::convert(self.input(), errs)))

crates/erg_parser/tests/parse_test.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -100,7 +100,7 @@ fn _parse_test_from_code(
100100
};
101101
let lexer = Lexer::new(input.clone());
102102
let mut parser = ParserRunner::new(cfg);
103-
match parser.parse_token_stream(lexer.lex().map_err(|errs| {
103+
match parser.parse_token_stream(lexer.lex().map_err(|(_, errs)| {
104104
ErrorArtifact::new(
105105
ParserRunnerErrors::empty(),
106106
ParserRunnerErrors::convert(&input, errs),

0 commit comments

Comments (0)