Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
39 commits
Select commit Hold shift + click to select a range
9362f73
Start new lexer; Add tests and benchmarks
kubouch Oct 31, 2024
3c30c48
Optimize symbol lexing; Add tracy; Remove pub
kubouch Nov 2, 2024
309391a
Optimize bareword lexing
kubouch Nov 2, 2024
f06074d
Remove comment
kubouch Nov 2, 2024
c4b7961
Add Tracy span
kubouch Nov 2, 2024
d22f0a4
Pre-allocate lexer storage
kubouch Nov 2, 2024
a109d62
Add missing lex test file
kubouch Nov 2, 2024
2d40fb0
WIP: Start Logos lexer
kubouch Nov 16, 2024
d7aca25
Skip horizontal whitespace
kubouch Nov 20, 2024
d38651e
Add most missing tokens
kubouch Nov 20, 2024
1f2ac5a
Add parsing of float
kubouch Nov 26, 2024
dd5018b
Remove IntWithUnit
kubouch Nov 26, 2024
a2fefc0
Add old lexer float snapshot
kubouch Nov 26, 2024
157ce4a
WIP Logos parser
kubouch Dec 9, 2024
aaf94c0
Fix sample
kubouch Dec 14, 2024
5ac75f8
Fix horizontal space check; Remove some unused code
kubouch Dec 14, 2024
a8d39ec
Refactor unused lexers
kubouch Dec 14, 2024
e8ad841
Remove comment
kubouch Dec 25, 2024
053ec26
Use Tokens data structure
kubouch Dec 25, 2024
cc94787
Start migrating to new tokens; Add missing snaps
kubouch Dec 25, 2024
fa5a155
Continue refactors
kubouch Dec 26, 2024
ccd0719
Finish refactoring
kubouch Dec 26, 2024
69cdd8b
Remove some unused code
kubouch Dec 26, 2024
2e0c2e2
Refactor lexer; Add docs
kubouch Dec 27, 2024
e111cc3
Remove check from advance()
kubouch Dec 27, 2024
d17081d
Remove commented code
kubouch Dec 27, 2024
afe7a19
Remove unused file
kubouch Dec 27, 2024
ae631e7
Add missing test
kubouch Dec 27, 2024
94e2031
Remove unused name strictness
kubouch Dec 27, 2024
19d498a
Rename TokenType3 to Token
kubouch Dec 27, 2024
f731cd9
Simplify BarewordContext
kubouch Dec 27, 2024
0bbdd6f
Merge branch 'main' into lexer2
kubouch Dec 27, 2024
fa46c0f
Consolidate BarewordContexts
kubouch Dec 27, 2024
2917e93
Allow more liberal barewords in call position
kubouch Dec 27, 2024
e8c312c
Use tokens.advance()
kubouch Dec 28, 2024
252e1da
Add missing calls.nu test result
kubouch Dec 28, 2024
95b9ae7
Remove unused file
kubouch Dec 28, 2024
3a2fa3c
Rename lexing benchmark
kubouch Dec 28, 2024
8619fbd
Separate lexing and parsing benchmarks
kubouch Dec 28, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
56 changes: 56 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ edition = "2021"

[dependencies]
tracy-client = { version = "0.17.3", default-features = false } # for tracy v0.11.1
logos = "0.15"

[profile.profiling]
inherits = "release"
Expand Down
51 changes: 46 additions & 5 deletions benches/benchmarks.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
use std::process::exit;

use new_nu_parser::lexer::{lex, Tokens};
use nu_protocol::engine::{EngineState, StateWorkingSet};
use tango_bench::{benchmark_fn, tango_benchmarks, tango_main, Benchmark, IntoBenchmarks};

Expand All @@ -16,9 +17,11 @@ const BENCHMARKS: &[&str] = &[
"combined10",
"combined100",
"combined1000",
"int100",
];

enum Stage {
Lex,
Parse,
Resolve,
Typecheck,
Expand All @@ -30,6 +33,7 @@ enum Stage {

/// Stages of compilation we want to profile
const STAGES: &[Stage] = &[
Stage::Lex,
Stage::Parse,
Stage::Resolve,
Stage::Typecheck,
Expand All @@ -52,8 +56,15 @@ fn setup_compiler(
let contents = std::fs::read(fname).map_err(|_| format!("Cannot find file {fname}"))?;
compiler.add_file(&fname, &contents);

let (tokens, err) = lex(&contents, span_offset);
if let Err(e) = err {
tokens.eprint(&compiler.source);
eprintln!("Lexing error. Error: {:?}", e);
exit(1);
}

if do_parse {
let parser = Parser::new(compiler, span_offset);
let parser = Parser::new(compiler, tokens);
compiler = parser.parse();

if !compiler.errors.is_empty() {
Expand Down Expand Up @@ -87,8 +98,8 @@ fn setup_compiler(
}

/// Parse only
pub fn parse(mut compiler: Compiler, span_offset: usize) {
let parser = Parser::new(compiler, span_offset);
pub fn parse(mut compiler: Compiler, tokens: Tokens) {
let parser = Parser::new(compiler, tokens);
compiler = parser.parse();

if !compiler.errors.is_empty() {
Expand Down Expand Up @@ -129,7 +140,14 @@ pub fn typecheck(mut compiler: Compiler, do_merge: bool) {

/// Run all compiler stages
pub fn compile(mut compiler: Compiler, span_offset: usize) {
let parser = Parser::new(compiler, span_offset);
let (tokens, err) = lex(&compiler.source, span_offset);
if let Err(e) = err {
tokens.eprint(&compiler.source);
eprintln!("Lexing error. Error: {:?}", e);
exit(1);
}

let parser = Parser::new(compiler, tokens);
compiler = parser.parse();

if !compiler.errors.is_empty() {
Expand Down Expand Up @@ -176,13 +194,36 @@ fn compiler_benchmarks() -> impl IntoBenchmarks {
let bench_file = format!("benches/nu/{bench_name}.nu");

let bench = match stage {
Stage::Lex => {
let name = format!("{bench_name}_lex");
benchmark_fn(name, move |b| {
let contents = std::fs::read(&bench_file)
.expect(&format!("Cannot find file {bench_file}"));
b.iter(move || {
let (tokens, err) = lex(&contents, 0);
if let Err(e) = err {
tokens.eprint(&contents);
eprintln!("Lexing error. Error: {:?}", e);
exit(1);
}
})
})
}
Stage::Parse => {
let name = format!("{bench_name}_parse");
benchmark_fn(name, move |b| {
let (compiler_def_init, span_offset) =
setup_compiler(&bench_file, false, false, false)
.expect("Error setting up compiler");
b.iter(move || parse(compiler_def_init.clone(), span_offset))
let contents = std::fs::read(&bench_file)
.expect(&format!("Cannot find file {bench_file}"));
let (tokens, err) = lex(&contents, span_offset);
if let Err(e) = err {
tokens.eprint(&contents);
eprintln!("Lexing error. Error: {:?}", e);
exit(1);
}
b.iter(move || parse(compiler_def_init.clone(), tokens.clone()))
})
}
Stage::Resolve => {
Expand Down
Loading