Skip to content

Commit 3adf2ff

Browse files
committed
complete parser, but compile time is slow
1 parent 6632e89 commit 3adf2ff

File tree

6 files changed

+481
-13
lines changed

6 files changed

+481
-13
lines changed

argus-core/src/lib.rs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,8 @@ pub mod signals;
1717

1818
use std::time::Duration;
1919

20+
pub use expr::*;
21+
pub use signals::Signal;
2022
use thiserror::Error;
2123

2224
/// Errors generated by all Argus components.

argus-parser/Cargo.toml

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,15 @@ name = "argus-parser"
33
version = "0.1.0"
44
edition = "2021"
55

6+
[lib]
7+
name = "argus_parser"
8+
9+
[[bin]]
10+
name = "argus_parser"
11+
path = "src/main.rs"
12+
13+
614
[dependencies]
715
argus-core = { version = "0.1.0", path = "../argus-core" }
816
ariadne = "0.3.0"
9-
chumsky = "1.0.0-alpha.4"
17+
chumsky = { version = "1.0.0-alpha.4", features = ["default", "label"] }

argus-parser/src/lexer.rs

Lines changed: 21 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -6,16 +6,19 @@ pub type Span = SimpleSpan<usize>;
66
pub type Output<'a> = Vec<(Token<'a>, Span)>;
77
pub type Error<'a> = extra::Err<Rich<'a, char, Span>>;
88

9-
#[derive(Clone, Debug, PartialEq, Eq)]
9+
#[derive(Clone, Debug, PartialEq)]
1010
pub enum Token<'src> {
1111
Semicolon,
1212
LBracket,
1313
RBracket,
1414
LParen,
1515
RParen,
1616
Comma,
17+
DotDot,
1718
Bool(bool),
18-
Num(&'src str),
19+
Int(i64),
20+
UInt(u64),
21+
Float(f64),
1922
Ident(&'src str),
2023
Minus,
2124
Plus,
@@ -49,8 +52,11 @@ impl<'src> fmt::Display for Token<'src> {
4952
Token::LParen => write!(f, "("),
5053
Token::RParen => write!(f, ")"),
5154
Token::Comma => write!(f, ","),
55+
Token::DotDot => write!(f, ".."),
5256
Token::Bool(val) => write!(f, "{}", val),
53-
Token::Num(val) => write!(f, "{}", val),
57+
Token::Int(val) => write!(f, "{}", val),
58+
Token::UInt(val) => write!(f, "{}", val),
59+
Token::Float(val) => write!(f, "{}", val),
5460
Token::Ident(ident) => write!(f, "{}", ident),
5561
Token::Minus => write!(f, "-"),
5662
Token::Plus => write!(f, "+"),
@@ -85,14 +91,22 @@ pub fn lexer<'src>() -> impl Parser<'src, &'src str, Output<'src>, Error<'src>>
8591

8692
let exp = just('e').or(just('E')).then(one_of("+-").or_not()).then(digits);
8793

88-
let number = just('-')
94+
let floating_number = just('-')
8995
.or_not()
90-
.then(text::int(10))
96+
.then(digits)
9197
.then(frac.or_not())
9298
.then(exp.or_not())
93-
.map_slice(Token::Num)
99+
.map_slice(|s: &str| Token::Float(s.parse().unwrap()))
94100
.boxed();
95101

102+
let signed_int = one_of("+-")
103+
.or_not()
104+
.then(digits)
105+
.map_slice(|s: &str| Token::Int(s.parse().unwrap()));
106+
let unsigned_int = digits.map_slice(|s: &str| Token::UInt(s.parse().unwrap()));
107+
108+
let number = choice((floating_number, signed_int, unsigned_int));
109+
96110
// A parser for control characters (delimiters, semicolons, etc.)
97111
let ctrl = choice((
98112
just(";").to(Token::Semicolon),
@@ -101,6 +115,7 @@ pub fn lexer<'src>() -> impl Parser<'src, &'src str, Output<'src>, Error<'src>>
101115
just("(").to(Token::LParen),
102116
just(")").to(Token::RParen),
103117
just(",").to(Token::Comma),
118+
just("..").to(Token::DotDot),
104119
));
105120

106121
// Lexer for operator symbols

argus-parser/src/lib.rs

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
11
//! # Argus logic syntax
2-
#![allow(dead_code)]
32
43
mod lexer;
54
mod parser;
5+
6+
pub use lexer::{lexer, Error as LexError, Span, Token};
7+
pub use parser::{parser, Expr, Interval};

argus-parser/src/main.rs

Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
1+
use std::{env, fs};
2+
3+
use argus_parser::lexer;
4+
// use crate::parser::{parser, Error as ParseError};
5+
use ariadne::{sources, Color, Label, Report, ReportKind};
6+
use chumsky::Parser;
7+
8+
fn main() {
9+
let src = env::args().nth(1).expect("Expected expression");
10+
11+
let (tokens, mut errs) = lexer().parse(src.as_str()).into_output_errors();
12+
13+
println!("*** Outputting tokens ***");
14+
if let Some(tokens) = &tokens {
15+
for token in tokens {
16+
println!("-> {:?}", token);
17+
}
18+
}
19+
20+
let parse_errs = if let Some(tokens) = &tokens {
21+
let (ast, parse_errs) = parser()
22+
.map_with_span(|ast, span| (ast, span))
23+
.parse(tokens.as_slice().spanned((src.len()..src.len()).into()))
24+
.into_output_errors();
25+
26+
println!("*** Outputting AST ***");
27+
println!("{:#?}", ast);
28+
29+
parse_errs
30+
} else {
31+
Vec::new()
32+
};
33+
34+
errs.into_iter()
35+
.map(|e| e.map_token(|c| c.to_string()))
36+
// .chain(parse_errs.into_iter().map(|e| e.map_token(|tok| tok.to_string())))
37+
.for_each(|e| {
38+
Report::build(ReportKind::Error, src.clone(), e.span().start)
39+
.with_message(e.to_string())
40+
.with_label(
41+
Label::new((src.clone(), e.span().into_range()))
42+
.with_message(e.reason().to_string())
43+
.with_color(Color::Red),
44+
)
45+
.with_labels(e.contexts().map(|(label, span)| {
46+
Label::new((src.clone(), span.into_range()))
47+
.with_message(format!("while parsing this {}", label))
48+
.with_color(Color::Yellow)
49+
}))
50+
.finish()
51+
.print(sources([(src.clone(), src.clone())]))
52+
.unwrap()
53+
});
54+
}

0 commit comments

Comments
 (0)