 use std::{env, fmt, fs};

 use ariadne::{sources, Color, Label, Report, ReportKind};
-use chumsky::{input::SpannedInput, prelude::*};
+use chumsky::input::SpannedInput;
+use chumsky::prelude::*;

-use crate::lexer::{lexer, Span, Token};
+use crate::lexer::{lexer, Error as LexError, Span, Token};

 pub type Spanned<T> = (T, Span);

-// The type of the input that our parser operates on. The input is the `&[(Token, Span)]` token buffer generated by the
-// lexer, wrapped in a `SpannedInput` which 'splits' it apart into its constituent parts, tokens and spans, for chumsky
+// The type of the input that our parser operates on. The input is the `&[(Token,
+// Span)]` token buffer generated by the lexer, wrapped in a `SpannedInput` which
+// 'splits' it apart into its constituent parts, tokens and spans, for chumsky
 // to understand.
 type ParserInput<'tokens, 'src> = SpannedInput<Token<'src>, Span, &'tokens [(Token<'src>, Span)]>;

 pub fn parser<'tokens, 'src: 'tokens>(
-) -> impl Parser<'tokens, ParserInput<'tokens, 'src>, Spanned<Expr<'src>>, extra::Err<Rich<'tokens, Token<'src>, Span>>>
-    + Clone {
+) -> impl Parser<'tokens, ParserInput<'tokens, 'src>, Spanned<Expr<'src>>, LexError<'src>> + Clone {
 }
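For context, a minimal sketch of how this `parser()` entry point is usually driven under the chumsky 1.0 alpha-era API that this file targets. The `lexer`, `Token`, `Span`, and `parser` items come from the diff above; the `run` function, the `src` string, and the `crate::parser` module path are hypothetical, and `.spanned(...)` / `.into_output_errors()` are assumed to be available in the pinned chumsky version.

    use chumsky::input::Input;
    use chumsky::prelude::*;

    use crate::lexer::lexer;
    use crate::parser::parser;

    // Hypothetical driver; `src` is the raw source text.
    fn run(src: &str) {
        // Lex the source into a `Vec<(Token, Span)>` buffer plus any lexer errors.
        let (tokens, lex_errs) = lexer().parse(src).into_output_errors();

        if let Some(tokens) = &tokens {
            // Wrap the token slice in a `SpannedInput`; the argument is the
            // end-of-input span reported when the parser runs past the buffer.
            let input = tokens.as_slice().spanned((src.len()..src.len()).into());

            // Run the parser over the spanned token stream to get the AST.
            let (ast, parse_errs) = parser().parse(input).into_output_errors();

            // ... report `lex_errs` / `parse_errs` with ariadne and use `ast` ...
        }
    }

The signature change itself swaps the inline `extra::Err<Rich<'tokens, Token<'src>, Span>>` parameter for `LexError<'src>`, imported from the lexer module as `Error`, so the lexer and parser presumably now share a single error-type alias instead of repeating the `Rich` error configuration in both places.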