lrlex/src/lib/ctbuilder.rs (57 changes: 37 additions & 20 deletions)
@@ -25,7 +25,8 @@ use regex::Regex;
use serde::Serialize;

use crate::{
DefaultLexerTypes, LRNonStreamingLexerDef, LexerDef, RegexOptions, DEFAULT_REGEX_OPTIONS,
DefaultLexerTypes, LRNonStreamingLexerDef, LexBuildError, LexerDef, RegexOptions,
DEFAULT_REGEX_OPTIONS,
};

const RUST_FILE_EXT: &str = "rs";
@@ -97,6 +98,7 @@ where
allow_missing_terms_in_lexer: bool,
allow_missing_tokens_in_parser: bool,
regex_options: RegexOptions,
on_lex_build_errors_fn: Option<&'a dyn Fn(Vec<LexBuildError>) -> Box<dyn Error>>,
}

impl<'a> CTLexerBuilder<'a, DefaultLexerTypes<u32>> {
@@ -141,6 +143,7 @@ where
allow_missing_terms_in_lexer: false,
allow_missing_tokens_in_parser: true,
regex_options: DEFAULT_REGEX_OPTIONS,
on_lex_build_errors_fn: None,
}
}

@@ -329,27 +332,33 @@ where
let lex_src = read_to_string(lexerp)?;
let line_cache = NewlineCache::from_str(&lex_src).unwrap();
let mut lexerdef: Box<dyn LexerDef<LexerTypesT>> = match self.lexerkind {
LexerKind::LRNonStreamingLexer => Box::new(
LRNonStreamingLexerDef::<LexerTypesT>::new_with_options(
LexerKind::LRNonStreamingLexer => {
let lexerdef = LRNonStreamingLexerDef::<LexerTypesT>::new_with_options(
&lex_src,
self.regex_options.clone(),
)
.map_err(|errs| {
errs.iter()
.map(|e| {
if let Some((line, column)) = line_cache.byte_to_line_num_and_col_num(
&lex_src,
e.spans().first().unwrap().start(),
) {
format!("{} at line {line} column {column}", e)
} else {
format!("{}", e)
}
})
.collect::<Vec<_>>()
.join("\n")
})?,
),
);
Box::new(if let Some(on_err_fn) = self.on_lex_build_errors_fn {
lexerdef.map_err(on_err_fn)?
} else {
lexerdef.map_err(|errs| {
errs.iter()
.map(|e| {
if let Some((line, column)) = line_cache
.byte_to_line_num_and_col_num(
&lex_src,
e.spans().first().unwrap().start(),
)
{
format!("{} at line {line} column {column}", e)
} else {
format!("{}", e)
}
})
.collect::<Vec<_>>()
.join("\n")
})?
})
}
};
let (missing_from_lexer, missing_from_parser) = match self.rule_ids_map {
Some(ref rim) => {
@@ -723,6 +732,14 @@ pub fn lexerdef() -> {lexerdef_type} {{
self.regex_options.nest_limit = Some(lim);
self
}

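/// Set a callback which, if building the lexer fails, is passed the resulting
/// [LexBuildError]s and returns the error that [CTLexerBuilder::build()] reports,
/// replacing the default line/column formatting.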
pub fn on_lex_build_error(
mut self,
f: &'a dyn Fn(Vec<LexBuildError>) -> Box<dyn Error>,
) -> Self {
self.on_lex_build_errors_fn = Some(f);
self
}
}

/// An interface to the result of [CTLexerBuilder::build()].
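For context while reviewing: a minimal `build.rs` sketch of how a consumer might opt into the new hook. The `calc.l` path and the closure body are illustrative assumptions, not taken from this PR; when no callback is set, the default line/column formatting above still applies.

```rust
// Sketch only: assumes lrlex re-exports LexBuildError alongside CTLexerBuilder.
use std::error::Error;

use lrlex::{CTLexerBuilder, LexBuildError};

fn main() -> Result<(), Box<dyn Error>> {
    // Hypothetical handler: collapse all lexer build errors into one boxed error.
    let on_err = |errs: Vec<LexBuildError>| -> Box<dyn Error> {
        errs.iter()
            .map(|e| e.to_string())
            .collect::<Vec<_>>()
            .join("\n")
            .into()
    };
    CTLexerBuilder::new()
        .lexer_in_src_dir("calc.l")? // illustrative path
        .on_lex_build_error(&on_err)
        .build()?;
    Ok(())
}
```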
lrpar/src/lib/ctbuilder.rs (86 changes: 70 additions & 16 deletions)
@@ -19,7 +19,10 @@ use std::{
use bincode::{deserialize, serialize_into};
use cfgrammar::{
newlinecache::NewlineCache,
yacc::{ast::ASTWithValidityInfo, YaccGrammar, YaccKind, YaccOriginalActionKind},
yacc::{
ast::{ASTWithValidityInfo, GrammarAST},
YaccGrammar, YaccGrammarError, YaccKind, YaccOriginalActionKind,
},
RIdx, Spanned, Symbol,
};
use filetime::FileTime;
@@ -166,6 +169,16 @@ where
show_warnings: bool,
visibility: Visibility,
rust_edition: RustEdition,
on_grammar_error_fn: Option<&'a dyn Fn(Vec<YaccGrammarError>) -> Box<dyn Error>>,
on_unexpected_conflicts_fn: Option<
&'a dyn Fn(
&GrammarAST,
&YaccGrammar<LexerTypesT::StorageT>,
&StateGraph<LexerTypesT::StorageT>,
&StateTable<LexerTypesT::StorageT>,
&Conflicts<LexerTypesT::StorageT>,
) -> Box<dyn Error>,
>,
phantom: PhantomData<LexerTypesT>,
}

@@ -209,6 +222,8 @@ where
show_warnings: true,
visibility: Visibility::Private,
rust_edition: RustEdition::Rust2021,
on_grammar_error_fn: None,
on_unexpected_conflicts_fn: None,
phantom: PhantomData,
}
}
@@ -339,6 +354,27 @@ where
self
}

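/// Set a callback which, if building the grammar fails, is passed the resulting
/// [YaccGrammarError]s and returns the error that [CTParserBuilder::build()] reports,
/// replacing the default error formatting.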
pub fn on_grammar_error(
mut self,
f: &'a dyn Fn(Vec<YaccGrammarError>) -> Box<dyn Error>,
) -> Self {
self.on_grammar_error_fn = Some(f);
self
}

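/// Set a callback which is invoked if the grammar contains conflicts beyond those
/// explicitly expected; it receives the grammar's AST, the [YaccGrammar], the
/// [StateGraph], the [StateTable] and the [Conflicts], and returns the error that
/// [CTParserBuilder::build()] reports in place of the default [CTConflictsError].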
pub fn on_unexpected_conflicts(
mut self,
f: &'a dyn Fn(
&GrammarAST,
&YaccGrammar<LexerTypesT::StorageT>,
&StateGraph<LexerTypesT::StorageT>,
&StateTable<LexerTypesT::StorageT>,
&Conflicts<LexerTypesT::StorageT>,
) -> Box<dyn Error>,
) -> Self {
self.on_unexpected_conflicts_fn = Some(f);
self
}
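As with the lexer hook, a rough `build.rs` wiring sketch; the `calc.y` path, the `YaccKind` and the closure body are assumptions for illustration. A typed sketch of an `on_unexpected_conflicts` handler follows the conflict-handling hunk further down.

```rust
// Sketch only: standard lrpar/cfgrammar imports assumed.
use std::error::Error;

use cfgrammar::yacc::{YaccGrammarError, YaccKind, YaccOriginalActionKind};
use lrpar::CTParserBuilder;

fn main() -> Result<(), Box<dyn Error>> {
    // Hypothetical handler: join all grammar errors into one boxed error.
    let on_grammar_err = |errs: Vec<YaccGrammarError>| -> Box<dyn Error> {
        errs.iter()
            .map(|e| e.to_string())
            .collect::<Vec<_>>()
            .join("\n")
            .into()
    };
    CTParserBuilder::new()
        .yacckind(YaccKind::Original(YaccOriginalActionKind::GenericParseTree))
        .grammar_in_src_dir("calc.y")? // illustrative path
        .on_grammar_error(&on_grammar_err)
        .build()?;
    Ok(())
}
```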
/// Statically compile the Yacc file specified by [CTParserBuilder::grammar_path()] into Rust,
/// placing the output into the file spec [CTParserBuilder::output_path()]. Note that three
/// additional files will be created with the same name as specified in [self.output_path] but
@@ -463,21 +499,25 @@ where
grm
}
Err(errs) => {
let mut line_cache = NewlineCache::new();
line_cache.feed(&inc);
return Err(ErrorString(if errs.len() + warnings.len() > 1 {
// Indent under the "Error:" prefix.
format!(
"\n\t{}",
errs.iter()
.map(|e| spanned_fmt(e, &inc, &line_cache))
.chain(warnings.iter().map(|w| spanned_fmt(w, &inc, &line_cache)))
.collect::<Vec<_>>()
.join("\n\t")
)
if let Some(on_err_fn) = self.on_grammar_error_fn {
return Err(on_err_fn(errs));
} else {
spanned_fmt(errs.first().unwrap(), &inc, &line_cache)
}))?;
let mut line_cache = NewlineCache::new();
line_cache.feed(&inc);
return Err(ErrorString(if errs.len() + warnings.len() > 1 {
// Indent under the "Error:" prefix.
format!(
"\n\t{}",
errs.iter()
.map(|e| spanned_fmt(e, &inc, &line_cache))
.chain(warnings.iter().map(|w| spanned_fmt(w, &inc, &line_cache)))
.collect::<Vec<_>>()
.join("\n\t")
)
} else {
spanned_fmt(errs.first().unwrap(), &inc, &line_cache)
}))?;
}
}
};

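If a caller wants to keep the line/column details that the default path above produces, the callback can rebuild them itself, since `YaccGrammarError` is `Spanned`. A rough user-side sketch (the grammar path is an assumption, and note that the callback receives only the errors, not the warnings):

```rust
use std::{error::Error, fs::read_to_string};

use cfgrammar::{newlinecache::NewlineCache, yacc::YaccGrammarError, Spanned};

// Hypothetical handler: re-derive line/column positions for each error.
fn fmt_grammar_errors(errs: Vec<YaccGrammarError>) -> Box<dyn Error> {
    let src = read_to_string("src/calc.y").unwrap_or_default(); // illustrative path
    let mut line_cache = NewlineCache::new();
    line_cache.feed(&src);
    errs.iter()
        .map(|e| {
            match line_cache
                .byte_to_line_num_and_col_num(&src, e.spans().first().unwrap().start())
            {
                Some((line, col)) => format!("{e} at line {line} column {col}"),
                None => e.to_string(),
            }
        })
        .collect::<Vec<_>>()
        .join("\n")
        .into()
}
```

It can then be passed as `.on_grammar_error(&fmt_grammar_errors)` in the chain shown earlier.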
@@ -529,7 +569,19 @@ where
(Some(i), None) if i == c.sr_len() && 0 == c.rr_len() => (),
(None, Some(j)) if 0 == c.sr_len() && j == c.rr_len() => (),
(None, None) if 0 == c.rr_len() && 0 == c.sr_len() => (),
_ => return Err(Box::new(CTConflictsError { stable })),
_ => {
if let Some(on_conflicts) = self.on_unexpected_conflicts_fn {
return Err(on_conflicts(
ast_validation.ast(),
&grm,
&sgraph,
&stable,
c,
));
} else {
return Err(Box::new(CTConflictsError { stable }));
}
}
}
}
}
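The sketch promised above: a standalone handler with the callback's full signature spelled out for the default `u32` storage type. Only `sr_len()`/`rr_len()` are used, both of which appear in the hunk above; the module paths for `Conflicts`, `StateGraph` and `StateTable` are my assumption about lrtable's layout.

```rust
use std::error::Error;

use cfgrammar::yacc::{ast::GrammarAST, YaccGrammar};
use lrtable::{statetable::Conflicts, StateGraph, StateTable};

// Hypothetical handler: summarise the conflicts instead of returning CTConflictsError.
// The unused parameters show what richer diagnostics could draw on.
fn report_conflicts(
    _ast: &GrammarAST,
    _grm: &YaccGrammar<u32>,
    _sgraph: &StateGraph<u32>,
    _stable: &StateTable<u32>,
    c: &Conflicts<u32>,
) -> Box<dyn Error> {
    format!(
        "grammar has {} shift/reduce and {} reduce/reduce conflict(s)",
        c.sr_len(),
        c.rr_len()
    )
    .into()
}
```

Wired up as `.on_unexpected_conflicts(&report_conflicts)` on the builder.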
@@ -661,6 +713,8 @@ where
show_warnings: self.show_warnings,
visibility: self.visibility.clone(),
rust_edition: self.rust_edition,
on_grammar_error_fn: None,
on_unexpected_conflicts_fn: None,
phantom: PhantomData,
};
Ok(cl.build()?.rule_ids)