diff --git a/.buildbot.sh b/.buildbot.sh index 94cd2b9b1..b0f48617d 100644 --- a/.buildbot.sh +++ b/.buildbot.sh @@ -61,6 +61,9 @@ touch src/main.rs && CACHE_EXPECTED=y cargo build cd $root/lrpar/examples/calc_ast echo "2 + 3 * 4" | cargo run --package nimbleparse -- src/calc.l src/calc.y - echo "2 + 3 * 4" | cargo run | grep "Result: 14" +cd $root/lrpar/examples/calc_ast_arena +echo "2 + 3 * 4" | cargo run --package nimbleparse -- src/calc.l src/calc.y - +echo "2 + 3 * 4" | cargo run | grep "Result: 14" touch src/main.rs && CACHE_EXPECTED=y cargo build cd $root/lrpar/examples/calc_parsetree echo "2 + 3 * 4" | cargo run --package nimbleparse -- src/calc.l src/calc.y - diff --git a/Cargo.toml b/Cargo.toml index af34f5fa9..219eb7126 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,6 +10,7 @@ members=[ "lrpar/examples/calc_actions", "lrpar/examples/calc_ast", "lrpar/examples/calc_parsetree", + "lrpar/examples/calc_ast_arena", "lrpar/examples/start_states", "lrpar/examples/clone_param", "lrtable", diff --git a/cfgrammar/src/lib/yacc/ast.rs b/cfgrammar/src/lib/yacc/ast.rs index fd4cde57b..cfca3310e 100644 --- a/cfgrammar/src/lib/yacc/ast.rs +++ b/cfgrammar/src/lib/yacc/ast.rs @@ -170,6 +170,7 @@ pub struct GrammarAST { pub expect: Option<(usize, Span)>, pub expectrr: Option<(usize, Span)>, pub parse_param: Option<(String, String)>, + pub parse_generics: Option, pub programs: Option, // The set of symbol names that, if unused in a // grammar, will not cause a warning or error. @@ -248,6 +249,7 @@ impl GrammarAST { expect: None, expectrr: None, parse_param: None, + parse_generics: None, programs: None, expect_unused: Vec::new(), } diff --git a/cfgrammar/src/lib/yacc/grammar.rs b/cfgrammar/src/lib/yacc/grammar.rs index c895873a9..3e19c6d45 100644 --- a/cfgrammar/src/lib/yacc/grammar.rs +++ b/cfgrammar/src/lib/yacc/grammar.rs @@ -83,6 +83,8 @@ pub struct YaccGrammar { actions: Box<[Option]>, /// A `(name, type)` pair defining an extra parameter to pass to action functions. 
parse_param: Option<(String, String)>, + /// Generic parameters (types and lifetimes) to pass to action functions. + parse_generics: Option, /// Lifetimes for `param_args` programs: Option, /// The actiontypes of rules (one per rule). @@ -130,6 +132,7 @@ where implicit_rule: Decode::decode(decoder)?, actions: Decode::decode(decoder)?, parse_param: Decode::decode(decoder)?, + parse_generics: Decode::decode(decoder)?, programs: Decode::decode(decoder)?, actiontypes: Decode::decode(decoder)?, avoid_insert: Decode::decode(decoder)?, @@ -168,6 +171,7 @@ where implicit_rule: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?, actions: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?, parse_param: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?, + parse_generics: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?, programs: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?, actiontypes: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?, avoid_insert: ::bincode::BorrowDecode::<'_, __Context>::borrow_decode(decoder)?, @@ -456,6 +460,7 @@ where implicit_rule: implicit_rule.map(|x| rule_map[&x]), actions: actions.into_boxed_slice(), parse_param: ast.parse_param.clone(), + parse_generics: ast.parse_generics.clone(), programs: ast.programs.clone(), avoid_insert, actiontypes: actiontypes.into_boxed_slice(), @@ -628,6 +633,10 @@ where &self.parse_param } + pub fn parse_generics(&self) -> &Option { + &self.parse_generics + } + /// Get the programs part of the grammar pub fn programs(&self) -> &Option { &self.programs diff --git a/cfgrammar/src/lib/yacc/parser.rs b/cfgrammar/src/lib/yacc/parser.rs index eaab7dead..3ac9e6392 100644 --- a/cfgrammar/src/lib/yacc/parser.rs +++ b/cfgrammar/src/lib/yacc/parser.rs @@ -549,6 +549,13 @@ impl YaccParser<'_> { i = self.parse_ws(j, true)?; continue; } + if let Some(j) = self.lookahead_is("%parse-generics", i) { + i = self.parse_ws(j, false)?; + let 
(j, ty) = self.parse_to_eol(i)?; + self.ast.parse_generics = Some(ty); + i = self.parse_ws(j, true)?; + continue; + } if let YaccKind::Eco = self.yacc_kind { if let Some(j) = self.lookahead_is("%implicit_tokens", i) { i = self.parse_ws(j, false)?; @@ -2454,6 +2461,18 @@ x" ); } + #[test] + fn test_parse_generics() { + let src = " + %parse-generics 'a, K, V + %% + A: 'a'; + "; + let grm = parse(YaccKind::Original(YaccOriginalActionKind::UserAction), src).unwrap(); + + assert_eq!(grm.parse_generics, Some("'a, K, V".to_owned())); + } + #[test] fn test_duplicate_rule() { let ast = parse( diff --git a/doc/src/actioncode.md b/doc/src/actioncode.md index 61dce026f..0c3463609 100644 --- a/doc/src/actioncode.md +++ b/doc/src/actioncode.md @@ -69,3 +69,19 @@ R -> ...: 'ID' { format!("{}{}", p, ...) } ; ``` + +# Generic parse parameter + +If `%parse-param` needs to be generic, additional type variables and lifetimes +can be specified in the `%parse-generics T1, T2, ...` declaration. + +For example, if a grammar has the following declarations: + +``` +%parse-generics T: FromStr +%parse-param p: T +``` + +then the `parse` function will take an additional parameter of type `T`. + +This can be used, for example, [to allocate AST nodes in a memory arena](https://github.com/softdevteam/grmtools/tree/master/lrpar/examples/calc_ast_arena).
diff --git a/lrpar/cttests/src/lib.rs b/lrpar/cttests/src/lib.rs index 2eb34a224..4f98b8102 100644 --- a/lrpar/cttests/src/lib.rs +++ b/lrpar/cttests/src/lib.rs @@ -47,6 +47,9 @@ lrpar_mod!("parseparam.y"); lrlex_mod!("parseparam_copy.l"); lrpar_mod!("parseparam_copy.y"); +lrlex_mod!("typeparams.l"); +lrpar_mod!("typeparams.y"); + lrlex_mod!("passthrough.l"); lrpar_mod!("passthrough.y"); @@ -294,6 +297,16 @@ fn test_parseparam_copy() { } } +#[test] +fn test_typeparams() { + let lexerdef = typeparams_l::lexerdef(); + let lexer = lexerdef.lexer("101"); + match typeparams_y::parse(&lexer, &3u64) { + (Some(104u64), _) => (), + _ => unreachable!(), + } +} + #[test] fn test_passthrough() { let lexerdef = passthrough_l::lexerdef(); diff --git a/lrpar/cttests/src/typeparams.test b/lrpar/cttests/src/typeparams.test new file mode 100644 index 000000000..edb12f35e --- /dev/null +++ b/lrpar/cttests/src/typeparams.test @@ -0,0 +1,14 @@ +name: Test %parse-generics +yacckind: Grmtools +grammar: | + %start S + %parse-generics 'a, T: Into<u64> + Copy, R: From<u64> + %parse-param p: &'a T + %% + S -> R: + 'INT' { From::from((*p).into() + $lexer.span_str($1.unwrap().span()).parse::<u64>().unwrap()) } + ; + %% +lexer: | + %% + [0-9]+ 'INT' diff --git a/lrpar/examples/calc_ast_arena/Cargo.toml b/lrpar/examples/calc_ast_arena/Cargo.toml new file mode 100644 index 000000000..88bcdf209 --- /dev/null +++ b/lrpar/examples/calc_ast_arena/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "calc_ast_arena" +version = "0.1.0" +authors = ["Laurence Tratt "] +edition = "2024" +license = "Apache-2.0/MIT" + +[[bin]] +doc = false +name = "calc_ast_arena" + +[build-dependencies] +cfgrammar = { path="../../../cfgrammar" } +lrlex = { path="../../../lrlex" } +lrpar = { path="../.." } + +[dependencies] +cfgrammar = { path="../../../cfgrammar" } +lrlex = { path="../../../lrlex" } +lrpar = { path="../..
} +bumpalo = "3" diff --git a/lrpar/examples/calc_ast_arena/build.rs b/lrpar/examples/calc_ast_arena/build.rs new file mode 100644 index 000000000..c441cb011 --- /dev/null +++ b/lrpar/examples/calc_ast_arena/build.rs @@ -0,0 +1,18 @@ +#![deny(rust_2018_idioms)] +use lrlex::CTLexerBuilder; + +fn main() { + // Since we're using both lrlex and lrpar, we use lrlex's `lrpar_config` convenience function + // that makes it easy to a) create a lexer and parser and b) link them together. + CTLexerBuilder::new() + .rust_edition(lrlex::RustEdition::Rust2021) + .lrpar_config(|ctp| { + ctp.rust_edition(lrpar::RustEdition::Rust2021) + .grammar_in_src_dir("calc.y") + .unwrap() + }) + .lexer_in_src_dir("calc.l") + .unwrap() + .build() + .unwrap(); +} diff --git a/lrpar/examples/calc_ast_arena/src/calc.l b/lrpar/examples/calc_ast_arena/src/calc.l new file mode 100644 index 000000000..7f55ab673 --- /dev/null +++ b/lrpar/examples/calc_ast_arena/src/calc.l @@ -0,0 +1,8 @@ +%% +[0-9]+ "INT" +\+ "+" +\* "*" +\( "(" +\) ")" +[\t\n ]+ ; +. "UNMATCHED" diff --git a/lrpar/examples/calc_ast_arena/src/calc.y b/lrpar/examples/calc_ast_arena/src/calc.y new file mode 100644 index 000000000..2e2e05a3f --- /dev/null +++ b/lrpar/examples/calc_ast_arena/src/calc.y @@ -0,0 +1,53 @@ +%grmtools { + yacckind: Grmtools, + test_files: "input*.txt", +} +%start Expr +%avoid_insert "INT" +%expect-unused Unmatched "UNMATCHED" +%parse-generics 'ast +%parse-param arena: &'ast Bump +%% +Expr -> Result, ()>: + Expr '+' Term { + Ok(Expr::Add{ span: $span, lhs: arena.alloc($1?), rhs: arena.alloc($3?) }) + } + | Term { $1 } + ; + +Term -> Result, ()>: + Term '*' Factor { + Ok(Expr::Mul{ span: $span, lhs: arena.alloc($1?), rhs: arena.alloc($3?) 
}) + } + | Factor { $1 } + ; + +Factor -> Result, ()>: + '(' Expr ')' { $2 } + | 'INT' { Ok(Expr::Number{ span: $span }) } + ; + +Unmatched -> (): + "UNMATCHED" { } + ; +%% + +use cfgrammar::Span; +use bumpalo::Bump; + +#[derive(Debug)] +pub enum Expr<'ast> { + Add { + span: Span, + lhs: &'ast Expr<'ast>, + rhs: &'ast Expr<'ast>, + }, + Mul { + span: Span, + lhs: &'ast Expr<'ast>, + rhs: &'ast Expr<'ast>, + }, + Number { + span: Span + } +} diff --git a/lrpar/examples/calc_ast_arena/src/input.txt b/lrpar/examples/calc_ast_arena/src/input.txt new file mode 100644 index 000000000..5307e9d7c --- /dev/null +++ b/lrpar/examples/calc_ast_arena/src/input.txt @@ -0,0 +1 @@ +5 + 4 * 3 \ No newline at end of file diff --git a/lrpar/examples/calc_ast_arena/src/main.rs b/lrpar/examples/calc_ast_arena/src/main.rs new file mode 100644 index 000000000..e711ae303 --- /dev/null +++ b/lrpar/examples/calc_ast_arena/src/main.rs @@ -0,0 +1,75 @@ +#![allow(clippy::unnecessary_wraps)] + +use std::io::{self, BufRead, Write}; + +use cfgrammar::Span; +use lrlex::{DefaultLexerTypes, lrlex_mod}; +use lrpar::{NonStreamingLexer, lrpar_mod}; + +// Using `lrlex_mod!` brings the lexer for `calc.l` into scope. By default the module name will be +// `calc_l` (i.e. the file name, minus any extensions, with a suffix of `_l`). +lrlex_mod!("calc.l"); +// Using `lrpar_mod!` brings the parser for `calc.y` into scope. By default the module name will be +// `calc_y` (i.e. the file name, minus any extensions, with a suffix of `_y`). +lrpar_mod!("calc.y"); + +use calc_y::Expr; + +fn main() { + // Get the `LexerDef` for the `calc` language. + let lexerdef = calc_l::lexerdef(); + let stdin = io::stdin(); + loop { + print!(">>> "); + io::stdout().flush().ok(); + match stdin.lock().lines().next() { + Some(Ok(ref l)) => { + if l.trim().is_empty() { + continue; + } + // Now we create a lexer with the `lexer` method with which we can lex an input. 
+ let lexer = lexerdef.lexer(l); + let arena = bumpalo::Bump::new(); + // Pass the lexer to the parser and lex and parse the input. + let (res, errs) = calc_y::parse(&lexer, &arena); + for e in errs { + println!("{}", e.pp(&lexer, &calc_y::token_epp)); + } + if let Some(Ok(r)) = res { + match eval(&lexer, &r) { + Ok(i) => println!("Result: {}", i), + Err((span, msg)) => { + let ((line, col), _) = lexer.line_col(span); + eprintln!( + "Evaluation error at line {} column {}, '{}' {}.", + line, + col, + lexer.span_str(span), + msg + ) + } + } + } + } + _ => break, + } + } +} + +fn eval( + lexer: &dyn NonStreamingLexer>, + e: &Expr, +) -> Result { + match e { + Expr::Add { span, lhs, rhs } => eval(lexer, *lhs)? + .checked_add(eval(lexer, *rhs)?) + .ok_or((*span, "overflowed")), + Expr::Mul { span, lhs, rhs } => eval(lexer, *lhs)? + .checked_mul(eval(lexer, *rhs)?) + .ok_or((*span, "overflowed")), + Expr::Number { span } => lexer + .span_str(*span) + .parse::() + .map_err(|_| (*span, "cannot be represented as a u64")), + } +} diff --git a/lrpar/src/lib/ctbuilder.rs b/lrpar/src/lib/ctbuilder.rs index 0c6bd7d2b..7d5c227a1 100644 --- a/lrpar/src/lib/ctbuilder.rs +++ b/lrpar/src/lib/ctbuilder.rs @@ -34,6 +34,7 @@ use lrtable::{Minimiser, StateGraph, StateTable, from_yacc, statetable::Conflict use num_traits::{AsPrimitive, PrimInt, Unsigned}; use proc_macro2::{Literal, TokenStream}; use quote::{ToTokens, TokenStreamExt, format_ident, quote}; +use syn::{Generics, parse_quote}; const ACTION_PREFIX: &str = "__gt_"; const GLOBAL_PREFIX: &str = "__GT_"; @@ -1080,6 +1081,8 @@ where } YaccKind::Original(YaccOriginalActionKind::UserAction) | YaccKind::Grmtools => { let actionskind = str::parse::(ACTIONS_KIND)?; + let parsed_parse_generics = make_generics(grm.parse_generics().as_deref())?; + let (_, type_generics, _) = parsed_parse_generics.split_for_impl(); // actions always have a parse_param argument, and when the `parse` function lacks one // that parameter will be unit. 
let (action_fn_parse_param, action_fn_parse_param_ty) = match grm.parse_param() { @@ -1108,9 +1111,9 @@ where ::cfgrammar::RIdx<#storaget>, &'lexer dyn ::lrpar::NonStreamingLexer<'input, #lexertypest>, ::cfgrammar::Span, - ::std::vec::Drain<#edition_lifetime ::lrpar::parser::AStackType<<#lexertypest as ::lrpar::LexerTypes>::LexemeT, #actionskind<'input>>>, + ::std::vec::Drain<#edition_lifetime ::lrpar::parser::AStackType<<#lexertypest as ::lrpar::LexerTypes>::LexemeT, #actionskind #type_generics>>, #action_fn_parse_param_ty - ) -> #actionskind<'input> + ) -> #actionskind #type_generics > = ::std::vec![#(&#wrappers,)*]; match ::lrpar::RTParserBuilder::new(&grm, &stable) .recoverer(#recoverer) @@ -1124,6 +1127,14 @@ where kind => panic!("YaccKind {:?} not supported", kind), }; + let parsed_parse_generics: Generics = match self.yacckind.unwrap() { + YaccKind::Original(YaccOriginalActionKind::UserAction) | YaccKind::Grmtools => { + make_generics(grm.parse_generics().as_deref())? + } + _ => make_generics(None)?, + }; + let (generics, _, where_clause) = parsed_parse_generics.split_for_impl(); + // `parse()` may or may not have an argument for `%parseparam`. 
let parse_fn_parse_param = match self.yacckind.unwrap() { YaccKind::Original(YaccOriginalActionKind::UserAction) | YaccKind::Grmtools => { @@ -1165,10 +1176,12 @@ where const __STABLE_DATA: &[u8] = &[#(#stable_data,)*]; #[allow(dead_code)] - pub fn parse<'lexer, 'input: 'lexer>( + pub fn parse #generics ( lexer: &'lexer dyn ::lrpar::NonStreamingLexer<'input, #lexertypest>, #parse_fn_parse_param - ) -> #parse_fn_return_ty { + ) -> #parse_fn_return_ty + #where_clause + { let (grm, stable) = ::lrpar::ctbuilder::_reconstitute(__GRM_DATA, __STABLE_DATA); #run_parser } @@ -1219,10 +1232,10 @@ where } /// Generate the wrappers that call user actions - fn gen_wrappers( - &self, - grm: &YaccGrammar, - ) -> Result { + fn gen_wrappers(&self, grm: &YaccGrammar) -> Result> { + let parsed_parse_generics = make_generics(grm.parse_generics().as_deref())?; + let (generics, type_generics, where_clause) = parsed_parse_generics.split_for_impl(); + let (parse_paramname, parse_paramdef); match grm.parse_param() { Some((name, tyname)) => { @@ -1265,16 +1278,16 @@ where Symbol::Rule(ref_ridx) => { let ref_ridx = usize::from(ref_ridx); let actionvariant = format_ident!("{}{}", ACTIONS_KIND_PREFIX, ref_ridx); - quote!{ + quote! { #[allow(clippy::let_unit_value)] let #arg = match #args_var.next().unwrap() { - ::lrpar::parser::AStackType::ActionType(#actionskind::#actionvariant(x)) => x, + ::lrpar::parser::AStackType::ActionType(#actionskind::#type_generics::#actionvariant(x)) => x, _ => unreachable!() }; } } Symbol::Token(_) => { - quote!{ + quote! { let #arg = match #args_var.next().unwrap() { ::lrpar::parser::AStackType::Lexeme(l) => { if l.faulty() { @@ -1302,14 +1315,14 @@ where // If the rule `r` that we're calling has the unit type then Clippy will warn that // `enum::A(wrapper_r())` is pointless. We thus have to split it into two: // `wrapper_r(); enum::A(())`. - quote!{ + quote! 
{ #action_fn(#ridx_var, #lexer_var, #span_var, #parse_paramname, #(#args,)*); - #actionskind::#actionsvariant(()) + #actionskind::#type_generics::#actionsvariant(()) } } _ => { - quote!{ - #actionskind::#actionsvariant(#action_fn(#ridx_var, #lexer_var, #span_var, #parse_paramname, #(#args,)*)) + quote! { + #actionskind::#type_generics::#actionsvariant(#action_fn(#ridx_var, #lexer_var, #span_var, #parse_paramname, #(#args,)*)) } } }) @@ -1328,15 +1341,17 @@ where } else { None }; - wrappers.extend(quote!{ + wrappers.extend(quote! { #attrib - fn #wrapper_fn<'lexer, 'input: 'lexer>( + fn #wrapper_fn #generics ( #ridx_var: ::cfgrammar::RIdx<#storaget>, #lexer_var: &'lexer dyn ::lrpar::NonStreamingLexer<'input, #lexertypest>, #span_var: ::cfgrammar::Span, - mut #args_var: ::std::vec::Drain<#edition_lifetime ::lrpar::parser::AStackType<<#lexertypest as ::lrpar::LexerTypes>::LexemeT, #actionskind<'input>>>, + mut #args_var: ::std::vec::Drain<#edition_lifetime ::lrpar::parser::AStackType<<#lexertypest as ::lrpar::LexerTypes>::LexemeT, #actionskind #type_generics>>, #parse_paramdef - ) -> #actionskind<'input> { + ) -> #actionskind #type_generics + #where_clause + { #wrapper_fn_body } }) @@ -1344,6 +1359,7 @@ where let mut actionskindvariants = Vec::new(); let actionskindhidden = format_ident!("_{}", ACTIONS_KIND_HIDDEN); let actionskind = str::parse::(ACTIONS_KIND).unwrap(); + let mut phantom_data_type = Vec::new(); for ridx in grm.iter_rules() { if let Some(actiont) = grm.actiontype(ridx) { let actionskindvariant = @@ -1354,11 +1370,20 @@ where }) } } - actionskindvariants - .push(quote!(#actionskindhidden(::std::marker::PhantomData<&'input ()>))); + for lifetime in parsed_parse_generics.lifetimes() { + let lifetime = &lifetime.lifetime; + phantom_data_type.push(quote! { &#lifetime () }); + } + for type_param in parsed_parse_generics.type_params() { + let ident = &type_param.ident; + phantom_data_type.push(quote! { #ident }); + } + actionskindvariants.push(quote! 
{ + #actionskindhidden(::std::marker::PhantomData<(#(#phantom_data_type,)*)>) + }); wrappers.extend(quote! { #[allow(dead_code)] - enum #actionskind<'input> { + enum #actionskind #generics #where_clause { #(#actionskindvariants,)* } }); @@ -1374,6 +1399,8 @@ where .transpose()?; let mut action_fns = TokenStream::new(); // Convert actions to functions + let parsed_parse_generics = make_generics(grm.parse_generics().as_deref())?; + let (generics, _, where_clause) = parsed_parse_generics.split_for_impl(); let (parse_paramname, parse_paramdef, parse_param_unit); match grm.parse_param() { Some((name, tyname)) => { @@ -1479,17 +1506,20 @@ where } let action_body = str::parse::(&outs)?; - action_fns.extend(quote!{ - #[allow(clippy::too_many_arguments)] - fn #action_fn<'lexer, 'input: 'lexer>(#ridx_var: ::cfgrammar::RIdx<#storaget>, - #lexer_var: &'lexer dyn ::lrpar::NonStreamingLexer<'input, #lexertypest>, - #span_var: ::cfgrammar::Span, - #parse_paramdef, - #(#args,)*)#returnt { - #bind_parse_param - #action_body - } - + action_fns.extend(quote! { + #[allow(clippy::too_many_arguments)] + fn #action_fn #generics ( + #ridx_var: ::cfgrammar::RIdx<#storaget>, + #lexer_var: &'lexer dyn ::lrpar::NonStreamingLexer<'input, #lexertypest>, + #span_var: ::cfgrammar::Span, + #parse_paramdef, + #(#args,)* + ) #returnt + #where_clause + { + #bind_parse_param + #action_body + } }) } Ok(quote! 
{ @@ -1605,6 +1635,18 @@ fn indent(indent: &str, s: &str) -> String { format!("{indent}{}\n", s.trim_end_matches('\n')).replace('\n', &format!("\n{}", indent)) } +fn make_generics(parse_generics: Option<&str>) -> Result> { + if let Some(parse_generics) = parse_generics { + let tokens = str::parse::(parse_generics)?; + match syn::parse2(quote!(<'lexer, 'input: 'lexer, #tokens>)) { + Ok(res) => Ok(res), + Err(err) => Err(format!("unable to parse %parse-generics: {}", err).into()), + } + } else { + Ok(parse_quote!(<'lexer, 'input: 'lexer>)) + } +} + // Tests dealing with the filesystem not supported under wasm32 #[cfg(all(not(target_arch = "wasm32"), test))] mod test {