diff --git a/src/compiler.rs b/src/compiler.rs
index e44e9bc..08effe1 100644
--- a/src/compiler.rs
+++ b/src/compiler.rs
@@ -25,6 +25,18 @@ impl Span {
     }
 }
 
+#[derive(Debug, PartialEq)]
+pub struct Spanned<T> {
+    pub item: T,
+    pub span: Span,
+}
+
+impl<T> Spanned<T> {
+    pub fn new(item: T, span: Span) -> Self {
+        Spanned { item, span }
+    }
+}
+
 #[derive(Clone)]
 pub struct Compiler {
     // Core information, indexed by NodeId:
diff --git a/src/lexer.rs b/src/lexer.rs
index 6a1429c..448641c 100644
--- a/src/lexer.rs
+++ b/src/lexer.rs
@@ -1,5 +1,13 @@
-use crate::compiler::Span;
-use logos::Logos;
+use crate::compiler::{Span, Spanned};
+use logos::{Lexer, Logos};
+
+#[derive(Debug, Default, Copy, Clone, PartialEq)]
+pub enum LexError {
+    #[default]
+    Generic,
+    UnmatchedStrInterpLParen,
+    UnmatchedStrInterpRParen,
+}
 
 /// Average number of bytes per token used for estimating the tokens buffer size.
 ///
@@ -124,42 +132,156 @@ impl Tokens {
     }
 }
 
-/// Lex the source contents and return allocated Tokens.
-///
-/// In the case of error, you can look up the last stored token to get a clue what went wrong. The
-/// last stored token is always End Of File (EOF), so there will always be at least one token.
-pub fn lex(contents: &[u8], span_offset: usize) -> (Tokens, Result<(), ()>) {
-    // TODO: We might require the contents to always end with a newline, in which case return an error
-    let mut tokens = Tokens::new(contents);
-    let lexer = Token::lexer(contents).spanned();
+// TODO: Deduplicate code between lex_internal_dq_string_interp() and lex_internal_sq_string_interp()
+/// Lex the contents of a double-quoted string interpolation
+fn lex_internal_dq_string_interp(
+    contents: &[u8],
+    span_offset: usize,
+    tokens: &mut Tokens,
+) -> Result<(), Spanned<LexError>> {
+    let lexer = DqStrInterpToken::lexer(contents).spanned();
+
+    for (res, span) in lexer {
+        let new_span = Span::new(span.start + span_offset, span.end + span_offset);
+        match res {
+            Ok(DqStrInterpToken::Start) => {
+                tokens.push(Token::DqStringInterpStart, new_span);
+            }
+            Ok(DqStrInterpToken::StringChunk) => {
+                tokens.push(Token::StrInterpChunk, new_span);
+            }
+            Ok(DqStrInterpToken::Subexpression) => {
+                tokens.push(
+                    Token::StrInterpLParen,
+                    Span::new(new_span.start, new_span.start + 1),
+                );
+
+                lex_internal(
+                    &contents[span.start + 1..span.end - 1],
+                    span_offset + span.start + 1,
+                    tokens,
+                )?;
+
+                tokens.push(
+                    Token::StrInterpRParen,
+                    Span::new(new_span.end - 1, new_span.end),
+                );
+            }
+            Ok(DqStrInterpToken::End) => {
+                tokens.push(Token::StrInterpEnd, new_span);
+                return Ok(());
+            }
+            Err(e) => {
+                return Err(Spanned::new(e, new_span));
+            }
+        }
+    }
+
+    Ok(())
+}
+
+// TODO: Deduplicate code between lex_internal_dq_string_interp() and lex_internal_sq_string_interp()
+/// Lex the contents of a single-quoted string interpolation
+fn lex_internal_sq_string_interp(
+    contents: &[u8],
+    span_offset: usize,
+    tokens: &mut Tokens,
+) -> Result<(), Spanned<LexError>> {
+    let lexer = SqStrInterpToken::lexer(contents).spanned();
 
     for (res, span) in lexer {
+        let new_span = Span::new(span.start + span_offset, span.end + span_offset);
         match res {
-            Ok(token) => tokens.push(
-                token,
-                Span::new(span.start + span_offset, span.end + span_offset),
-            ),
-            Err(_) => {
+            Ok(SqStrInterpToken::Start) => {
+                tokens.push(Token::SqStringInterpStart, new_span);
+            }
+            Ok(SqStrInterpToken::StringChunk) => {
+                tokens.push(Token::StrInterpChunk, new_span);
+            }
+            Ok(SqStrInterpToken::Subexpression) => {
                 tokens.push(
-                    Token::Eof,
-                    Span::new(span.end + span_offset, span.end + span_offset),
+                    Token::StrInterpLParen,
+                    Span::new(new_span.start, new_span.start + 1),
                 );
-                return (tokens, Err(()));
+
+                lex_internal(
+                    &contents[span.start + 1..span.end - 1],
+                    span_offset + span.start + 1,
+                    tokens,
+                )?;
+
+                tokens.push(
+                    Token::StrInterpRParen,
+                    Span::new(new_span.end - 1, new_span.end),
+                );
+            }
+            Ok(SqStrInterpToken::End) => {
+                tokens.push(Token::StrInterpEnd, new_span);
+                return Ok(());
+            }
+            Err(e) => {
+                return Err(Spanned::new(e, new_span));
+            }
+        }
+    }
+
+    Ok(())
+}
+
+fn lex_internal(
+    contents: &[u8],
+    span_offset: usize,
+    tokens: &mut Tokens,
+) -> Result<(), Spanned<LexError>> {
+    let lexer = Token::lexer(contents).spanned();
+
+    for (res, span) in lexer {
+        let new_span = Span::new(span.start + span_offset, span.end + span_offset);
+        match res {
+            Ok(Token::DqStrInterp) => lex_internal_dq_string_interp(
+                &contents[span.start..span.end],
+                span_offset + span.start,
+                tokens,
+            )?,
+            Ok(Token::SqStrInterp) => lex_internal_sq_string_interp(
+                &contents[span.start..span.end],
+                span_offset + span.start,
+                tokens,
+            )?,
+            Ok(token) => tokens.push(token, new_span),
+            Err(e) => {
+                return Err(Spanned::new(e, new_span));
+            }
         }
     }
 
+    Ok(())
+}
+
+/// Lex the source contents and return allocated Tokens.
+///
+/// In the case of error, you can look up the last stored token to get a clue what went wrong. The
+/// last stored token is always End Of File (EOF), so there will always be at least one token.
+pub fn lex(contents: &[u8], span_offset: usize) -> (Tokens, Result<(), Spanned<LexError>>) {
+    // TODO: We might require the contents to always end with a newline, in which case return an error
+    let mut tokens = Tokens::new(contents);
+    let res = lex_internal(contents, span_offset, &mut tokens);
+
     tokens.push(
         Token::Eof,
         Span::new(contents.len() + span_offset, contents.len() + span_offset),
     );
 
+    if let Err(e) = res {
+        return (tokens, Err(e));
+    }
+
     (tokens, Ok(()))
 }
 
 #[derive(Logos, Debug, Clone, Copy, PartialEq)]
 #[logos(skip r"[ \t]+")]
-#[logos(source = [u8])]
+#[logos(source = [u8], error = LexError)]
 pub enum Token {
     #[regex("(0[xob])?[0-9][0-9_]*", priority = 10)]
     Int,
@@ -286,17 +408,111 @@ pub enum Token {
     ErrGreaterThanPipe,
     #[token("o+e>|")]
     OutErrGreaterThanPipe,
-    /// End of file, doesn't match any syntax, but source code always end with it
+    /// Double quoted string interpolation $"..."
+    ///
+    /// The token is passed to a separate lexer and is not actually present in the result.
+    /// Unescaped double quotes are not permitted, for example, $"foo("bar")" is not allowed.
+    #[regex(r#"\$"([^"]|\\")*""#)]
+    DqStrInterp,
+    /// Single-quoted string interpolation $'...'
+    ///
+    /// The token is passed to a separate lexer and is not actually present in the result.
+ #[regex(r#"\$'[^']*'"#)] + SqStrInterp, + /// Start of double-quoted string interpoloation $" (returned from separate lexing) + DqStringInterpStart, + /// Start of single-quoted string interpoloation $' (returned from separate lexing) + SqStringInterpStart, + /// Non-interpolated string chunk within any string interpolation (returned from separate lexing) + /// + /// For example, "foo" within $"foo(1)" + StrInterpChunk, + /// Left parenthesis inside any string interpolation (returned from separate lexing) + StrInterpLParen, + /// Right parenthesis inside any string interpolation (returned from separate lexing) + StrInterpRParen, + /// End of any string interpolation (returned from separate lexing) + StrInterpEnd, + /// End of file, doesn't match any syntax, but lexed tokens always end with it Eof, } +fn match_subexpression<'a, T: Logos<'a>>( + remainder: &[u8], + lexer: &mut Lexer<'a, T>, +) -> Result<(), LexError> { + let mut depth = 1; + let mut pos = 0; + + while pos < remainder.len() { + match remainder[pos] { + b'(' => depth += 1, + b')' => depth -= 1, + _ => (), + } + + if depth == 0 { + break; + } + + if depth < 0 { + // unmatched ) + return Err(LexError::UnmatchedStrInterpRParen); + } + + pos += 1; + } + + if depth > 0 { + // unmatched ( + return Err(LexError::UnmatchedStrInterpLParen); + } + + lexer.bump(pos + 1); + Ok(()) +} + +/// Tokens representing double-quoted string interpolation +#[derive(Logos, Debug, Clone, Copy, PartialEq)] +#[logos(source = [u8], error = LexError)] +enum DqStrInterpToken { + #[token(r#"$""#)] + Start, + #[regex(r#"([^"\\\(]|\\["\\bnfrt\(])+"#)] + StringChunk, + #[token("(", |lex| match_subexpression(lex.remainder(), lex))] + Subexpression, + #[token(r#"""#)] + End, +} + +/// Tokens representing single-quoted string interpolation +#[derive(Logos, Debug, Clone, Copy, PartialEq)] +#[logos(source = [u8], error=LexError)] +enum SqStrInterpToken { + #[token(r#"$'"#)] + Start, + #[regex(r#"[^'\(]+"#)] + StringChunk, + #[token("(", |lex| match_subexpression(lex.remainder(), lex))] + Subexpression, + #[token(r#"'"#)] + End, +} + #[cfg(test)] mod test { /// Lexer tests useful for smaller sources, errors and corner cases - use crate::compiler::Span; + use crate::compiler::{Span, Spanned}; use crate::lexer::{lex, Token}; - fn test_lex(src: &[u8], expected_tokens: &[(Token, Span)], expected_result: Result<(), ()>) { + use super::LexError; + + fn test_lex( + src: &[u8], + expected_tokens: &[(Token, Span)], + expected_result: Result<(), Spanned>, + ) { let (mut actual_tokens, actual_result) = lex(src, 0); assert_eq!(expected_result, actual_result, "Lexing result mismatch"); @@ -320,6 +536,39 @@ mod test { #[test] fn lex_unmatched_string() { // TODO: Make unmatched delimiters nicer - test_lex(b"'unmatched string", &[(Token::Eof, span(17, 17))], Err(())); + test_lex( + b"'unmatched string", + &[(Token::Eof, span(17, 17))], + Err(Spanned::new(LexError::Generic, Span::new(0, 17))), + ); + } + + #[test] + fn lex_string_interp_errors() { + test_lex( + br#"$"foo("baz")bar""#, + &[ + (Token::DqStringInterpStart, span(0, 2)), + (Token::StrInterpChunk, span(2, 5)), + (Token::Eof, span(16, 16)), + ], + Err(Spanned::new( + LexError::UnmatchedStrInterpLParen, + Span::new(5, 6), + )), + ); + + test_lex( + br#"$'foo('baz')bar'"#, + &[ + (Token::SqStringInterpStart, span(0, 2)), + (Token::StrInterpChunk, span(2, 5)), + (Token::Eof, span(16, 16)), + ], + Err(Spanned::new( + LexError::UnmatchedStrInterpLParen, + Span::new(5, 6), + )), + ); } } diff --git a/src/main.rs b/src/main.rs 
index 60b3ac8..c79910b 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -34,7 +34,13 @@ fn main() {
         let (tokens, err) = lex(&contents, span_offset);
         if let Err(e) = err {
             tokens.print(&compiler.source);
-            eprintln!("Lexing error. Error: {:?}", e);
+            eprintln!(
+                "Lexing error. Error: {:?}, '{}'",
+                e,
+                String::from_utf8_lossy(
+                    compiler.get_span_contents_manual(e.span.start, e.span.end)
+                )
+            );
             exit(1);
         }
diff --git a/src/snapshots/new_nu_parser__test__lexer@dq_string_interp.nu.snap b/src/snapshots/new_nu_parser__test__lexer@dq_string_interp.nu.snap
new file mode 100644
index 0000000..3805a6c
--- /dev/null
+++ b/src/snapshots/new_nu_parser__test__lexer@dq_string_interp.nu.snap
@@ -0,0 +1,115 @@
+---
+source: src/test.rs
+expression: evaluate_lexer(path)
+input_file: tests/lex/dq_string_interp.nu
+---
+==== TOKENS ====
+Token3 0: DqStringInterpStart span: 0 .. 2 '$"'
+Token3 1: StrInterpEnd span: 2 .. 3 '"'
+Token3 2: Newline span: 3 .. 4 '\n'
+Token3 3: DqStringInterpStart span: 4 .. 6 '$"'
+Token3 4: StrInterpChunk span: 6 .. 9 'foo'
+Token3 5: StrInterpEnd span: 9 .. 10 '"'
+Token3 6: Newline span: 10 .. 11 '\n'
+Token3 7: DqStringInterpStart span: 11 .. 13 '$"'
+Token3 8: StrInterpChunk span: 13 .. 18 'foo\('
+Token3 9: StrInterpEnd span: 18 .. 19 '"'
+Token3 10: Newline span: 19 .. 20 '\n'
+Token3 11: DqStringInterpStart span: 20 .. 22 '$"'
+Token3 12: StrInterpChunk span: 22 .. 28 'foo\()'
+Token3 13: StrInterpEnd span: 28 .. 29 '"'
+Token3 14: Newline span: 29 .. 30 '\n'
+Token3 15: DqStringInterpStart span: 30 .. 32 '$"'
+Token3 16: StrInterpChunk span: 32 .. 36 'foo)'
+Token3 17: StrInterpEnd span: 36 .. 37 '"'
+Token3 18: Newline span: 37 .. 38 '\n'
+Token3 19: DqStringInterpStart span: 38 .. 40 '$"'
+Token3 20: StrInterpLParen span: 40 .. 41 '('
+Token3 21: Int span: 41 .. 42 '1'
+Token3 22: StrInterpRParen span: 42 .. 43 ')'
+Token3 23: StrInterpChunk span: 43 .. 46 'bar'
+Token3 24: StrInterpEnd span: 46 .. 47 '"'
+Token3 25: Newline span: 47 .. 48 '\n'
+Token3 26: DqStringInterpStart span: 48 .. 50 '$"'
+Token3 27: StrInterpChunk span: 50 .. 53 'foo'
+Token3 28: StrInterpLParen span: 53 .. 54 '('
+Token3 29: Int span: 54 .. 55 '1'
+Token3 30: StrInterpRParen span: 55 .. 56 ')'
+Token3 31: StrInterpChunk span: 56 .. 59 'bar'
+Token3 32: StrInterpEnd span: 59 .. 60 '"'
+Token3 33: Newline span: 60 .. 61 '\n'
+Token3 34: DqStringInterpStart span: 61 .. 63 '$"'
+Token3 35: StrInterpChunk span: 63 .. 66 'foo'
+Token3 36: StrInterpLParen span: 66 .. 67 '('
+Token3 37: LParen span: 67 .. 68 '('
+Token3 38: Int span: 68 .. 69 '1'
+Token3 39: RParen span: 69 .. 70 ')'
+Token3 40: StrInterpRParen span: 70 .. 71 ')'
+Token3 41: StrInterpChunk span: 71 .. 74 'bar'
+Token3 42: StrInterpEnd span: 74 .. 75 '"'
+Token3 43: Newline span: 75 .. 76 '\n'
+Token3 44: DqStringInterpStart span: 76 .. 78 '$"'
+Token3 45: StrInterpChunk span: 78 .. 81 'foo'
+Token3 46: StrInterpLParen span: 81 .. 82 '('
+Token3 47: Int span: 82 .. 83 '1'
+Token3 48: Plus span: 84 .. 85 '+'
+Token3 49: LParen span: 86 .. 87 '('
+Token3 50: Int span: 87 .. 88 '2'
+Token3 51: Plus span: 89 .. 90 '+'
+Token3 52: Int span: 91 .. 92 '3'
+Token3 53: RParen span: 92 .. 93 ')'
+Token3 54: StrInterpRParen span: 93 .. 94 ')'
+Token3 55: StrInterpChunk span: 94 .. 97 'bar'
+Token3 56: StrInterpEnd span: 97 .. 98 '"'
+Token3 57: Newline span: 98 .. 99 '\n'
+Token3 58: DqStringInterpStart span: 99 .. 101 '$"'
+Token3 59: StrInterpChunk span: 101 .. 104 'foo'
+Token3 60: StrInterpLParen span: 104 .. 105 '('
+Token3 61: SingleQuotedString span: 105 .. 110 ''baz''
+Token3 62: StrInterpRParen span: 110 .. 111 ')'
+Token3 63: StrInterpChunk span: 111 .. 114 'bar'
+Token3 64: StrInterpEnd span: 114 .. 115 '"'
+Token3 65: Newline span: 115 .. 116 '\n'
+Token3 66: DqStringInterpStart span: 116 .. 118 '$"'
+Token3 67: StrInterpChunk span: 118 .. 121 'foo'
+Token3 68: StrInterpLParen span: 121 .. 122 '('
+Token3 69: StrInterpRParen span: 122 .. 123 ')'
+Token3 70: StrInterpChunk span: 123 .. 126 'bar'
+Token3 71: StrInterpEnd span: 126 .. 127 '"'
+Token3 72: Newline span: 127 .. 128 '\n'
+Token3 73: DqStringInterpStart span: 128 .. 130 '$"'
+Token3 74: StrInterpChunk span: 130 .. 133 'foo'
+Token3 75: StrInterpLParen span: 133 .. 134 '('
+Token3 76: Dollar span: 134 .. 135 '$'
+Token3 77: Bareword span: 135 .. 138 'baz'
+Token3 78: StrInterpRParen span: 138 .. 139 ')'
+Token3 79: StrInterpChunk span: 139 .. 142 'bar'
+Token3 80: StrInterpEnd span: 142 .. 143 '"'
+Token3 81: Newline span: 143 .. 144 '\n'
+Token3 82: DqStringInterpStart span: 144 .. 146 '$"'
+Token3 83: StrInterpChunk span: 146 .. 155 'escapes\"'
+Token3 84: StrInterpEnd span: 155 .. 156 '"'
+Token3 85: Newline span: 156 .. 157 '\n'
+Token3 86: DqStringInterpStart span: 157 .. 159 '$"'
+Token3 87: StrInterpChunk span: 159 .. 168 'esc\"apes'
+Token3 88: StrInterpEnd span: 168 .. 169 '"'
+Token3 89: Newline span: 169 .. 170 '\n'
+Token3 90: DqStringInterpStart span: 170 .. 172 '$"'
+Token3 91: StrInterpChunk span: 172 .. 175 'foo'
+Token3 92: StrInterpLParen span: 175 .. 176 '('
+Token3 93: SqStringInterpStart span: 176 .. 178 '$''
+Token3 94: StrInterpLParen span: 178 .. 179 '('
+Token3 95: Int span: 179 .. 180 '1'
+Token3 96: Plus span: 181 .. 182 '+'
+Token3 97: LParen span: 183 .. 184 '('
+Token3 98: Int span: 184 .. 185 '2'
+Token3 99: Plus span: 186 .. 187 '+'
+Token3 100: Int span: 188 .. 189 '3'
+Token3 101: RParen span: 189 .. 190 ')'
+Token3 102: StrInterpRParen span: 190 .. 191 ')'
+Token3 103: StrInterpEnd span: 191 .. 192 '''
+Token3 104: StrInterpRParen span: 192 .. 193 ')'
+Token3 105: StrInterpChunk span: 193 .. 196 'bar'
+Token3 106: StrInterpEnd span: 196 .. 197 '"'
+Token3 107: Newline span: 197 .. 198 '\n'
+Token3 108: Eof span: 198 .. 198 ''
diff --git a/src/snapshots/new_nu_parser__test__lexer@sq_string_interp.nu.snap b/src/snapshots/new_nu_parser__test__lexer@sq_string_interp.nu.snap
new file mode 100644
index 0000000..560ec4f
--- /dev/null
+++ b/src/snapshots/new_nu_parser__test__lexer@sq_string_interp.nu.snap
@@ -0,0 +1,82 @@
+---
+source: src/test.rs
+expression: evaluate_lexer(path)
+input_file: tests/lex/sq_string_interp.nu
+---
+==== TOKENS ====
+Token3 0: SqStringInterpStart span: 0 .. 2 '$''
+Token3 1: StrInterpEnd span: 2 .. 3 '''
+Token3 2: Newline span: 3 .. 4 '\n'
+Token3 3: SqStringInterpStart span: 4 .. 6 '$''
+Token3 4: StrInterpChunk span: 6 .. 9 'foo'
+Token3 5: StrInterpEnd span: 9 .. 10 '''
+Token3 6: Newline span: 10 .. 11 '\n'
+Token3 7: SqStringInterpStart span: 11 .. 13 '$''
+Token3 8: StrInterpChunk span: 13 .. 17 'foo)'
+Token3 9: StrInterpEnd span: 17 .. 18 '''
+Token3 10: Newline span: 18 .. 19 '\n'
+Token3 11: SqStringInterpStart span: 19 .. 21 '$''
+Token3 12: StrInterpLParen span: 21 .. 22 '('
+Token3 13: Int span: 22 .. 23 '1'
+Token3 14: StrInterpRParen span: 23 .. 24 ')'
+Token3 15: StrInterpChunk span: 24 .. 27 'bar'
+Token3 16: StrInterpEnd span: 27 .. 28 '''
+Token3 17: Newline span: 28 .. 29 '\n'
+Token3 18: SqStringInterpStart span: 29 .. 31 '$''
+Token3 19: StrInterpChunk span: 31 .. 34 'foo'
+Token3 20: StrInterpLParen span: 34 .. 35 '('
+Token3 21: Int span: 35 .. 36 '1'
+Token3 22: StrInterpRParen span: 36 .. 37 ')'
+Token3 23: StrInterpChunk span: 37 .. 40 'bar'
+Token3 24: StrInterpEnd span: 40 .. 41 '''
+Token3 25: Newline span: 41 .. 42 '\n'
+Token3 26: SqStringInterpStart span: 42 .. 44 '$''
+Token3 27: StrInterpChunk span: 44 .. 47 'foo'
+Token3 28: StrInterpLParen span: 47 .. 48 '('
+Token3 29: LParen span: 48 .. 49 '('
+Token3 30: Int span: 49 .. 50 '1'
+Token3 31: RParen span: 50 .. 51 ')'
+Token3 32: StrInterpRParen span: 51 .. 52 ')'
+Token3 33: StrInterpChunk span: 52 .. 55 'bar'
+Token3 34: StrInterpEnd span: 55 .. 56 '''
+Token3 35: Newline span: 56 .. 57 '\n'
+Token3 36: SqStringInterpStart span: 57 .. 59 '$''
+Token3 37: StrInterpChunk span: 59 .. 62 'foo'
+Token3 38: StrInterpLParen span: 62 .. 63 '('
+Token3 39: Int span: 63 .. 64 '1'
+Token3 40: Plus span: 65 .. 66 '+'
+Token3 41: LParen span: 67 .. 68 '('
+Token3 42: Int span: 68 .. 69 '3'
+Token3 43: Plus span: 70 .. 71 '+'
+Token3 44: Int span: 72 .. 73 '4'
+Token3 45: RParen span: 73 .. 74 ')'
+Token3 46: StrInterpRParen span: 74 .. 75 ')'
+Token3 47: StrInterpChunk span: 75 .. 78 'bar'
+Token3 48: StrInterpEnd span: 78 .. 79 '''
+Token3 49: Newline span: 79 .. 80 '\n'
+Token3 50: SqStringInterpStart span: 80 .. 82 '$''
+Token3 51: StrInterpChunk span: 82 .. 85 'foo'
+Token3 52: StrInterpLParen span: 85 .. 86 '('
+Token3 53: StrInterpRParen span: 86 .. 87 ')'
+Token3 54: StrInterpChunk span: 87 .. 90 'bar'
+Token3 55: StrInterpEnd span: 90 .. 91 '''
+Token3 56: Newline span: 91 .. 92 '\n'
+Token3 57: SqStringInterpStart span: 92 .. 94 '$''
+Token3 58: StrInterpChunk span: 94 .. 97 'foo'
+Token3 59: StrInterpLParen span: 97 .. 98 '('
+Token3 60: DqStringInterpStart span: 98 .. 100 '$"'
+Token3 61: StrInterpLParen span: 100 .. 101 '('
+Token3 62: Int span: 101 .. 102 '1'
+Token3 63: Plus span: 103 .. 104 '+'
+Token3 64: LParen span: 105 .. 106 '('
+Token3 65: Int span: 106 .. 107 '2'
+Token3 66: Plus span: 108 .. 109 '+'
+Token3 67: Int span: 110 .. 111 '3'
+Token3 68: RParen span: 111 .. 112 ')'
+Token3 69: StrInterpRParen span: 112 .. 113 ')'
+Token3 70: StrInterpEnd span: 113 .. 114 '"'
+Token3 71: StrInterpRParen span: 114 .. 115 ')'
+Token3 72: StrInterpChunk span: 115 .. 118 'bar'
+Token3 73: StrInterpEnd span: 118 .. 119 '''
+Token3 74: Newline span: 119 .. 120 '\n'
+Token3 75: Eof span: 120 .. 120 ''
diff --git a/tests/lex/dq_string_interp.nu b/tests/lex/dq_string_interp.nu
new file mode 100644
index 0000000..99806de
--- /dev/null
+++ b/tests/lex/dq_string_interp.nu
@@ -0,0 +1,15 @@
+$""
+$"foo"
+$"foo\("
+$"foo\()"
+$"foo)"
+$"(1)bar"
+$"foo(1)bar"
+$"foo((1))bar"
+$"foo(1 + (2 + 3))bar"
+$"foo('baz')bar"
+$"foo()bar"
+$"foo($baz)bar"
+$"escapes\""
+$"esc\"apes"
+$"foo($'(1 + (2 + 3))')bar"
diff --git a/tests/lex/sq_string_interp.nu b/tests/lex/sq_string_interp.nu
new file mode 100644
index 0000000..b5b4aff
--- /dev/null
+++ b/tests/lex/sq_string_interp.nu
@@ -0,0 +1,9 @@
+$''
+$'foo'
+$'foo)'
+$'(1)bar'
+$'foo(1)bar'
+$'foo((1))bar'
+$'foo(1 + (3 + 4))bar'
+$'foo()bar'
+$'foo($"(1 + (2 + 3))")bar'
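
For reviewers who want to try the branch from a scratch binary, here is a minimal usage sketch (not part of the patch). It assumes the crate is named `new_nu_parser` (as the snapshot file names suggest) and that the `lexer` module with `lex` and `LexError` is reachable from the crate root; the input string is made up for illustration and mirrors the unmatched-`(` case asserted by `lex_string_interp_errors()`.

```rust
// Usage sketch only -- not part of the diff above. Assumes `new_nu_parser`
// exposes the `lexer` module publicly; adjust the path to the actual layout.
use new_nu_parser::lexer::{lex, LexError};

fn main() {
    // An unmatched '(' inside a double-quoted interpolation.
    let src = br#"$"foo(bar""#;

    // `lex` now returns the tokens plus a Result carrying a Spanned<LexError>.
    let (_tokens, result) = lex(src, 0);

    if let Err(err) = result {
        // `err.item` is the error kind, `err.span` the byte range it points at.
        let snippet = String::from_utf8_lossy(&src[err.span.start..err.span.end]);
        match err.item {
            LexError::UnmatchedStrInterpLParen => {
                eprintln!("unmatched '(' at {}..{}: '{}'", err.span.start, err.span.end, snippet)
            }
            LexError::UnmatchedStrInterpRParen => {
                eprintln!("unmatched ')' at {}..{}: '{}'", err.span.start, err.span.end, snippet)
            }
            LexError::Generic => {
                eprintln!("lexing error at {}..{}: '{}'", err.span.start, err.span.end, snippet)
            }
        }
    }
}
```

Against this input the error should come out as `UnmatchedStrInterpLParen` spanning the `(` byte, the same shape the new test asserts with `Err(Spanned::new(LexError::UnmatchedStrInterpLParen, Span::new(5, 6)))`.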