diff --git a/src/dialect/mod.rs b/src/dialect/mod.rs
index f40cba719..005722c31 100644
--- a/src/dialect/mod.rs
+++ b/src/dialect/mod.rs
@@ -592,6 +592,10 @@ pub trait Dialect: Debug + Any {
         false
     }
 
+    fn supports_dollar_quoted_string(&self) -> bool {
+        true
+    }
+
     /// Does the dialect support with clause in create index statement?
     /// e.g. `CREATE INDEX idx ON t WITH (key = value, key2)`
     fn supports_create_index_with_clause(&self) -> bool {
diff --git a/src/dialect/sqlite.rs b/src/dialect/sqlite.rs
index 95717f9fd..09df6036e 100644
--- a/src/dialect/sqlite.rs
+++ b/src/dialect/sqlite.rs
@@ -81,4 +81,8 @@ impl Dialect for SQLiteDialect {
     fn supports_asc_desc_in_column_definition(&self) -> bool {
         true
     }
+
+    fn supports_dollar_quoted_string(&self) -> bool {
+        false
+    }
 }
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index aacfc16fa..41cc7f2d6 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -1473,7 +1473,9 @@ impl<'a> Tokenizer<'a> {
 
         chars.next();
 
-        if let Some('$') = chars.peek() {
+        // Check if the second character is a dollar sign
+        let next_is_dollar = matches!(chars.peek(), Some('$'));
+        if next_is_dollar && self.dialect.supports_dollar_quoted_string() {
             chars.next();
 
             let mut is_terminated = false;
@@ -1507,10 +1509,13 @@ impl<'a> Tokenizer<'a> {
             };
         } else {
             value.push_str(&peeking_take_while(chars, |ch| {
-                ch.is_alphanumeric() || ch == '_'
+                ch.is_alphanumeric()
+                    || ch == '_'
+                    || matches!(ch, '$' if !self.dialect.supports_dollar_quoted_string())
             }));
 
-            if let Some('$') = chars.peek() {
+            let next_is_dollar = matches!(chars.peek(), Some('$'));
+            if next_is_dollar && self.dialect.supports_dollar_quoted_string() {
                 chars.next();
 
                 'searching_for_end: loop {
@@ -2080,7 +2085,7 @@ fn take_char_from_hex_digits(
 mod tests {
     use super::*;
     use crate::dialect::{
-        BigQueryDialect, ClickHouseDialect, HiveDialect, MsSqlDialect, MySqlDialect,
+        BigQueryDialect, ClickHouseDialect, HiveDialect, MsSqlDialect, MySqlDialect, SQLiteDialect,
     };
     use core::fmt::Debug;
 
@@ -2516,6 +2521,30 @@ mod tests {
         );
     }
 
+    #[test]
+    fn tokenize_dollar_placeholder_sqlite() {
+        let sql = String::from("SELECT $$, $$ABC$$, $ABC$, $ABC");
+        let dialect = SQLiteDialect {};
+        let tokens = Tokenizer::new(&dialect, &sql).tokenize().unwrap();
+        assert_eq!(
+            tokens,
+            vec![
+                Token::make_keyword("SELECT"),
+                Token::Whitespace(Whitespace::Space),
+                Token::Placeholder("$$".into()),
+                Token::Comma,
+                Token::Whitespace(Whitespace::Space),
+                Token::Placeholder("$$ABC$$".into()),
+                Token::Comma,
+                Token::Whitespace(Whitespace::Space),
+                Token::Placeholder("$ABC$".into()),
+                Token::Comma,
+                Token::Whitespace(Whitespace::Space),
+                Token::Placeholder("$ABC".into()),
+            ]
+        );
+    }
+
     #[test]
     fn tokenize_dollar_quoted_string_untagged() {
         let sql =
diff --git a/tests/sqlparser_sqlite.rs b/tests/sqlparser_sqlite.rs
index 987b1263d..e8bd42236 100644
--- a/tests/sqlparser_sqlite.rs
+++ b/tests/sqlparser_sqlite.rs
@@ -570,6 +570,16 @@ fn test_dollar_identifier_as_placeholder() {
         }
         _ => unreachable!(),
     }
+
+    // $$ is a valid placeholder in SQLite
+    match sqlite().verified_expr("id = $$") {
+        Expr::BinaryOp { op, left, right } => {
+            assert_eq!(op, BinaryOperator::Eq);
+            assert_eq!(left, Box::new(Expr::Identifier(Ident::new("id"))));
+            assert_eq!(right, Box::new(Expr::Value(Placeholder("$$".to_string()))));
+        }
+        _ => unreachable!(),
+    }
 }
 
 fn sqlite() -> TestedDialects {