Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/formatter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -233,7 +233,7 @@ impl<'a> Formatter<'a> {
self.add_new_line(query);
}
query.push_str(&self.equalize_whitespace(&self.format_reserved_word(token.value)));
if newline_after {
if newline_after && token.alias != "CREATE" {
self.indentation.increase_top_level(span_info);
self.add_new_line(query);
} else {
Expand Down
19 changes: 19 additions & 0 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -838,6 +838,25 @@ mod tests {
assert_eq!(format(input, &QueryParams::None, &options), expected);
}

#[test]
// Verifies that a DELETE ... USING ... WHERE query is split into top-level
// clauses (DELETE FROM / USING / WHERE), i.e. that USING is treated as a
// top-level reserved token in the DELETE context rather than a plain
// reserved word.
fn it_formats_full_delete_query() {
let input =
"DELETE FROM Customers USING Phonebook WHERE CustomerName='Alfred' AND Phone=5002132;";
// Default formatting options (default indent, no uppercase forcing, etc.).
let options = FormatOptions::default();
// NOTE(review): the relative indentation inside this indoc! literal appears
// to have been lost in this view (clause operands are normally indented
// under their clause keyword) — confirm against the real file before
// relying on these exact bytes.
let expected = indoc!(
"
DELETE FROM
Customers
USING
Phonebook
WHERE
CustomerName = 'Alfred'
AND Phone = 5002132;"
);

// No placeholder substitution: QueryParams::None.
assert_eq!(format(input, &QueryParams::None, &options), expected);
}

#[test]
fn it_formats_simple_drop_query() {
let input = "DROP TABLE IF EXISTS admin_role;";
Expand Down
94 changes: 77 additions & 17 deletions src/tokenizer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,8 @@ pub(crate) struct Token<'a> {
pub value: &'a str,
// Only used for placeholder--there is a reason this isn't on the enum
pub key: Option<PlaceholderKind<'a>>,
/// Used to group the behaviour of variants of tokens
pub alias: &'a str,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
Expand Down Expand Up @@ -166,6 +168,7 @@ fn get_type_specifier_token<'i>(
kind: TokenKind::TypeSpecifier,
value: token,
key: None,
alias: token,
})
}
}
Expand All @@ -176,6 +179,7 @@ fn get_whitespace_token<'i>(input: &mut &'i str) -> Result<Token<'i>> {
kind: TokenKind::Whitespace,
value: token,
key: None,
alias: token,
})
}

Expand All @@ -192,6 +196,7 @@ fn get_comment_token<'i>(input: &mut &'i str) -> Result<Token<'i>> {
kind,
value: token,
key: None,
alias: token,
})
}

Expand Down Expand Up @@ -250,6 +255,7 @@ fn get_string_token<'i>(input: &mut &'i str) -> Result<Token<'i>> {
kind: TokenKind::String,
value: token,
key: None,
alias: token,
})
}

Expand All @@ -269,6 +275,7 @@ fn get_placeholder_string_token<'i>(input: &mut &'i str) -> Result<Token<'i>> {
kind: TokenKind::String,
value: token,
key: None,
alias: token,
})
}

Expand All @@ -279,6 +286,7 @@ fn get_open_paren_token<'i>(input: &mut &'i str) -> Result<Token<'i>> {
kind: TokenKind::OpenParen,
value: token,
key: None,
alias: token,
})
}

Expand All @@ -289,6 +297,7 @@ fn get_close_paren_token<'i>(input: &mut &'i str) -> Result<Token<'i>> {
kind: TokenKind::CloseParen,
value: token,
key: None,
alias: token,
})
}

Expand Down Expand Up @@ -332,6 +341,7 @@ fn get_indexed_placeholder_token<'i>(input: &mut &'i str) -> Result<Token<'i>> {
} else {
None
},
alias: token,
})
}

Expand All @@ -350,6 +360,7 @@ fn get_ident_named_placeholder_token<'i>(input: &mut &'i str) -> Result<Token<'i
kind: TokenKind::Placeholder,
value: token,
key: Some(PlaceholderKind::Named(index)),
alias: token,
}
})
}
Expand All @@ -365,6 +376,7 @@ fn get_string_named_placeholder_token<'i>(input: &mut &'i str) -> Result<Token<'
kind: TokenKind::Placeholder,
value: token,
key: Some(PlaceholderKind::Named(index)),
alias: token,
}
})
}
Expand All @@ -381,6 +393,7 @@ fn get_number_token<'i>(input: &mut &'i str) -> Result<Token<'i>> {
kind: TokenKind::Number,
value: token,
key: None,
alias: token,
})
}

Expand Down Expand Up @@ -464,6 +477,24 @@ fn get_top_level_reserved_token<'a>(
))
.parse_next(&mut uc_input),

'C' => terminated(
(
"CREATE ",
opt(alt((
"UNLOGGED ",
(
alt(("GLOBAL ", "LOCAL ")),
opt(alt(("TEMPORARY ", "TEMP "))),
)
.take(),
))),
"TABLE",
)
.take(),
end_of_word,
)
.parse_next(&mut uc_input),

'D' => terminated("DELETE FROM", end_of_word).parse_next(&mut uc_input),

'E' => terminated("EXCEPT", end_of_word).parse_next(&mut uc_input),
Expand All @@ -490,7 +521,11 @@ fn get_top_level_reserved_token<'a>(

'L' => terminated("LIMIT", end_of_word).parse_next(&mut uc_input),

'M' => terminated("MODIFY", end_of_word).parse_next(&mut uc_input),
'M' => alt((
terminated("MODIFY", end_of_word),
terminated("MERGE INTO", end_of_word),
))
.parse_next(&mut uc_input),

'O' => alt((
terminated("ORDER BY", end_of_word),
Expand All @@ -512,7 +547,11 @@ fn get_top_level_reserved_token<'a>(
))
.parse_next(&mut uc_input),

'U' => terminated("UPDATE", end_of_word).parse_next(&mut uc_input),
'U' => alt((
terminated("UPDATE", end_of_word),
terminated("USING", end_of_word),
))
.parse_next(&mut uc_input),

'V' => terminated("VALUES", end_of_word).parse_next(&mut uc_input),

Expand All @@ -529,27 +568,35 @@ fn get_top_level_reserved_token<'a>(
if let Ok(token) = result {
let token = finalize(input, token);

let kind = match token {
"EXCEPT"
if last_reserved_top_level_token.is_some()
&& last_reserved_top_level_token.as_ref().unwrap().value == "SELECT" =>
// If the query state doesn't allow EXCEPT, treat it as a regular word
let kind = match (
token,
last_reserved_top_level_token.as_ref().map(|v| v.alias),
) {
("EXCEPT", Some("SELECT")) =>
// If the query state doesn't allow EXCEPT, treat it as a reserved word
{
TokenKind::Word
TokenKind::Reserved
}
"SET"
if last_reserved_top_level_token.is_some()
&& last_reserved_top_level_token.as_ref().unwrap().value == "UPDATE" =>
{
TokenKind::ReservedNewlineAfter
("SET", Some("UPDATE")) => TokenKind::ReservedNewlineAfter,
("USING", v) if v != Some("MERGE INTO") && v != Some("DELETE FROM") => {
TokenKind::Reserved
}
_ => TokenKind::ReservedTopLevel,
};

let alias = if token.starts_with("CREATE") {
"CREATE"
} else if token.starts_with("SELECT") {
"SELECT"
} else {
token
};

Ok(Token {
kind,
value: token,
key: None,
alias,
})
} else {
Err(ParserError::from_input(input))
Expand Down Expand Up @@ -613,6 +660,7 @@ fn get_join_token<'a>() -> impl Parser<&'a str, Token<'a>, ContextError> {
kind,
value: token,
key: None,
alias: token,
})
} else {
Err(ParserError::from_input(input))
Expand All @@ -633,10 +681,12 @@ fn get_newline_after_reserved_token<'a>() -> impl Parser<&'a str, Token<'a>, Con
let result: Result<&str> = on_conflict.parse_next(&mut uc_input);

if let Ok(token) = result {
let value = finalize(input, token);
Ok(Token {
kind: TokenKind::ReservedNewlineAfter,
value: finalize(input, token),
value,
key: None,
alias: value,
})
} else {
Err(ParserError::from_input(input))
Expand Down Expand Up @@ -692,6 +742,7 @@ fn get_newline_reserved_token<'a>(
kind,
value: token,
key: None,
alias: token,
})
} else {
Err(ParserError::from_input(input))
Expand All @@ -716,10 +767,12 @@ fn get_top_level_reserved_token_no_indent<'i>(input: &mut &'i str) -> Result<Tok
))
.parse_next(&mut uc_input);
if let Ok(token) = result {
let value = finalize(input, token);
Ok(Token {
kind: TokenKind::ReservedTopLevelNoIndent,
value: finalize(input, token),
value,
key: None,
alias: value,
})
} else {
Err(ParserError::from_input(input))
Expand Down Expand Up @@ -1080,6 +1133,7 @@ fn get_plain_reserved_one_token<'i>(input: &mut &'i str) -> Result<Token<'i>> {
terminated("TRUNCATE", end_of_word),
terminated("TYPE", end_of_word),
terminated("TYPES", end_of_word),
terminated("TBLPROPERTIES", end_of_word),
))
.parse_next(&mut uc_input),

Expand All @@ -1090,7 +1144,6 @@ fn get_plain_reserved_one_token<'i>(input: &mut &'i str) -> Result<Token<'i>> {
terminated("UNSIGNED", end_of_word),
terminated("USAGE", end_of_word),
terminated("USE", end_of_word),
terminated("USING", end_of_word),
))
.parse_next(&mut uc_input),

Expand Down Expand Up @@ -1118,6 +1171,7 @@ fn get_plain_reserved_one_token<'i>(input: &mut &'i str) -> Result<Token<'i>> {
kind: TokenKind::Reserved,
value: token,
key: None,
alias: token,
})
} else {
Err(ParserError::from_input(input))
Expand All @@ -1134,13 +1188,16 @@ fn get_plain_reserved_two_token<'i>(input: &mut &'i str) -> Result<Token<'i>> {
terminated("ON DELETE", end_of_word),
terminated("ON UPDATE", end_of_word),
terminated("DISTINCT FROM", end_of_word),
terminated("PARTITIONED BY", end_of_word),
))
.parse_next(&mut uc_input);
if let Ok(token) = result {
let value = finalize(input, token);
Ok(Token {
kind: TokenKind::Reserved,
value: finalize(input, token),
value,
key: None,
alias: value,
})
} else {
Err(ParserError::from_input(input))
Expand All @@ -1154,6 +1211,7 @@ fn get_word_token<'i>(input: &mut &'i str) -> Result<Token<'i>> {
kind: TokenKind::Word,
value: token,
key: None,
alias: token,
})
}

Expand All @@ -1168,6 +1226,7 @@ fn get_operator_token<'i>(input: &mut &'i str) -> Result<Token<'i>> {
kind: TokenKind::Operator,
value: token,
key: None,
alias: token,
})
.parse_next(input)
}
Expand All @@ -1179,6 +1238,7 @@ fn get_any_other_char<'i>(input: &mut &'i str) -> Result<Token<'i>> {
kind: TokenKind::Operator,
value: token,
key: None,
alias: token,
})
}

Expand Down
Loading