Skip to content

Commit 1176dcd (parent: d9f8591)

Browse files
committed
feat: Correctly format array type specifier

File tree

3 files changed: 51 additions (+51), 14 deletions (−14)

src/formatter.rs

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,6 @@ pub(crate) fn format(
7676
formatter.format_no_change(token, &mut formatted_query);
7777
continue;
7878
}
79-
8079
match token.kind {
8180
TokenKind::Whitespace => {
8281
// ignore (we do our own whitespace formatting)
@@ -112,8 +111,8 @@ pub(crate) fn format(
112111
TokenKind::Placeholder => {
113112
formatter.format_placeholder(token, &mut formatted_query);
114113
}
115-
TokenKind::DoubleColon => {
116-
formatter.format_double_colon(token, &mut formatted_query);
114+
TokenKind::TypeSpecifier => {
115+
formatter.format_type_specifier(token, &mut formatted_query);
117116
}
118117
_ => match token.value {
119118
"," => {
@@ -189,9 +188,9 @@ impl<'a> Formatter<'a> {
189188
self.add_new_line(query);
190189
}
191190

192-
fn format_double_colon(&self, _token: &Token<'_>, query: &mut String) {
191+
/// Appends a type-specifier token (a `::` cast or a `[]` array marker) to
/// the formatted output.
///
/// Specifiers bind tightly to the expression that precedes them, so any
/// trailing whitespace already emitted into `query` is removed first, then
/// the token's text is appended verbatim.
fn format_type_specifier(&self, token: &Token<'_>, query: &mut String) {
    self.trim_all_spaces_end(query);
    query.push_str(token.value);
}
196195
fn format_block_comment(&mut self, token: &Token<'_>, query: &mut String) {
197196
self.add_new_line(query);

src/lib.rs

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -397,6 +397,22 @@ mod tests {
397397
assert_eq!(format(input, &QueryParams::None, &options), expected);
398398
}
399399

400+
#[test]
fn it_formats_type_specifiers() {
    // Regression test for type-specifier formatting: the `::` cast operator
    // and the `[]` array marker must be attached directly to the preceding
    // expression (no surrounding whitespace), both in a SELECT list and
    // inside a function call such as UNNEST.
    let input = "SELECT id, ARRAY [] :: UUID [] FROM UNNEST($1 :: UUID []);";
    let options = FormatOptions::default();
    // NOTE(review): the indentation inside `expected` was reconstructed from
    // a scraped diff — confirm it matches the indent FormatOptions::default()
    // produces (2 spaces assumed, as in the sibling tests).
    let expected = indoc!(
        "
        SELECT
          id,
          ARRAY[]::UUID[]
        FROM
          UNNEST($1::UUID[]);"
    );

    assert_eq!(format(input, &QueryParams::None, &options), expected);
}
415+
400416
#[test]
401417
fn it_formats_limit_of_single_value_and_offset() {
402418
let input = "LIMIT 5 OFFSET 8;";

src/tokenizer.rs

Lines changed: 31 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@ pub(crate) fn tokenize<'a>(
1717
) -> Vec<Token<'a>> {
1818
let mut tokens: Vec<Token> = Vec::new();
1919

20+
let mut last_non_whitespace_token = None;
2021
let mut last_reserved_token = None;
2122
let mut last_reserved_top_level_token = None;
2223

@@ -27,7 +28,7 @@ pub(crate) fn tokenize<'a>(
2728
// Keep processing the string until it is empty
2829
while let Ok(mut result) = get_next_token(
2930
&mut input,
30-
tokens.last().cloned(),
31+
last_non_whitespace_token.clone(),
3132
last_reserved_token.clone(),
3233
last_reserved_top_level_token.clone(),
3334
named_placeholders,
@@ -49,6 +50,10 @@ pub(crate) fn tokenize<'a>(
4950
_ => {}
5051
}
5152

53+
if result.kind != TokenKind::Whitespace {
54+
last_non_whitespace_token = Some(result.clone());
55+
}
56+
5257
tokens.push(result);
5358

5459
if let Ok(Some(result)) = opt(get_whitespace_token).parse_next(&mut input) {
@@ -68,7 +73,7 @@ pub(crate) struct Token<'a> {
6873

6974
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
7075
pub(crate) enum TokenKind {
71-
DoubleColon,
76+
TypeSpecifier,
7277
Whitespace,
7378
String,
7479
Reserved,
@@ -119,6 +124,7 @@ fn get_next_token<'a>(
119124
) -> Result<Token<'a>> {
120125
alt((
121126
get_comment_token,
127+
|input: &mut _| get_type_specifier_token(input, previous_token.clone()),
122128
get_string_token,
123129
get_open_paren_token,
124130
get_close_paren_token,
@@ -131,20 +137,36 @@ fn get_next_token<'a>(
131137
last_reserved_top_level_token.clone(),
132138
)
133139
},
134-
get_double_colon_token,
135140
get_operator_token,
136141
|input: &mut _| get_placeholder_token(input, named_placeholders),
137142
get_word_token,
138143
get_any_other_char,
139144
))
140145
.parse_next(input)
141146
}
142-
fn get_double_colon_token<'i>(input: &mut &'i str) -> Result<Token<'i>> {
143-
"::".parse_next(input).map(|token| Token {
144-
kind: TokenKind::DoubleColon,
145-
value: token,
146-
key: None,
147-
})
147+
fn get_type_specifier_token<'i>(
148+
input: &mut &'i str,
149+
previous_token: Option<Token<'i>>,
150+
) -> Result<Token<'i>> {
151+
if previous_token.is_some_and(|token| {
152+
![
153+
TokenKind::CloseParen,
154+
TokenKind::Placeholder,
155+
TokenKind::Reserved,
156+
TokenKind::String,
157+
TokenKind::TypeSpecifier,
158+
TokenKind::Word,
159+
]
160+
.contains(&token.kind)
161+
}) {
162+
fail.parse_next(input)
163+
} else {
164+
alt(("::", "[]")).parse_next(input).map(|token| Token {
165+
kind: TokenKind::TypeSpecifier,
166+
value: token,
167+
key: None,
168+
})
169+
}
148170
}
149171
fn get_whitespace_token<'i>(input: &mut &'i str) -> Result<Token<'i>> {
150172
take_while(1.., char::is_whitespace)

0 commit comments

Comments
 (0)