Skip to content

Commit dcfab46

Browse files
committed
Also consider Number tokens before format specifiers
1 parent 5c0de00 commit dcfab46

File tree

3 files changed

+24
-4
lines changed

3 files changed

+24
-4
lines changed

src/formatter.rs

Lines changed: 8 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -369,7 +369,8 @@ impl<'a> Formatter<'a> {
369369
];
370370

371371
const ADD_WHITESPACE_BETWEEN: &[TokenKind] = &[TokenKind::CloseParen, TokenKind::Reserved];
372-
372+
const BEFORE_ARRAY: &[TokenKind] =
373+
&[TokenKind::CloseParen, TokenKind::Word, TokenKind::Reserved];
373374
let inlined = self.inline_block.begin_if_possible(self.tokens, self.index);
374375
let previous_non_whitespace_token = self.previous_non_whitespace_token(1);
375376
let fold_in_top_level = !inlined
@@ -387,13 +388,16 @@ impl<'a> Formatter<'a> {
387388
// Take out the preceding space unless there was whitespace there in the original query
388389
// or another opening parens or line comment
389390
let previous_token = self.previous_token(1);
390-
if previous_token.is_none()
391-
|| !PRESERVE_WHITESPACE_FOR.contains(&previous_token.unwrap().kind)
391+
if previous_token.is_none_or(|t| !PRESERVE_WHITESPACE_FOR.contains(&t.kind))
392+
|| previous_non_whitespace_token
393+
.is_some_and(|t| token.value == "[" && BEFORE_ARRAY.contains(&t.kind))
392394
{
393395
self.trim_spaces_end(query);
394396
}
395397

396-
if previous_non_whitespace_token.is_some_and(|t| ADD_WHITESPACE_BETWEEN.contains(&t.kind)) {
398+
if previous_non_whitespace_token
399+
.is_some_and(|t| token.value != "[" && ADD_WHITESPACE_BETWEEN.contains(&t.kind))
400+
{
397401
self.trim_spaces_end(query);
398402
query.push(' ');
399403
}

src/lib.rs

Lines changed: 15 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -554,6 +554,21 @@ mod tests {
554554
assert_eq!(format(input, &QueryParams::None, &options), expected);
555555
}
556556

557+
#[test]
558+
fn it_formats_array_index_notation() {
559+
let input = "SELECT a [ 1 ] + b [ 2 ] [ 5+1 ] > c [3] ;";
560+
let options = FormatOptions {
561+
dialect: Dialect::PostgreSql,
562+
..Default::default()
563+
};
564+
let expected = indoc!(
565+
"
566+
SELECT
567+
a[1] + b[2][5 + 1] > c[3];"
568+
);
569+
570+
assert_eq!(format(input, &QueryParams::None, &options), expected);
571+
}
557572
#[test]
558573
fn it_formats_limit_of_single_value_and_offset() {
559574
let input = "LIMIT 5 OFFSET 8;";

src/tokenizer.rs

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -159,6 +159,7 @@ fn get_type_specifier_token<'i>(
159159
TokenKind::Placeholder,
160160
TokenKind::Reserved,
161161
TokenKind::String,
162+
TokenKind::Number,
162163
TokenKind::TypeSpecifier,
163164
TokenKind::Word,
164165
]

0 commit comments

Comments
 (0)