Skip to content

Commit d5424cd — "Correctly format array type specifier" (#91). Parent commit: d9f8591.

File tree

6 files changed: +60 additions, −32 deletions.

.github/workflows/sqlformat.yml

Lines changed: 1 addition & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -20,7 +20,7 @@ jobs:
2020
- latest-nightly
2121
include:
2222
- conf: minimum
23-
toolchain: 1.65.0
23+
toolchain: 1.84.0
2424
- conf: latest-stable
2525
toolchain: stable
2626
- conf: latest-beta
@@ -50,16 +50,7 @@ jobs:
5050
uses: actions-rs-plus/clippy-check@v2.3.0
5151
with:
5252
args: --all -- -D warnings
53-
# FIXME: criterion and its dependencies require a newer version than 1.65, but it is only used for benchmarks.
54-
# Is there a way to not have criterion built when we run tests?
55-
- name: Run cargo check
56-
if: matrix.toolchain == '1.65.0'
57-
run: cargo check
5853
- name: Run tests
59-
if: matrix.toolchain != '1.65.0'
6054
run: cargo test
61-
- name: Build benchmarks
62-
if: matrix.toolchain == 'stable'
63-
run: cargo bench --no-run
6455
- name: Build docs
6556
run: cargo doc --no-deps

Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -3,7 +3,7 @@ name = "sqlformat"
33
version = "0.3.5"
44
authors = ["Josh Holmer <jholmer.in@gmail.com>"]
55
edition = "2021"
6-
rust-version = "1.65"
6+
rust-version = "1.84"
77
license = "MIT OR Apache-2.0"
88
homepage = "https://github.com/shssoichiro/sqlformat-rs"
99
repository = "https://github.com/shssoichiro/sqlformat-rs"

src/formatter.rs

Lines changed: 10 additions & 11 deletions
Original file line number | Diff line number | Diff line change
@@ -76,7 +76,6 @@ pub(crate) fn format(
7676
formatter.format_no_change(token, &mut formatted_query);
7777
continue;
7878
}
79-
8079
match token.kind {
8180
TokenKind::Whitespace => {
8281
// ignore (we do our own whitespace formatting)
@@ -112,8 +111,8 @@ pub(crate) fn format(
112111
TokenKind::Placeholder => {
113112
formatter.format_placeholder(token, &mut formatted_query);
114113
}
115-
TokenKind::DoubleColon => {
116-
formatter.format_double_colon(token, &mut formatted_query);
114+
TokenKind::TypeSpecifier => {
115+
formatter.format_type_specifier(token, &mut formatted_query);
117116
}
118117
_ => match token.value {
119118
"," => {
@@ -166,11 +165,11 @@ impl<'a> Formatter<'a> {
166165

167166
fn format_line_comment(&mut self, token: &Token<'_>, query: &mut String) {
168167
let is_whitespace_followed_by_special_token =
169-
self.next_token(1).map_or(false, |current_token| {
168+
self.next_token(1).is_some_and(|current_token| {
170169
current_token.kind == TokenKind::Whitespace
171-
&& self.next_token(2).map_or(false, |next_token| {
172-
!matches!(next_token.kind, TokenKind::Operator)
173-
})
170+
&& self
171+
.next_token(2)
172+
.is_some_and(|next_token| !matches!(next_token.kind, TokenKind::Operator))
174173
});
175174

176175
let previous_token = self.previous_token(1);
@@ -189,9 +188,9 @@ impl<'a> Formatter<'a> {
189188
self.add_new_line(query);
190189
}
191190

192-
fn format_double_colon(&self, _token: &Token<'_>, query: &mut String) {
191+
fn format_type_specifier(&self, token: &Token<'_>, query: &mut String) {
193192
self.trim_all_spaces_end(query);
194-
query.push_str("::");
193+
query.push_str(token.value);
195194
}
196195
fn format_block_comment(&mut self, token: &Token<'_>, query: &mut String) {
197196
self.add_new_line(query);
@@ -211,7 +210,7 @@ impl<'a> Formatter<'a> {
211210
true,
212211
self.options
213212
.max_inline_top_level
214-
.map_or(true, |limit| limit < span_len),
213+
.is_none_or(|limit| limit < span_len),
215214
)
216215
}
217216
}
@@ -254,7 +253,7 @@ impl<'a> Formatter<'a> {
254253
&& self
255254
.options
256255
.max_inline_arguments
257-
.map_or(true, |limit| limit < self.indentation.span())
256+
.is_none_or(|limit| limit < self.indentation.span())
258257
{
259258
self.add_new_line(query);
260259
} else {

src/lib.rs

Lines changed: 16 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -397,6 +397,22 @@ mod tests {
397397
assert_eq!(format(input, &QueryParams::None, &options), expected);
398398
}
399399

400+
#[test]
401+
fn it_formats_type_specifiers() {
402+
let input = "SELECT id, ARRAY [] :: UUID [] FROM UNNEST($1 :: UUID []);";
403+
let options = FormatOptions::default();
404+
let expected = indoc!(
405+
"
406+
SELECT
407+
id,
408+
ARRAY[]::UUID[]
409+
FROM
410+
UNNEST($1::UUID[]);"
411+
);
412+
413+
assert_eq!(format(input, &QueryParams::None, &options), expected);
414+
}
415+
400416
#[test]
401417
fn it_formats_limit_of_single_value_and_offset() {
402418
let input = "LIMIT 5 OFFSET 8;";

src/params.rs

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -12,7 +12,7 @@ impl<'a> Params<'a> {
1212
}
1313

1414
pub fn get(&mut self, token: &'a Token<'a>) -> &'a str {
15-
let named_placeholder_token = token.key.as_ref().map_or(false, |key| key.named() != "");
15+
let named_placeholder_token = token.key.as_ref().is_some_and(|key| key.named() != "");
1616

1717
match self.params {
1818
QueryParams::Named(params) => token

src/tokenizer.rs

Lines changed: 31 additions & 9 deletions
Original file line number | Diff line number | Diff line change
@@ -17,6 +17,7 @@ pub(crate) fn tokenize<'a>(
1717
) -> Vec<Token<'a>> {
1818
let mut tokens: Vec<Token> = Vec::new();
1919

20+
let mut last_non_whitespace_token = None;
2021
let mut last_reserved_token = None;
2122
let mut last_reserved_top_level_token = None;
2223

@@ -27,7 +28,7 @@ pub(crate) fn tokenize<'a>(
2728
// Keep processing the string until it is empty
2829
while let Ok(mut result) = get_next_token(
2930
&mut input,
30-
tokens.last().cloned(),
31+
last_non_whitespace_token.clone(),
3132
last_reserved_token.clone(),
3233
last_reserved_top_level_token.clone(),
3334
named_placeholders,
@@ -49,6 +50,10 @@ pub(crate) fn tokenize<'a>(
4950
_ => {}
5051
}
5152

53+
if result.kind != TokenKind::Whitespace {
54+
last_non_whitespace_token = Some(result.clone());
55+
}
56+
5257
tokens.push(result);
5358

5459
if let Ok(Some(result)) = opt(get_whitespace_token).parse_next(&mut input) {
@@ -68,7 +73,7 @@ pub(crate) struct Token<'a> {
6873

6974
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
7075
pub(crate) enum TokenKind {
71-
DoubleColon,
76+
TypeSpecifier,
7277
Whitespace,
7378
String,
7479
Reserved,
@@ -119,6 +124,7 @@ fn get_next_token<'a>(
119124
) -> Result<Token<'a>> {
120125
alt((
121126
get_comment_token,
127+
|input: &mut _| get_type_specifier_token(input, previous_token.clone()),
122128
get_string_token,
123129
get_open_paren_token,
124130
get_close_paren_token,
@@ -131,20 +137,36 @@ fn get_next_token<'a>(
131137
last_reserved_top_level_token.clone(),
132138
)
133139
},
134-
get_double_colon_token,
135140
get_operator_token,
136141
|input: &mut _| get_placeholder_token(input, named_placeholders),
137142
get_word_token,
138143
get_any_other_char,
139144
))
140145
.parse_next(input)
141146
}
142-
fn get_double_colon_token<'i>(input: &mut &'i str) -> Result<Token<'i>> {
143-
"::".parse_next(input).map(|token| Token {
144-
kind: TokenKind::DoubleColon,
145-
value: token,
146-
key: None,
147-
})
147+
fn get_type_specifier_token<'i>(
148+
input: &mut &'i str,
149+
previous_token: Option<Token<'i>>,
150+
) -> Result<Token<'i>> {
151+
if previous_token.is_some_and(|token| {
152+
![
153+
TokenKind::CloseParen,
154+
TokenKind::Placeholder,
155+
TokenKind::Reserved,
156+
TokenKind::String,
157+
TokenKind::TypeSpecifier,
158+
TokenKind::Word,
159+
]
160+
.contains(&token.kind)
161+
}) {
162+
fail.parse_next(input)
163+
} else {
164+
alt(("::", "[]")).parse_next(input).map(|token| Token {
165+
kind: TokenKind::TypeSpecifier,
166+
value: token,
167+
key: None,
168+
})
169+
}
148170
}
149171
fn get_whitespace_token<'i>(input: &mut &'i str) -> Result<Token<'i>> {
150172
take_while(1.., char::is_whitespace)

Comments (0)