
Commit fa6ddde

Merge pull request #48 from nix-community/float-starting-with-dot
Floats without a leading zero (e.g. `.5`) are valid Nix code
2 parents c917751 + a168ecb commit fa6ddde

File tree

1 file changed: +18 -1


src/tokenizer.rs

Lines changed: 18 additions & 1 deletion
@@ -309,7 +309,14 @@ impl<'a> Iterator for Tokenizer<'a> {
             '@' => Some((TOKEN_AT, self.string_since(start))),
             ':' => Some((TOKEN_COLON, self.string_since(start))),
             ',' => Some((TOKEN_COMMA, self.string_since(start))),
-            '.' => Some((TOKEN_DOT, self.string_since(start))),
+            '.' => {
+                if self.peek().map(|x| x >= '0' && x <= '9').unwrap_or(false) {
+                    self.consume(|c| c >= '0' && c <= '9');
+                    Some((TOKEN_FLOAT, self.string_since(start)))
+                } else {
+                    Some((TOKEN_DOT, self.string_since(start)))
+                }
+            },
             '=' => Some((TOKEN_ASSIGN, self.string_since(start))),
             '?' => Some((TOKEN_QUESTION, self.string_since(start))),
             ';' => Some((TOKEN_SEMICOLON, self.string_since(start))),
@@ -486,6 +493,16 @@ mod tests {
                 (TOKEN_CURLY_B_CLOSE, "}"),
             ],
         );
+        assert_eq!(
+            tokenize(".5 + 0.5"),
+            tokens![
+                (TOKEN_FLOAT, ".5"),
+                (TOKEN_WHITESPACE, " "),
+                (TOKEN_ADD, "+"),
+                (TOKEN_WHITESPACE, " "),
+                (TOKEN_FLOAT, "0.5"),
+            ],
+        );
         assert_eq!(
             tokenize("{ scientific = 1.1e4; uppercase = 123.4E-2; }"),
             tokens![
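
In short, the tokenizer previously emitted TOKEN_DOT for every `.`; after this change it peeks one character ahead and, if a digit follows, consumes the digit run and emits TOKEN_FLOAT instead, so `.5` lexes as a float while `a.b` still lexes with a plain dot. Below is a minimal, self-contained sketch of that lookahead, not rnix-parser's API: the type `Tok` and the function `lex_dot` are hypothetical names used only for illustration.

// Hypothetical, simplified illustration of the diff's lookahead; not rnix-parser's API.
#[derive(Debug, PartialEq)]
enum Tok {
    Dot,
    Float(String),
}

// After a '.' has been read, decide between a plain dot and a float such as ".5".
fn lex_dot(rest: &mut std::iter::Peekable<std::str::Chars<'_>>) -> Tok {
    // Same test as the diff: is the next character an ASCII digit?
    if rest.peek().map(|c| c.is_ascii_digit()).unwrap_or(false) {
        let mut lit = String::from(".");
        // Consume the digit run, mirroring `self.consume(|c| c >= '0' && c <= '9')`.
        while rest.peek().map(|c| c.is_ascii_digit()).unwrap_or(false) {
            lit.push(rest.next().unwrap());
        }
        Tok::Float(lit)
    } else {
        Tok::Dot
    }
}

fn main() {
    // The characters following a leading '.' in ".5 + 0.5".
    let mut input = "5 + 0.5".chars().peekable();
    assert_eq!(lex_dot(&mut input), Tok::Float(".5".into()));

    // E.g. the tail of `a.foo`: no digit follows, so the dot stays a dot.
    let mut attr = "foo".chars().peekable();
    assert_eq!(lex_dot(&mut attr), Tok::Dot);
    println!("ok");
}

In the actual diff, `self.consume` plays the role of the digit loop and the token text comes from `self.string_since(start)`, so the whole `.5` literal is captured in one slice.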
