
Commit dcd0cab

check
1 parent 4586e33 commit dcd0cab

Showing 5 changed files with 52 additions and 71 deletions.

crates/djls-template-ast/src/parser.rs

Lines changed: 7 additions & 7 deletions
@@ -82,17 +82,17 @@ impl Parser {
         let start = token.start().unwrap_or(0);

         Ok(Node::Comment {
-            content: token.token_type().to_string(),
-            span: Span::new(start, token.token_type().len().unwrap_or(0) as u32),
+            content: token.content().to_string(),
+            span: Span::new(start, token.content().len() as u32),
         })
     }

     fn parse_django_block(&mut self, content: &str) -> Result<Node, ParserError> {
         let token = self.peek_previous()?;
         let start = token.start().unwrap_or(0);
-        let length = token.length().unwrap_or(0);
+        let length = token.content().len();

-        let span = Span::new(start, length);
+        let span = Span::new(start, length as u32);

         let bits: Vec<String> = content.split_whitespace().map(String::from).collect();
         let tag_name = bits.first().ok_or(ParserError::EmptyTag)?.clone();
@@ -227,7 +227,7 @@ impl Parser {
     fn parse_django_variable(&mut self) -> Result<Node, ParserError> {
         let token = self.peek_previous()?;
         let start = token.start().unwrap_or(0);
-        let content = token.token_type().lexeme();
+        let content = token.content();

         let parts: Vec<&str> = content.split('|').collect();
         let bits: Vec<String> = parts[0].split('.').map(|s| s.trim().to_string()).collect();
@@ -267,7 +267,7 @@ impl Parser {
             return self.next_node();
         }

-        let mut text = token.token_type().to_string();
+        let mut text = token.lexeme();

         while let Ok(token) = self.peek() {
             match token.token_type() {
@@ -277,7 +277,7 @@ impl Parser {
                 | TokenType::Newline
                 | TokenType::Eof => break,
                 _ => {
-                    let token_text = token.token_type().to_string();
+                    let token_text = token.lexeme();
                     text.push_str(&token_text);
                     self.consume()?;
                 }
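
With these changes a comment node stores the token's inner content and sizes its span from that content, so the recorded length drops by the {# and #} delimiters plus their padding spaces. A quick standalone check of the arithmetic behind the snapshot updates below (a sketch, not part of the crate):

fn main() {
    let lexeme = "{# Django comment #}"; // what the old Display-based path measured
    let content = "Django comment";      // what token.content() now returns
    assert_eq!(lexeme.len(), 20);        // old snapshot length
    assert_eq!(content.len(), 14);       // new snapshot length
}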

crates/djls-template-ast/src/snapshots/djls_template_ast__parser__tests__comments__parse_comments.snap

Lines changed: 2 additions & 2 deletions
@@ -9,9 +9,9 @@ nodes:
         start: 0
         length: 21
   - Comment:
-      content: "{# Django comment #}"
+      content: Django comment
       span:
         start: 21
-        length: 20
+        length: 14
 line_offsets:
   - 0

crates/djls-template-ast/src/snapshots/djls_template_ast__parser__tests__errors__parse_error_recovery.snap

Lines changed: 4 additions & 4 deletions
@@ -29,10 +29,10 @@ nodes:
         assignment: ~
       nodes:
         - Comment:
-            content: "{# This if is unclosed which does matter #}"
+            content: This if is unclosed which does matter
            span:
              start: 87
-             length: 43
+             length: 37
         - Text:
            content: "<p>Welcome"
            span:
@@ -57,10 +57,10 @@
              start: 178
              length: 5
         - Comment:
-            content: "{# This div is unclosed which doesn't matter #}"
+            content: "This div is unclosed which doesn't matter"
            span:
              start: 196
-             length: 47
+             length: 41
         - Block:
             Block:
               tag:

crates/djls-template-ast/src/snapshots/djls_template_ast__parser__tests__full_templates__parse_full.snap

Lines changed: 2 additions & 2 deletions
@@ -109,10 +109,10 @@ nodes:
         assignment: ~
       nodes:
         - Comment:
-            content: "{# Welcome message #}"
+            content: Welcome message
            span:
              start: 510
-             length: 21
+             length: 15
         - Text:
            content: "<h1>Welcome,"
            span:

crates/djls-template-ast/src/tokens.rs

Lines changed: 37 additions & 56 deletions
@@ -1,5 +1,4 @@
 use serde::Serialize;
-use std::fmt;
 use std::ops::{Deref, DerefMut};

 #[derive(Clone, Debug, Serialize, PartialEq)]
@@ -33,56 +32,10 @@ impl TokenType {
             | TokenType::StyleTagOpen(s)
             | TokenType::StyleTagClose(s)
             | TokenType::Text(s) => Some(s.len()),
-            TokenType::Comment(content, start, end) => match end {
-                Some(end) => Some(start.len() + 1 + content.len() + 1 + end.len()),
-                None => Some(start.len() + 1 + content.len()),
-            },
-            TokenType::Whitespace(len) => Some(*len),
+            TokenType::Comment(content, _, _) => Some(content.len()),
+            TokenType::Whitespace(n) => Some(*n),
             TokenType::Newline => Some(1),
-            TokenType::Eof => None,
-        }
-    }
-
-    pub fn lexeme(&self) -> &str {
-        match self {
-            TokenType::DjangoBlock(s)
-            | TokenType::DjangoVariable(s)
-            | TokenType::HtmlTagOpen(s)
-            | TokenType::HtmlTagClose(s)
-            | TokenType::HtmlTagVoid(s)
-            | TokenType::ScriptTagOpen(s)
-            | TokenType::ScriptTagClose(s)
-            | TokenType::StyleTagOpen(s)
-            | TokenType::StyleTagClose(s)
-            | TokenType::Text(s) => s,
-            TokenType::Comment(content, _, _) => content,
-            TokenType::Whitespace(_) => " ",
-            TokenType::Newline => "\n",
-            TokenType::Eof => "",
-        }
-    }
-}
-
-impl fmt::Display for TokenType {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        use TokenType::*;
-
-        match self {
-            Comment(content, start, end) => match end {
-                Some(end) => write!(f, "{} {} {}", start, content, end),
-                None => write!(f, "{} {}", start, content),
-            },
-            DjangoBlock(s) => write!(f, "{{% {} %}}", s),
-            DjangoVariable(s) => write!(f, "{{{{ {} }}}}", s),
-            Eof => Ok(()),
-            HtmlTagOpen(s) | ScriptTagOpen(s) | StyleTagOpen(s) => write!(f, "<{}>", s),
-            HtmlTagClose(s) => write!(f, "</{}>", s),
-            HtmlTagVoid(s) => write!(f, "<{}/>", s),
-            Newline => f.write_str("\n"),
-            ScriptTagClose(_) => f.write_str("</script>"),
-            StyleTagClose(_) => f.write_str("</style>"),
-            Text(s) => f.write_str(s),
-            Whitespace(len) => f.write_str(&" ".repeat(*len)),
+            TokenType::Eof => Some(0),
         }
     }
 }
@@ -103,15 +56,43 @@ impl Token {
         }
     }

-    pub fn lexeme_from_source<'a>(&self, source: &'a str) -> Option<&'a str> {
-        match (self.start, self.token_type.len()) {
-            (Some(start), Some(len)) => Some(&source[start..start + len]),
-            _ => None,
+    pub fn lexeme(&self) -> String {
+        match &self.token_type {
+            TokenType::Comment(_, start, end) => match end {
+                Some(end) => format!("{} {} {}", start, self.content(), end),
+                None => format!("{} {}", start, self.content()),
+            },
+            TokenType::DjangoBlock(_) => format!("{{% {} %}}", self.content()),
+            TokenType::DjangoVariable(_) => format!("{{{{ {} }}}}", self.content()),
+            TokenType::Eof => String::new(),
+            TokenType::HtmlTagOpen(_)
+            | TokenType::ScriptTagOpen(_)
+            | TokenType::StyleTagOpen(_) => format!("<{}>", self.content()),
+            TokenType::HtmlTagClose(_)
+            | TokenType::StyleTagClose(_)
+            | TokenType::ScriptTagClose(_) => format!("</{}>", self.content()),
+            TokenType::HtmlTagVoid(_) => format!("<{}/>", self.content()),
+            TokenType::Newline | TokenType::Text(_) | TokenType::Whitespace(_) => self.content(),
         }
     }

-    pub fn lexeme(&self) -> &str {
-        self.token_type.lexeme()
+    pub fn content(&self) -> String {
+        match &self.token_type {
+            TokenType::Comment(s, _, _)
+            | TokenType::DjangoBlock(s)
+            | TokenType::DjangoVariable(s)
+            | TokenType::Text(s)
+            | TokenType::HtmlTagOpen(s)
+            | TokenType::HtmlTagClose(s)
+            | TokenType::HtmlTagVoid(s)
+            | TokenType::ScriptTagOpen(s)
+            | TokenType::ScriptTagClose(s)
+            | TokenType::StyleTagOpen(s)
+            | TokenType::StyleTagClose(s) => s.to_string(),
+            TokenType::Whitespace(len) => " ".repeat(*len),
+            TokenType::Newline => "\n".to_string(),
+            TokenType::Eof => "".to_string(),
+        }
     }

     pub fn token_type(&self) -> &TokenType {
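
Taken together, the parser now asks Token for two things: content(), the inner text that ends up in AST nodes, and lexeme(), a reconstruction of the token's source text with delimiters re-added (replacing TokenType::lexeme, the Display impl, and the source-slicing lexeme_from_source). A standalone sketch of that split, separate from the crate, with a simplified Tok enum standing in for Token/TokenType:

// Simplified stand-in for two of the token kinds in tokens.rs; the real
// methods live on Token and cover every TokenType variant.
enum Tok {
    Comment { content: String, open: String, close: Option<String> },
    Variable(String),
}

impl Tok {
    // Inner text only: this is what the parser now stores in the AST.
    fn content(&self) -> String {
        match self {
            Tok::Comment { content, .. } => content.clone(),
            Tok::Variable(s) => s.clone(),
        }
    }

    // Delimiters re-added with single-space padding: an approximation of the
    // original source text, not a slice of it.
    fn lexeme(&self) -> String {
        match self {
            Tok::Comment { content, open, close } => match close {
                Some(close) => format!("{} {} {}", open, content, close),
                None => format!("{} {}", open, content),
            },
            Tok::Variable(s) => format!("{{{{ {} }}}}", s),
        }
    }
}

fn main() {
    let c = Tok::Comment {
        content: "Django comment".into(),
        open: "{#".into(),
        close: Some("#}".into()),
    };
    assert_eq!(c.content(), "Django comment");      // 14 bytes: the new span length
    assert_eq!(c.lexeme(), "{# Django comment #}"); // 20 bytes: the old span length

    let v = Tok::Variable("user.name".into());
    assert_eq!(v.lexeme(), "{{ user.name }}");
}

One visible trade-off: the new lexeme() normalizes padding to single spaces, so it approximates the original text rather than slicing it out of the source the way the removed lexeme_from_source did.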
