Commit b10df9b

clean up
1 parent dcd0cab commit b10df9b

2 files changed: 44 additions & 22 deletions


crates/djls-template-ast/src/ast.rs

Lines changed: 18 additions & 0 deletions
```diff
@@ -1,6 +1,8 @@
 use serde::Serialize;
 use thiserror::Error;
 
+use crate::tokens::{Token, TokenType};
+
 #[derive(Clone, Debug, Default, Serialize)]
 pub struct Ast {
     nodes: Vec<Node>,
@@ -89,6 +91,22 @@ impl Span {
     }
 }
 
+impl From<Token> for Span {
+    fn from(token: Token) -> Self {
+        let start = {
+            let token_start = token.start().unwrap_or(0);
+            match token.token_type() {
+                TokenType::Comment(_, start, _) => token_start + start.len() as u32,
+                TokenType::DjangoBlock(_) | TokenType::DjangoVariable(_) => token_start + 2,
+                _ => token_start,
+            }
+        };
+        let length = token.content().len() as u32;
+
+        Span::new(start, length)
+    }
+}
+
 #[derive(Clone, Debug, Serialize)]
 pub enum Node {
     Text {
```
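
The new conversion centralizes the delimiter-skipping arithmetic the parser previously repeated by hand: a Django block or variable token's reported start points at the opening delimiter (`{%` or `{{`), so the span begins two characters later, while a comment appears to skip the length of its opening marker. A minimal, self-contained sketch of that arithmetic follows; the simplified `Span`, the helper name, and the example offsets are assumptions for illustration, not the crate's actual types or data.

```rust
// Simplified stand-ins for illustration only; the real `Token`, `TokenType`,
// and `Span` live in djls-template-ast and differ in shape.
#[derive(Debug, PartialEq)]
struct Span {
    start: u32,
    length: u32,
}

/// Span for a `{{ ... }}` or `{% ... %}` token: skip the two-character
/// opening delimiter and cover only the inner content.
fn delimited_span(token_start: u32, content: &str) -> Span {
    Span {
        start: token_start + 2,
        length: content.len() as u32,
    }
}

fn main() {
    // Assume a variable token starting at offset 10 whose content (without
    // the `{{` / `}}` delimiters) is " user.name ".
    let span = delimited_span(10, " user.name ");
    assert_eq!(span, Span { start: 12, length: 11 });
}
```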

crates/djls-template-ast/src/parser.rs

Lines changed: 26 additions & 22 deletions
```diff
@@ -23,13 +23,11 @@ impl Parser {
         let mut line_offsets = LineOffsets::new();
 
         // First pass: collect line offsets
-        let mut current_line_start = 0;
         for token in self.tokens.tokens() {
             if let TokenType::Newline = token.token_type() {
                 if let Some(start) = token.start() {
                     // Add offset for next line
-                    current_line_start = start + 1;
-                    line_offsets.add_line(current_line_start);
+                    line_offsets.add_line(start + 1);
                 }
             }
         }
@@ -57,7 +55,7 @@ impl Parser {
         match token.token_type() {
             TokenType::Comment(_, open, _) => self.parse_comment(open),
             TokenType::Eof => Err(ParserError::Ast(AstError::StreamError("AtEnd".to_string()))),
-            TokenType::DjangoBlock(content) => self.parse_django_block(content),
+            TokenType::DjangoBlock(_) => self.parse_django_block(),
             TokenType::DjangoVariable(_) => self.parse_django_variable(),
             TokenType::HtmlTagClose(_)
             | TokenType::HtmlTagOpen(_)
@@ -79,23 +77,23 @@ impl Parser {
         };
 
         let token = self.peek_previous()?;
-        let start = token.start().unwrap_or(0);
 
         Ok(Node::Comment {
-            content: token.content().to_string(),
-            span: Span::new(start, token.content().len() as u32),
+            content: token.content(),
+            span: Span::from(token),
         })
     }
 
-    fn parse_django_block(&mut self, content: &str) -> Result<Node, ParserError> {
+    fn parse_django_block(&mut self) -> Result<Node, ParserError> {
         let token = self.peek_previous()?;
-        let start = token.start().unwrap_or(0);
-        let length = token.content().len();
-
-        let span = Span::new(start, length as u32);
 
-        let bits: Vec<String> = content.split_whitespace().map(String::from).collect();
+        let bits: Vec<String> = token
+            .content()
+            .split_whitespace()
+            .map(String::from)
+            .collect();
         let tag_name = bits.first().ok_or(ParserError::EmptyTag)?.clone();
+        let span = Span::from(token);
 
         let tag = Tag {
             name: tag_name.clone(),
@@ -226,16 +224,16 @@ impl Parser {
 
     fn parse_django_variable(&mut self) -> Result<Node, ParserError> {
         let token = self.peek_previous()?;
-        let start = token.start().unwrap_or(0);
-        let content = token.content();
 
+        let content = token.content();
         let parts: Vec<&str> = content.split('|').collect();
         let bits: Vec<String> = parts[0].split('.').map(|s| s.trim().to_string()).collect();
+
         let mut filters = Vec::new();
+        let mut filter_offset = parts[0].len() as u32 + 1;
 
         for filter_part in parts.iter().skip(1) {
             let filter_parts: Vec<&str> = filter_part.split(':').collect();
-            let name = filter_parts[0].trim();
             let args = if filter_parts.len() > 1 {
                 filter_parts[1]
                     .split(',')
@@ -246,22 +244,26 @@ impl Parser {
             };
 
             filters.push(DjangoFilter {
-                name: name.to_string(),
+                name: filter_parts[0].trim().to_string(),
                 args,
-                span: Span::new(start + 4, content.len() as u32),
+                span: Span::new(
+                    token.start().unwrap_or(0) + filter_offset,
+                    filter_part.len() as u32,
+                ),
             });
+
+            filter_offset += filter_part.len() as u32 + 1;
         }
 
         Ok(Node::Variable {
             bits,
             filters,
-            span: Span::new(start + 3, content.len() as u32),
+            span: Span::from(token),
         })
     }
 
     fn parse_text(&mut self) -> Result<Node, ParserError> {
         let token = self.peek_previous()?;
-        let start = token.start().unwrap_or(0);
 
         if token.token_type() == &TokenType::Newline {
            return self.next_node();
@@ -288,8 +290,10 @@ impl Parser {
             "" => return self.next_node(),
             trimmed => trimmed.to_string(),
         };
-        let offset = u32::try_from(text.find(content.as_str()).unwrap_or(0)).unwrap();
-        let length = u32::try_from(content.len()).unwrap();
+
+        let start = token.start().unwrap_or(0);
+        let offset = text.find(content.as_str()).unwrap_or(0) as u32;
+        let length = content.len() as u32;
 
         Ok(Node::Text {
             content,
```

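The `filter_offset` bookkeeping in `parse_django_variable` replaces the old fixed `start + 4` span: each filter's span now begins where that filter's text starts inside the variable content and covers only that filter. A standalone sketch of the offset arithmetic follows; the `filter_offsets` helper and the example content are illustrative assumptions, not part of the crate, and the real lexer may keep surrounding whitespace in the content.

```rust
// Walks a variable's content the same way the parser splits it on '|' and
// reports where each filter segment starts and how long it is.
fn filter_offsets(content: &str) -> Vec<(String, u32, u32)> {
    let parts: Vec<&str> = content.split('|').collect();
    // Offset of the first filter: the expression before the first '|',
    // plus the '|' itself.
    let mut filter_offset = parts[0].len() as u32 + 1;
    let mut spans = Vec::new();

    for filter_part in parts.iter().skip(1) {
        let name = filter_part.split(':').next().unwrap_or("").trim().to_string();
        spans.push((name, filter_offset, filter_part.len() as u32));
        // Advance past this filter and the '|' that follows it.
        filter_offset += filter_part.len() as u32 + 1;
    }

    spans
}

fn main() {
    // Assumed content of `{{ value|lower|truncatechars:20 }}` with the
    // surrounding delimiters and spaces already stripped.
    let content = "value|lower|truncatechars:20";
    assert_eq!(
        filter_offsets(content),
        vec![
            ("lower".to_string(), 6, 5),
            ("truncatechars".to_string(), 12, 16),
        ]
    );
}
```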