Commit 3a24bc3
revert span pair
1 parent 7bfc257 commit 3a24bc3

File tree

6 files changed (+159, -361 lines)

crates/djls-templates/src/lexer.rs

Lines changed: 14 additions & 203 deletions
@@ -1,7 +1,6 @@
 use djls_source::Span;
 
 use crate::db::Db as TemplateDb;
-use crate::spans::SpanPair;
 use crate::tokens::Token;
 use crate::tokens::TokenContent;
 
@@ -38,14 +37,15 @@ impl<'db> Lexer<'db> {
 
         let token = match self.peek() {
             '{' => match self.peek_next() {
-                '%' => self.lex_django_construct(BLOCK_TAG_END, |content, spans| {
-                    Token::Block { content, spans }
+                '%' => self.lex_django_construct(BLOCK_TAG_END, |content, span| Token::Block {
+                    content,
+                    span,
                 }),
-                '{' => self.lex_django_construct(VARIABLE_TAG_END, |content, spans| {
-                    Token::Variable { content, spans }
+                '{' => self.lex_django_construct(VARIABLE_TAG_END, |content, span| {
+                    Token::Variable { content, span }
                 }),
-                '#' => self.lex_django_construct(COMMENT_TAG_END, |content, spans| {
-                    Token::Comment { content, spans }
+                '#' => self.lex_django_construct(COMMENT_TAG_END, |content, span| {
+                    Token::Comment { content, span }
                 }),
                 _ => self.lex_text(),
             },
@@ -64,32 +64,26 @@ impl<'db> Lexer<'db> {
     fn lex_django_construct(
         &mut self,
         end: &str,
-        token_fn: impl FnOnce(TokenContent<'db>, SpanPair) -> Token<'db>,
+        token_fn: impl FnOnce(TokenContent<'db>, Span) -> Token<'db>,
     ) -> Token<'db> {
         let opening_len = 2;
        let content_start = self.start + opening_len;
 
-        self.consume_n(2);
+        self.consume_n(opening_len);
 
         match self.consume_until(end) {
             Ok(text) => {
                 let content = TokenContent::new(self.db, text);
                 let content_end = self.current;
                 let span = Span::from_bounds(content_start, content_end);
                 self.consume_n(end.len());
-                let full_end = self.current;
-                let full_span = Span::from_bounds(self.start, full_end);
-                token_fn(content, SpanPair::new(span, full_span))
+                token_fn(content, span)
             }
             Err(err_text) => {
                 let content_end = self.current;
                 let span = Span::from_bounds(content_start, content_end);
-                let full_span = Span::from_bounds(self.start, content_end);
                 let content = TokenContent::new(self.db, err_text);
-                Token::Error {
-                    content,
-                    spans: SpanPair::new(span, full_span),
-                }
+                Token::Error { content, span }
             }
         }
     }
@@ -101,8 +95,7 @@ impl<'db> Lexer<'db> {
                 self.consume(); // \n of \r\n
             }
             let span = Span::from_bounds(self.start, self.current);
-            let spans = SpanPair::new(span, span);
-            Token::Newline { spans }
+            Token::Newline { span }
         } else {
             self.consume(); // Consume the first whitespace
             while !self.is_at_end() && self.peek().is_whitespace() {
@@ -112,8 +105,7 @@ impl<'db> Lexer<'db> {
                 self.consume();
             }
             let span = Span::from_bounds(self.start, self.current);
-            let spans = SpanPair::new(span, span);
-            Token::Whitespace { spans }
+            Token::Whitespace { span }
         }
     }
 
@@ -134,8 +126,7 @@ impl<'db> Lexer<'db> {
         let text = &self.source[text_start..self.current];
         let content = TokenContent::new(self.db, text.to_string());
         let span = Span::from_bounds(self.start, self.current);
-        let spans = SpanPair::new(span, span);
-        Token::Text { content, spans }
+        Token::Text { content, span }
     }
 
     #[inline]
@@ -198,183 +189,3 @@ impl<'db> Lexer<'db> {
         Err(text)
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use camino::Utf8Path;
-
-    use super::*;
-    use crate::tokens::TokenSnapshotVec;
-
-    #[salsa::db]
-    #[derive(Clone)]
-    struct TestDatabase {
-        storage: salsa::Storage<Self>,
-    }
-
-    impl TestDatabase {
-        fn new() -> Self {
-            Self {
-                storage: salsa::Storage::default(),
-            }
-        }
-    }
-
-    #[salsa::db]
-    impl salsa::Database for TestDatabase {}
-
-    #[salsa::db]
-    impl djls_source::Db for TestDatabase {
-        fn read_file_source(&self, path: &Utf8Path) -> Result<String, std::io::Error> {
-            std::fs::read_to_string(path)
-        }
-    }
-
-    #[salsa::db]
-    impl crate::db::Db for TestDatabase {
-        // Template parsing only - semantic analysis moved to djls-semantic
-    }
-
-    #[test]
-    fn test_tokenize_html() {
-        let db = TestDatabase::new();
-        let source = r#"<div class="container" id="main" disabled></div>"#;
-        let mut lexer = Lexer::new(&db, source);
-        let tokens = lexer.tokenize();
-        let snapshot = TokenSnapshotVec(tokens).to_snapshot(&db);
-        insta::assert_yaml_snapshot!(snapshot);
-    }
-
-    #[test]
-    fn test_tokenize_django_variable() {
-        let db = TestDatabase::new();
-        let source = "{{ user.name|default:\"Anonymous\"|title }}";
-        let mut lexer = Lexer::new(&db, source);
-        let tokens = lexer.tokenize();
-        let snapshot = TokenSnapshotVec(tokens).to_snapshot(&db);
-        insta::assert_yaml_snapshot!(snapshot);
-    }
-
-    #[test]
-    fn test_tokenize_django_block() {
-        let db = TestDatabase::new();
-        let source = "{% if user.is_staff %}Admin{% else %}User{% endif %}";
-        let mut lexer = Lexer::new(&db, source);
-        let tokens = lexer.tokenize();
-        let snapshot = TokenSnapshotVec(tokens).to_snapshot(&db);
-        insta::assert_yaml_snapshot!(snapshot);
-    }
-
-    #[test]
-    fn test_tokenize_comments() {
-        let db = TestDatabase::new();
-        let source = r"<!-- HTML comment -->
-{# Django comment #}
-<script>
-    // JS single line comment
-    /* JS multi-line
-       comment */
-</script>
-<style>
-    /* CSS comment */
-</style>";
-        let mut lexer = Lexer::new(&db, source);
-        let tokens = lexer.tokenize();
-        let snapshot = TokenSnapshotVec(tokens).to_snapshot(&db);
-        insta::assert_yaml_snapshot!(snapshot);
-    }
-
-    #[test]
-    fn test_tokenize_script() {
-        let db = TestDatabase::new();
-        let source = r#"<script type="text/javascript">
-    // Single line comment
-    const x = 1;
-    /* Multi-line
-       comment */
-    console.log(x);
-</script>"#;
-        let mut lexer = Lexer::new(&db, source);
-        let tokens = lexer.tokenize();
-        let snapshot = TokenSnapshotVec(tokens).to_snapshot(&db);
-        insta::assert_yaml_snapshot!(snapshot);
-    }
-
-    #[test]
-    fn test_tokenize_style() {
-        let db = TestDatabase::new();
-        let source = r#"<style type="text/css">
-    /* Header styles */
-    .header {
-        color: blue;
-    }
-</style>"#;
-        let mut lexer = Lexer::new(&db, source);
-        let tokens = lexer.tokenize();
-        let snapshot = TokenSnapshotVec(tokens).to_snapshot(&db);
-        insta::assert_yaml_snapshot!(snapshot);
-    }
-
-    #[test]
-    fn test_tokenize_nested_delimiters() {
-        let db = TestDatabase::new();
-        let source = r"{{ user.name }}
-{% if true %}
-{# comment #}
-<!-- html comment -->
-<div>text</div>";
-        let mut lexer = Lexer::new(&db, source);
-        let tokens = lexer.tokenize();
-        let snapshot = TokenSnapshotVec(tokens).to_snapshot(&db);
-        insta::assert_yaml_snapshot!(snapshot);
-    }
-
-    #[test]
-    fn test_tokenize_everything() {
-        let db = TestDatabase::new();
-        let source = r#"<!DOCTYPE html>
-<html>
-<head>
-    <style type="text/css">
-        /* Style header */
-        .header { color: blue; }
-    </style>
-    <script type="text/javascript">
-        // Init app
-        const app = {
-            /* Config */
-            debug: true
-        };
-    </script>
-</head>
-<body>
-    <!-- Header section -->
-    <div class="header" id="main" data-value="123" disabled>
-        {% if user.is_authenticated %}
-            {# Welcome message #}
-            <h1>Welcome, {{ user.name|default:"Guest"|title }}!</h1>
-            {% if user.is_staff %}
-                <span>Admin</span>
-            {% else %}
-                <span>User</span>
-            {% endif %}
-        {% endif %}
-    </div>
-</body>
-</html>"#;
-        let mut lexer = Lexer::new(&db, source);
-        let tokens = lexer.tokenize();
-        let snapshot = TokenSnapshotVec(tokens).to_snapshot(&db);
-        insta::assert_yaml_snapshot!(snapshot);
-    }
-
-    #[test]
-    fn test_tokenize_unclosed_style() {
-        let db = TestDatabase::new();
-        let source = "<style>body { color: blue; ";
-        let mut lexer = Lexer::new(&db, source);
-        let tokens = lexer.tokenize();
-        let snapshot = TokenSnapshotVec(tokens).to_snapshot(&db);
-        insta::assert_yaml_snapshot!(snapshot);
-    }
-}
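
The point of the revert: each token goes back to carrying a single content Span instead of a SpanPair. Below is a minimal, self-contained sketch of the trade-off, under stated assumptions: Span here stands in for djls_source::Span, and SpanPair's field layout is inferred from the SpanPair::new(span, full_span) calls removed above, not taken from the crate's spans module.

// A minimal sketch, not the crate's real code: `Span` stands in for
// djls_source::Span, and `SpanPair`'s field layout is inferred from the
// `SpanPair::new(span, full_span)` calls removed in the diff above.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Span {
    start: usize,
    end: usize,
}

impl Span {
    fn from_bounds(start: usize, end: usize) -> Self {
        Span { start, end }
    }
}

// Pre-revert shape: every token dragged both spans around.
#[allow(dead_code)]
struct SpanPair {
    content: Span, // `user.name` inside `{{ user.name }}`
    full: Span,    // the whole `{{ user.name }}`, delimiters included
}

// Post-revert, only the content span is stored. For a well-formed
// construct the full extent is still recoverable, because the Django
// delimiters have fixed widths (`{{`, `{%`, `{#` and their closers are
// all two bytes, matching `opening_len = 2` in `lex_django_construct`).
fn full_span(content: Span, opening_len: usize, closing_len: usize) -> Span {
    Span::from_bounds(content.start - opening_len, content.end + closing_len)
}

fn main() {
    // `{{ user.name }}`: the lexer's content span starts after `{{`.
    let content = Span::from_bounds(2, 13);
    assert_eq!(full_span(content, 2, 2), Span::from_bounds(0, 15));
}

Note the one place the symmetry breaks: in the Err arm of lex_django_construct the closing delimiter was never found, so a derived full span would be a guess; the lexer simply reports the content span on Token::Error.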

crates/djls-templates/src/lib.rs

Lines changed: 5 additions & 4 deletions
@@ -50,7 +50,6 @@ mod error;
 mod lexer;
 pub mod nodelist;
 mod parser;
-mod spans;
 mod tokens;
 
 pub use db::Db;
@@ -65,7 +64,6 @@ pub use nodelist::NodeList;
 pub use parser::ParseError;
 pub use parser::Parser;
 use salsa::Accumulator;
-use spans::SpanPair;
 use tokens::TokenStream;
 
 /// Lex a template file into tokens.
@@ -110,9 +108,12 @@ pub fn parse_template(db: &dyn Db, file: File) -> Option<NodeList<'_>> {
 
     let text = source.as_ref();
     let span = djls_source::Span::from_bounds(0, text.len());
-    let spans = SpanPair::new(span, span);
     let error_node = Node::Error {
-        node: ErrorNode { spans, error: err },
+        node: ErrorNode {
+            span,
+            full_span: span,
+            error: err,
+        },
     };
 
     NodeList::new(db, vec![error_node])
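
One call site cannot simply drop the second span: the fallback error node in parse_template keeps both, now spelled out as plain fields on ErrorNode. A hypothetical sketch of that shape, assuming only what the hunk shows (the crate's real ErrorNode, Span, and ParseError definitions are not part of this diff):

// Hypothetical stand-ins, inferred from the hunk above; the real types
// live elsewhere in djls-templates and djls-source.
#[derive(Clone, Copy, Debug)]
struct Span {
    start: usize,
    end: usize,
}

#[derive(Debug)]
struct ParseError(String);

// With SpanPair gone, ErrorNode carries two plain span fields.
#[derive(Debug)]
struct ErrorNode {
    span: Span,
    full_span: Span,
    error: ParseError,
}

// For a whole-file parse failure the two spans coincide, which is exactly
// what `full_span: span` in the hunk encodes.
fn whole_file_error(source_len: usize, error: ParseError) -> ErrorNode {
    let span = Span { start: 0, end: source_len };
    ErrorNode { span, full_span: span, error }
}

fn main() {
    let node = whole_file_error(42, ParseError("unclosed tag".into()));
    println!("{node:?}");
}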
