@@ -1,15 +1,9 @@
 use djls_source::Span;

 use crate::db::Db as TemplateDb;
+use crate::tokens::TagDelimiter;
 use crate::tokens::Token;
 use crate::tokens::TokenContent;
-use crate::tokens::BLOCK_TAG_END;
-use crate::tokens::BLOCK_TAG_START;
-use crate::tokens::COMMENT_TAG_END;
-use crate::tokens::COMMENT_TAG_START;
-use crate::tokens::DJANGO_TAG_LEN;
-use crate::tokens::VARIABLE_TAG_END;
-use crate::tokens::VARIABLE_TAG_START;

 pub struct Lexer<'db> {
     db: &'db dyn TemplateDb,
@@ -37,19 +31,25 @@ impl<'db> Lexer<'db> {

         let token =
             match self.peek() {
-                '{' => match self.peek_next() {
-                    '%' => self.lex_django_tag(BLOCK_TAG_END, |content, span| Token::Block {
-                        content,
-                        span,
-                    }),
-                    '{' => self.lex_django_tag(VARIABLE_TAG_END, |content, span| {
-                        Token::Variable { content, span }
-                    }),
-                    '#' => self.lex_django_tag(COMMENT_TAG_END, |content, span| {
-                        Token::Comment { content, span }
-                    }),
-                    _ => self.lex_text(),
-                },
+                '{' => {
+                    let remaining = &self.source[self.current..];
+
+                    match TagDelimiter::from_input(remaining) {
+                        Some(TagDelimiter::Block) => self
+                            .lex_django_tag(TagDelimiter::Block, |content, span| {
+                                Token::Block { content, span }
+                            }),
+                        Some(TagDelimiter::Variable) => self
+                            .lex_django_tag(TagDelimiter::Variable, |content, span| {
+                                Token::Variable { content, span }
+                            }),
+                        Some(TagDelimiter::Comment) => self
+                            .lex_django_tag(TagDelimiter::Comment, |content, span| {
+                                Token::Comment { content, span }
+                            }),
+                        None => self.lex_text(),
+                    }
+                }
                 c if c.is_whitespace() => self.lex_whitespace(c),
                 _ => self.lex_text(),
             };
@@ -64,18 +64,18 @@ impl<'db> Lexer<'db> {

     fn lex_django_tag(
         &mut self,
-        end: &str,
+        delimiter: TagDelimiter,
         token_fn: impl FnOnce(TokenContent<'db>, Span) -> Token<'db>,
     ) -> Token<'db> {
-        let content_start = self.start + DJANGO_TAG_LEN as usize;
-        self.consume_n(DJANGO_TAG_LEN as usize);
+        let content_start = self.start + TagDelimiter::LENGTH;
+        self.consume_n(TagDelimiter::LENGTH);

-        match self.consume_until(end) {
+        match self.consume_until(delimiter.closer()) {
             Ok(text) => {
                 let len = text.len();
                 let content = TokenContent::new(self.db, text);
                 let span = Span::from_parts(content_start, len);
-                self.consume_n(end.len());
+                self.consume_n(delimiter.closer().len());
                 token_fn(content, span)
             }
             Err(err_text) => {
@@ -116,10 +116,10 @@ impl<'db> Lexer<'db> {
         let text_start = self.current;

         while !self.is_at_end() {
-            if self.source[self.current..].starts_with(BLOCK_TAG_START)
-                || self.source[self.current..].starts_with(VARIABLE_TAG_START)
-                || self.source[self.current..].starts_with(COMMENT_TAG_START)
-                || self.source[self.current..].starts_with('\n')
+            let slice = &self.source[self.current..];
+            if (self.peek() == '{' && TagDelimiter::from_input(slice).is_some())
+                || slice.starts_with('\n')
+                || slice.starts_with('\r')
             {
                 break;
             }
@@ -137,13 +137,6 @@ impl<'db> Lexer<'db> {
         self.source[self.current..].chars().next().unwrap_or('\0')
     }

-    #[inline]
-    fn peek_next(&self) -> char {
-        let mut chars = self.source[self.current..].chars();
-        chars.next(); // Skip current
-        chars.next().unwrap_or('\0')
-    }
-
     #[inline]
     fn is_at_end(&self) -> bool {
         self.current >= self.source.len()
@@ -167,15 +160,12 @@ impl<'db> Lexer<'db> {
         let mut fallback: Option<usize> = None;

         while self.current < self.source.len() {
-            if self.source[self.current..].starts_with(delimiter) {
+            let slice = &self.source[self.current..];
+            if slice.starts_with(delimiter) {
                 return Ok(self.source[offset..self.current].to_string());
             }

-            if fallback.is_none()
-                && (self.source[self.current..].starts_with(BLOCK_TAG_START)
-                    || self.source[self.current..].starts_with(VARIABLE_TAG_START)
-                    || self.source[self.current..].starts_with(COMMENT_TAG_START))
-            {
+            if fallback.is_none() && TagDelimiter::from_input(slice).is_some() {
                 fallback = Some(self.current);
             }

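For reference, the change leans on the new `TagDelimiter` type imported from `crate::tokens`, whose definition is not part of this diff. Below is a minimal sketch of what such a type could look like, inferred only from how the lexer calls it here (`from_input`, `closer`, and the `LENGTH` constant); the actual implementation in `crate::tokens` may differ.

// Hypothetical sketch; the real TagDelimiter in crate::tokens is not shown in this diff.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum TagDelimiter {
    Block,    // {% ... %}
    Variable, // {{ ... }}
    Comment,  // {# ... #}
}

impl TagDelimiter {
    /// Django opening and closing delimiters are all two bytes long.
    pub const LENGTH: usize = 2;

    /// Classify the delimiter at the start of `input`, if any.
    pub fn from_input(input: &str) -> Option<Self> {
        match input.get(..Self::LENGTH)? {
            "{%" => Some(Self::Block),
            "{{" => Some(Self::Variable),
            "{#" => Some(Self::Comment),
            _ => None,
        }
    }

    /// The closing delimiter the lexer scans for after an opener.
    pub fn closer(self) -> &'static str {
        match self {
            Self::Block => "%}",
            Self::Variable => "}}",
            Self::Comment => "#}",
        }
    }
}

With a shape like this, a single `from_input` check replaces the old `peek_next` dispatch and the seven string constants the diff removes, and `closer()` supplies `consume_until` with its end marker.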