-def get_parsed_tokens(tokens, parsed, token_idx, depth=-1):
+def get_parsed_tokens(tokens, parsed, token_idx, depth=0):
     """
     start -> tabs -> property -> equals -> value -> newline
       ^   v                                     ^   v
@@ -10,46 +10,40 @@ def get_parsed_tokens(tokens, parsed, token_idx, depth=-1):
     while token_idx[0] < len(tokens):
         token = tokens[token_idx[0]]
 
-        if depth == -1:
-            parsed.append([])
-            get_parsed_tokens(tokens, parsed[-1], token_idx, depth + 1)
-
-        elif state == "start" and token["type"] == "TABS" and is_deeper(depth, token):
-            parsed.append(
-                { "type": "lines_tokens", "content": [
-                    [
-
-                    ]
-                ]}
-            )
-            get_parsed_tokens(tokens, parsed[-1]["content"][0], token_idx, depth + 1)
+        if state == "start" and token["type"] == "TABS" and is_deeper(depth, token):
+            children = { "type": "children", "content": [] }
+            parsed[-1].append(children)
+            get_parsed_tokens(tokens, children["content"], token_idx, depth + 1)
+        elif state == "start" and is_less_deep(depth, token):
+            return
 
-        elif state == "start" and token["type"] == "TABS":
-            parsed.append( { "type": "extra", "content": token["content"] } )
+        elif state == "start":
+            parsed.append([])
+            state = "not-start"
+        elif state == "not-start" and token["type"] == "TABS":
+            parsed[-1].append( { "type": "extra", "content": token["content"] } )
             state = "tabs"
             token_idx[0] += 1
-        elif (state == "start" or state == "tabs") and token["type"] == "WORD":
-            parsed.append( { "type": "property", "content": token["content"] } )
+        elif (state == "not-start" or state == "tabs") and token["type"] == "WORD":
+            parsed[-1].append( { "type": "property", "content": token["content"] } )
             state = "property"
             token_idx[0] += 1
-        elif state == "start" and is_less_deep(depth, token):
-            return
 
         elif state == "property" and token["type"] == "EQUALS":
-            parsed.append( { "type": "extra", "content": token["content"] } )
+            parsed[-1].append( { "type": "extra", "content": token["content"] } )
             state = "equals"
             token_idx[0] += 1
         elif state == "equals" and token["type"] == "WORD":
-            parsed.append( { "type": "value", "content": token["content"] } )
+            parsed[-1].append( { "type": "value", "content": token["content"] } )
             state = "value"
             token_idx[0] += 1
         elif state == "value" and token["type"] == "NEWLINES":
-            parsed.append( { "type": "extra", "content": token["content"] } )
+            parsed[-1].append( { "type": "extra", "content": token["content"] } )
             state = "start"
             token_idx[0] += 1
 
         else:
-            parsed.append( { "type": "extra", "content": token["content"] } )
+            parsed[-1].append( { "type": "extra", "content": token["content"] } )
             token_idx[0] += 1
 
     return parsed
@@ -65,4 +59,4 @@ def is_deeper(depth, token):
 
 
 def get_depth(token):
-    return len(token["content"])
+    return len(token["content"]) if token["type"] == "TABS" else 0
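To make the new structure easier to review, here is a minimal sketch of the data shapes involved. Everything in it is an assumption drawn from this diff alone: the token list is hand-written (the tokenizer and its handling of spaces are not part of this change), `state` is assumed to start at "start", and `is_deeper`/`is_less_deep` are assumed to compare `get_depth(token)` against the current `depth`.

# Hypothetical sketch, not repository code: it only illustrates the token
# format get_parsed_tokens consumes and the nested layout the new "children"
# branch is meant to produce.

# Hand-written tokens for the two input lines:
#   foo=1
#   \tbar=2
tokens = [
    {"type": "WORD",     "content": "foo"},
    {"type": "EQUALS",   "content": "="},
    {"type": "WORD",     "content": "1"},
    {"type": "NEWLINES", "content": "\n"},
    {"type": "TABS",     "content": "\t"},
    {"type": "WORD",     "content": "bar"},
    {"type": "EQUALS",   "content": "="},
    {"type": "WORD",     "content": "2"},
    {"type": "NEWLINES", "content": "\n"},
]

# Expected shape: a list of lines, each line a list of token dicts; an indented
# block is attached to the line above it as a {"type": "children", ...} node
# whose "content" holds the nested lines.
expected = [
    [
        {"type": "property", "content": "foo"},
        {"type": "extra",    "content": "="},
        {"type": "value",    "content": "1"},
        {"type": "extra",    "content": "\n"},
        {"type": "children", "content": [
            [
                {"type": "extra",    "content": "\t"},
                {"type": "property", "content": "bar"},
                {"type": "extra",    "content": "="},
                {"type": "value",    "content": "2"},
                {"type": "extra",    "content": "\n"},
            ],
        ]},
    ],
]

# A call would then look roughly like:
#   parsed = get_parsed_tokens(tokens, [], [0])
# where token_idx is a one-element list so the recursive calls share and
# advance the same cursor.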