Commit b957b47

Refactor so that is_deeper(), is_same_depth() and is_shallower() all call get_depth()
1 parent 4597964 commit b957b47


Python/ini_converting/ini_parser.py (21 additions, 18 deletions)

@@ -15,12 +15,12 @@ def get_parsed_tokens(tokens, parsed=None, token_idx=None, depth=0):
 	while token_idx[0] < len(tokens):
 		token = tokens[token_idx[0]]
 
-		if state == "newline" and is_deeper(depth, token):
+		if state == "newline" and is_deeper(depth, token, tokens, token_idx[0] + 1):
 			children = { "type": "children", "content": [] }
 			parsed[-1].append(children)
 			get_parsed_tokens(tokens, children["content"], token_idx, depth + 1)
 			# "state" is deliberately not being changed here.
-		elif state == "newline" and is_same_depth(token, depth):
+		elif state == "newline" and is_same_depth(depth, token, tokens, token_idx[0] + 1):
 			parsed.append([])
 			state = "start"
 		elif state == "newline" and is_shallower(depth, token, tokens, token_idx[0] + 1):
@@ -53,11 +53,11 @@ def get_parsed_tokens(tokens, parsed=None, token_idx=None, depth=0):
 	return parsed
 
 
-def is_deeper(depth, token):
+def is_deeper(depth, token, tokens, next_token_idx):
 	if token["type"] != "TABS":
 		return False
 
-	new_depth = get_depth(token)
+	new_depth = get_depth(token, tokens, next_token_idx)
 
 	if new_depth > depth + 1:
 		line, column = get_token_position(token)
@@ -66,29 +66,32 @@ def is_deeper(depth, token):
 	return new_depth > depth
 
 
-def get_depth(token):
-	return len(token["content"])
-
-
-def is_same_depth(token, depth):
-	return token["type"] == "TABS" and get_depth(token) == depth
-
-
-def is_shallower(depth, token, tokens, next_token_idx):
-	if depth == 0 or token["type"] == "NEWLINES":
-		return False
+def get_depth(token, tokens, next_token_idx):
+	if token["type"] == "NEWLINES":
+		return -1
+	elif token["type"] == "WORD":
+		return 0
 
 	while next_token_idx < len(tokens):
 		next_token = tokens[next_token_idx]
 
 		if next_token["type"] == "WORD":
-			return True
+			return len(token["content"]) # Counts the number of tabs.
 		elif next_token["type"] == "NEWLINES":
-			return False
+			return -1
 
 		next_token_idx += 1
 
-	return False # Reached when the while-loop read the last character of the file and didn't return.
+	return -1 # Reached when the while-loop read the last character of the file and didn't return.
+
+
+def is_same_depth(depth, token, tokens, next_token_idx):
+	return token["type"] == "TABS" and get_depth(token, tokens, next_token_idx) == depth
+
+
+def is_shallower(depth, token, tokens, next_token_idx):
+	new_depth = get_depth(token, tokens, next_token_idx)
+	return new_depth != -1 and new_depth < depth
 
 
 def get_token_position(token):
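
For context, a minimal self-contained sketch (not part of the commit) of how is_deeper(), is_same_depth() and is_shallower() all route through get_depth() after this refactor. The helper bodies and the token format ({"type": ..., "content": ...}) follow the diff above; the simplified is_deeper() (the depth-jump error check is omitted), the example token stream, and the print calls are hypothetical.

	def get_depth(token, tokens, next_token_idx):
		if token["type"] == "NEWLINES":
			return -1
		elif token["type"] == "WORD":
			return 0

		# token is a TABS token; only count its tabs if a WORD follows before the
		# next newline, i.e. the line isn't blank or whitespace-only.
		while next_token_idx < len(tokens):
			next_token = tokens[next_token_idx]
			if next_token["type"] == "WORD":
				return len(token["content"]) # Counts the number of tabs.
			elif next_token["type"] == "NEWLINES":
				return -1
			next_token_idx += 1

		return -1 # Reached when the file ended before a WORD or NEWLINES token.


	def is_deeper(depth, token, tokens, next_token_idx):
		# Simplified: the committed version also reports an error when the
		# indentation jumps by more than one level at once.
		if token["type"] != "TABS":
			return False
		return get_depth(token, tokens, next_token_idx) > depth


	def is_same_depth(depth, token, tokens, next_token_idx):
		return token["type"] == "TABS" and get_depth(token, tokens, next_token_idx) == depth


	def is_shallower(depth, token, tokens, next_token_idx):
		new_depth = get_depth(token, tokens, next_token_idx)
		return new_depth != -1 and new_depth < depth


	# Hypothetical token stream for the input "foo\n\tbar\n".
	tokens = [
		{"type": "WORD", "content": "foo"},
		{"type": "NEWLINES", "content": "\n"},
		{"type": "TABS", "content": "\t"},
		{"type": "WORD", "content": "bar"},
		{"type": "NEWLINES", "content": "\n"},
	]

	print(is_deeper(0, tokens[2], tokens, 3))      # True: "\tbar" is one level below depth 0
	print(is_same_depth(1, tokens[2], tokens, 3))  # True: that line sits exactly at depth 1
	print(is_shallower(1, tokens[2], tokens, 3))   # False: depth 1 is not shallower than 1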
