Skip to content

Commit 568f1d2

Browse files
author
Gerit Wagner
committed
update tests/coverage
1 parent 051caeb commit 568f1d2

19 files changed

+71
-74
lines changed

.coverage

0 Bytes
Binary file not shown.

search_query/constants.py

Lines changed: 0 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -59,18 +59,6 @@ class Token:
5959
type: TokenTypes
6060
position: Tuple[int, int]
6161

62-
def is_parenthesis(self) -> bool:
63-
"""Check if token is a parenthesis"""
64-
return self.type in (TokenTypes.PARENTHESIS_OPEN, TokenTypes.PARENTHESIS_CLOSED)
65-
66-
def is_search_term(self) -> bool:
67-
"""Check if token is a search term"""
68-
return self.type == TokenTypes.SEARCH_TERM
69-
70-
def is_field(self) -> bool:
71-
"""Check if token is a field"""
72-
return self.type == TokenTypes.FIELD
73-
7462
def is_operator(self) -> bool:
7563
"""Check if token is an operator"""
7664
return self.type in (TokenTypes.LOGIC_OPERATOR, TokenTypes.PROXIMITY_OPERATOR)

search_query/database.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,13 +4,13 @@
44

55
try:
66
from importlib.resources import files # Python 3.9+
7-
except ImportError:
7+
except ImportError: # pragma: no cover
88
from importlib_resources import files # pip install importlib_resources
99

1010
from search_query.parser import parse
1111
from search_query.search_file import load_search_file
1212

13-
if typing.TYPE_CHECKING:
13+
if typing.TYPE_CHECKING: # pragma: no cover
1414
from search_query.query import Query
1515

1616
# mypy: disable-error-code=attr-defined

search_query/ebsco/linter.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313
from search_query.ebsco.constants import VALID_FIELDS_REGEX
1414
from search_query.linter_base import QueryStringLinter
1515

16-
if typing.TYPE_CHECKING:
16+
if typing.TYPE_CHECKING: # pragma: no cover
1717
from search_query.query import Query
1818

1919

search_query/ebsco/serializer.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
import typing
66

77

8-
if typing.TYPE_CHECKING: # pragma: no
8+
if typing.TYPE_CHECKING: # pragma: no cover
99
from search_query.query import Query
1010

1111
# pylint: disable=too-few-public-methods

search_query/exception.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
import typing
66

77

8-
if typing.TYPE_CHECKING:
8+
if typing.TYPE_CHECKING: # pragma: no cover
99
from search_query.linter_base import QueryStringLinter, QueryListLinter
1010

1111

search_query/generic/linter.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
from search_query.constants import Token
99
from search_query.linter_base import QueryStringLinter
1010

11-
if typing.TYPE_CHECKING:
11+
if typing.TYPE_CHECKING: # pragma: no cover
1212
from search_query.query import Query
1313

1414

search_query/linter_base.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020
from search_query.exception import QuerySyntaxError
2121
from search_query.utils import format_query_string_positions
2222

23-
if typing.TYPE_CHECKING:
23+
if typing.TYPE_CHECKING: # pragma: no cover
2424
from search_query.query import Query
2525

2626

search_query/parser.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
from search_query.wos.parser import WOSListParser
1515
from search_query.wos.parser import WOSParser
1616

17-
if typing.TYPE_CHECKING:
17+
if typing.TYPE_CHECKING: # pragma: no cover
1818
from search_query.parser_base import QueryListParser
1919
from search_query.parser_base import QueryStringParser
2020

search_query/parser_base.py

Lines changed: 37 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313
from search_query.constants import TokenTypes
1414
from search_query.query import Query
1515

16-
if typing.TYPE_CHECKING:
16+
if typing.TYPE_CHECKING: # pragma: no cover
1717
from search_query.linter_base import QueryStringLinter
1818

1919

@@ -126,51 +126,51 @@ def get_token_str(self, token_nr: str) -> str:
126126
"get_token_str method must be implemented by inheriting classes"
127127
)
128128

129-
def _replace_token_nr_by_query(
130-
self, query_list: list, token_nr: str, token_content: dict
131-
) -> None:
132-
for i, (content, pos) in enumerate(query_list):
133-
token_str = self.get_token_str(token_nr)
134-
if token_str in content:
135-
query_list.pop(i)
136-
137-
content_before = content[: content.find(token_str)]
138-
content_before_pos = (pos[0], pos[0] + len(content_before))
139-
content_after = content[content.find(token_str) + len(token_str) :]
140-
content_after_pos = (
141-
content_before_pos[1] + len(token_str),
142-
content_before_pos[1] + len(content_after) + len(token_str),
143-
)
129+
# def _replace_token_nr_by_query(
130+
# self, query_list: list, token_nr: str, token_content: dict
131+
# ) -> None:
132+
# for i, (content, pos) in enumerate(query_list):
133+
# token_str = self.get_token_str(token_nr)
134+
# if token_str in content:
135+
# query_list.pop(i)
136+
137+
# content_before = content[: content.find(token_str)]
138+
# content_before_pos = (pos[0], pos[0] + len(content_before))
139+
# content_after = content[content.find(token_str) + len(token_str) :]
140+
# content_after_pos = (
141+
# content_before_pos[1] + len(token_str),
142+
# content_before_pos[1] + len(content_after) + len(token_str),
143+
# )
144144

145-
new_content = token_content["node_content"]
146-
new_pos = token_content["content_pos"]
145+
# new_content = token_content["node_content"]
146+
# new_pos = token_content["content_pos"]
147147

148-
if content_after:
149-
query_list.insert(i, (content_after, content_after_pos))
148+
# if content_after:
149+
# query_list.insert(i, (content_after, content_after_pos))
150150

151-
# Insert the sub-query from the list with "artificial parentheses"
152-
# (positions with length 0)
153-
query_list.insert(i, (")", (-1, -1)))
154-
query_list.insert(i, (new_content, new_pos))
155-
query_list.insert(i, ("(", (-1, -1)))
151+
# # Insert the sub-query from the list with "artificial parentheses"
152+
# # (positions with length 0)
153+
# query_list.insert(i, (")", (-1, -1)))
154+
# query_list.insert(i, (new_content, new_pos))
155+
# query_list.insert(i, ("(", (-1, -1)))
156156

157-
if content_before:
158-
query_list.insert(i, (content_before, content_before_pos))
157+
# if content_before:
158+
# query_list.insert(i, (content_before, content_before_pos))
159159

160-
break
160+
# break
161161

162-
def dict_to_positioned_list(self) -> list:
163-
"""Convert a node to a positioned list."""
162+
# def dict_to_positioned_list(self) -> list:
163+
# """Convert a node to a positioned list."""
164164

165-
root_node = list(self.query_dict.values())[-1]
166-
query_list = [(root_node["node_content"], root_node["content_pos"])]
165+
# root_node = list(self.query_dict.values())[-1]
166+
# query_list = [(root_node["node_content"], root_node["content_pos"])]
167167

168-
for token_nr, token_content in reversed(self.query_dict.items()):
169-
# iterate over query_list if token_nr is in the content,
170-
# split the content and insert the token_content, updating the content_pos
171-
self._replace_token_nr_by_query(query_list, token_nr, token_content)
168+
# for token_nr, token_content in reversed(self.query_dict.items()):
169+
# # iterate over query_list if token_nr is in the content,
170+
# # split the content and insert the token_content, updating the content_pos
171+
# self._replace_token_nr_by_query(query_list, token_nr, token_content)
172172

173-
return query_list
173+
# return query_list
174174

175175
@abstractmethod
176176
def parse(self) -> Query:

0 commit comments

Comments (0)