tests/test_segmentcollection.py (2 changes: 1 addition & 1 deletion)

@@ -80,7 +80,7 @@ def test_get_segment_w_predicate():

 def test_split_by():
     def _serialize(collections: Iterable[RawSegmentCollection]) -> List[List[str]]:
-        lst = []
+        lst: list[str] = []
Member commented:

this is python 3.10 anyway...

Contributor Author replied:

https://peps.python.org/pep-0585/ is available since 3.9.

         global_lst = []
         for collection in collections:
             if lst:
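
For context on the exchange above: PEP 585 (Python 3.9) made the built-in container types usable directly as generic annotations, so list[str] works at runtime without typing.List or a "from __future__ import annotations" import. A minimal sketch, not part of the diff; the values are illustrative only:

lst: list[str] = []              # built-in list parameterised directly (PEP 585, 3.9+)
nested: list[list[str]] = []     # nesting works the same way
lst.append("UNB")
nested.append(lst)
print(nested)                    # [['UNB']]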
tests/test_tokenizer.py (7 changes: 5 additions & 2 deletions)

@@ -13,6 +13,7 @@
 #
 # You should have received a copy of the GNU Lesser General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
+from typing import Optional
 import pytest

 from pydifact.token import Token

@@ -22,12 +23,14 @@


 @pytest.fixture
-def tokenizer():
+def tokenizer() -> Tokenizer:
     return Tokenizer()


 def _assert_tokens(
-    collection: str, expected: list = None, error_message: str = None
+    collection: str,
+    expected: Optional[list] = None,
+    error_message: Optional[str] = None,
 ) -> None:
"""Helper function to accelerate tokenizer testing."""

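
A side note on the Optional[...] annotations added above: a parameter with a None default needs an explicit Optional (or "X | None") annotation under strict type checking, since recent mypy versions no longer treat "= None" as implicitly optional by default. A minimal, hypothetical sketch (the function name is illustrative, not the real helper):

from typing import Optional

def assert_tokens_sketch(expected: Optional[list] = None) -> None:
    # Normalise the None default before use; the explicit Optional tells
    # the type checker that None is a legitimate value here.
    if expected is None:
        expected = []
    assert isinstance(expected, list)

assert_tokens_sketch()          # default path, expected becomes []
assert_tokens_sketch(["UNB"])   # explicit list argument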