Skip to content

Commit 431fb36

Browse files
committed
add filters + formatters __init__.pyi
1 parent: b3485bd · commit: 431fb36

File tree

4 files changed

+29
-30
lines changed

4 files changed

+29
-30
lines changed
Lines changed: 23 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -1,58 +1,58 @@
1-
from collections.abc import Generator, Iterable, Iterator
1+
from collections.abc import Callable, Generator, Iterable, Iterator
22
from typing import Any
33

44
from pygments.filter import Filter
55
from pygments.lexer import Lexer
66
from pygments.token import _TokenType
77

8-
def find_filter_class(filtername): ...
9-
def get_filter_by_name(filtername, **options): ...
8+
def find_filter_class(filtername: str) -> type[Filter]: ...
9+
def get_filter_by_name(filtername: str, **options: Any) -> Filter: ...
1010
def get_all_filters() -> Generator[str, None, None]: ...
1111

1212
class CodeTagFilter(Filter):
1313
tag_re: Any
14-
def __init__(self, **options) -> None: ...
14+
def __init__(self, **options: Any) -> None: ...
1515
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
1616

1717
class SymbolFilter(Filter):
18-
latex_symbols: Any
19-
isabelle_symbols: Any
20-
lang_map: Any
21-
symbols: Any
22-
def __init__(self, **options) -> None: ...
18+
latex_symbols: dict[str, str]
19+
isabelle_symbols: dict[str, str]
20+
lang_map: dict[str, dict[str, str]]
21+
symbols: dict[str, str]
22+
def __init__(self, **options: Any) -> None: ...
2323
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
2424

2525
class KeywordCaseFilter(Filter):
26-
convert: Any
27-
def __init__(self, **options) -> None: ...
26+
convert: Callable[[str], str]
27+
def __init__(self, **options: Any) -> None: ...
2828
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
2929

3030
class NameHighlightFilter(Filter):
31-
names: Any
32-
tokentype: Any
33-
def __init__(self, **options) -> None: ...
31+
names: set[str]
32+
tokentype: _TokenType
33+
def __init__(self, **options: Any) -> None: ...
3434
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
3535

3636
class ErrorToken(Exception): ...
3737

3838
class RaiseOnErrorTokenFilter(Filter):
39-
exception: Any
40-
def __init__(self, **options) -> None: ...
39+
exception: type[Exception]
40+
def __init__(self, **options: Any) -> None: ...
4141
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
4242

4343
class VisibleWhitespaceFilter(Filter):
44-
wstt: Any
45-
def __init__(self, **options) -> None: ...
44+
wstt: bool
45+
def __init__(self, **options: Any) -> None: ...
4646
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
4747

4848
class GobbleFilter(Filter):
49-
n: Any
50-
def __init__(self, **options) -> None: ...
51-
def gobble(self, value, left): ...
49+
n: int
50+
def __init__(self, **options: Any) -> None: ...
51+
def gobble(self, value: str, left: int) -> tuple[str, int]: ...
5252
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
5353

5454
class TokenMergeFilter(Filter):
55-
def __init__(self, **options) -> None: ...
55+
def __init__(self, **options: Any) -> None: ...
5656
def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
5757

58-
FILTERS: Any
58+
FILTERS: dict[str, type[Filter]]

stubs/Pygments/pygments/formatters/__init__.pyi

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@ from .terminal import TerminalFormatter as TerminalFormatter
2020
from .terminal256 import Terminal256Formatter as Terminal256Formatter, TerminalTrueColorFormatter as TerminalTrueColorFormatter
2121

2222
def get_all_formatters() -> Generator[type[Formatter[Any]], None, None]: ...
23-
def get_formatter_by_name(_alias, **options): ...
24-
def load_formatter_from_file(filename, formattername: str = "CustomFormatter", **options): ...
25-
def get_formatter_for_filename(fn, **options): ...
23+
def find_formatter_class(alias: str) -> type[Formatter[Any]]: ...
24+
def get_formatter_by_name(_alias: str, **options: Any) -> Formatter[Any]: ...
25+
def load_formatter_from_file(filename: str, formattername: str = "CustomFormatter", **options: Any) -> Formatter[Any]: ...
26+
def get_formatter_for_filename(fn: str, **options: Any) -> Formatter[Any]: ...
Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1 @@
1-
from typing import Any
2-
3-
FORMATTERS: Any
1+
FORMATTERS: dict[str, tuple[str, str, tuple[str, ...], tuple[str, ...], str]]

stubs/Pygments/pygments/lexer.pyi

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@ class Lexer(metaclass=LexerMeta):
3131
encoding: str
3232
filters: list[Filter]
3333
def __init__(self, **options: Any) -> None: ...
34-
def add_filter(self, filter_: Filter, **options: Any) -> None: ...
34+
def add_filter(self, filter_: str | Filter, **options: Any) -> None: ...
3535
def get_tokens(self, text: str, unfiltered: bool = False) -> Iterator[tuple[_TokenType, str]]: ...
3636
def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ...
3737

0 commit comments

Comments (0)