diff --git a/stubs/Pygments/pygments/__init__.pyi b/stubs/Pygments/pygments/__init__.pyi
index 03d982778a29..2590f26165e7 100644
--- a/stubs/Pygments/pygments/__init__.pyi
+++ b/stubs/Pygments/pygments/__init__.pyi
@@ -13,10 +13,10 @@ __all__ = ["lex", "format", "highlight"]
 
 def lex(code: str, lexer: Lexer) -> Iterator[tuple[_TokenType, str]]: ...
 @overload
-def format(tokens, formatter: Formatter[_T], outfile: SupportsWrite[_T]) -> None: ...
+def format(tokens: Iterator[tuple[_TokenType, str]], formatter: Formatter[_T], outfile: SupportsWrite[_T]) -> None: ...
 @overload
-def format(tokens, formatter: Formatter[_T], outfile: None = None) -> _T: ...
+def format(tokens: Iterator[tuple[_TokenType, str]], formatter: Formatter[_T], outfile: None = None) -> _T: ...
 @overload
-def highlight(code, lexer, formatter: Formatter[_T], outfile: SupportsWrite[_T]) -> None: ...
+def highlight(code: str, lexer: Lexer, formatter: Formatter[_T], outfile: SupportsWrite[_T]) -> None: ...
 @overload
-def highlight(code, lexer, formatter: Formatter[_T], outfile: None = None) -> _T: ...
+def highlight(code: str, lexer: Lexer, formatter: Formatter[_T], outfile: None = None) -> _T: ...
diff --git a/stubs/Pygments/pygments/cmdline.pyi b/stubs/Pygments/pygments/cmdline.pyi
index 446595b76ffb..cf2d6ddfa9ff 100644
--- a/stubs/Pygments/pygments/cmdline.pyi
+++ b/stubs/Pygments/pygments/cmdline.pyi
@@ -1,8 +1,10 @@
 import argparse
+import sys
+from collections.abc import Sequence
 
-def main_inner(parser, argns): ...
+def main_inner(parser: argparse.ArgumentParser, argns: argparse.Namespace) -> int: ...
 
 class HelpFormatter(argparse.HelpFormatter):
-    def __init__(self, prog, indent_increment: int = 2, max_help_position: int = 16, width=None) -> None: ...
+    def __init__(self, prog: str, indent_increment: int = 2, max_help_position: int = 16, width: int | None = None) -> None: ...
 
-def main(args=...): ...
+def main(args: Sequence[str] = sys.argv) -> int: ...
diff --git a/stubs/Pygments/pygments/console.pyi b/stubs/Pygments/pygments/console.pyi
index 0de5c60fba94..8c580c52a4bf 100644
--- a/stubs/Pygments/pygments/console.pyi
+++ b/stubs/Pygments/pygments/console.pyi
@@ -1,10 +1,8 @@
-from typing import Any
-
 esc: str
-codes: Any
-dark_colors: Any
-light_colors: Any
+codes: dict[str, str]
+dark_colors: list[str]
+light_colors: list[str]
 
-def reset_color(): ...
-def colorize(color_key, text): ...
-def ansiformat(attr, text): ...
+def reset_color() -> str: ...
+def colorize(color_key: str, text: str) -> str: ...
+def ansiformat(attr: str, text: str) -> str: ...
diff --git a/stubs/Pygments/pygments/filter.pyi b/stubs/Pygments/pygments/filter.pyi
index d11b994f87fa..155b778dc919 100644
--- a/stubs/Pygments/pygments/filter.pyi
+++ b/stubs/Pygments/pygments/filter.pyi
@@ -1,18 +1,20 @@
-from collections.abc import Iterable, Iterator
+from collections.abc import Callable, Generator, Iterable, Iterator
 from typing import Any
 
 from pygments.lexer import Lexer
 from pygments.token import _TokenType
 
-def apply_filters(stream, filters, lexer=None): ...
-def simplefilter(f): ...
+def apply_filters(
+    stream: Callable[[], Iterator[tuple[_TokenType, str]]], filters: list[Filter], lexer: Lexer | None = None
+) -> Generator[tuple[_TokenType, str], None, tuple[_TokenType, str]]: ...
+def simplefilter(f: Callable[..., Any]) -> type[FunctionFilter]: ...
 
 class Filter:
     options: Any
-    def __init__(self, **options) -> None: ...
+    def __init__(self, **options: Any) -> None: ...
     def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
 
 class FunctionFilter(Filter):
-    function: Any
-    def __init__(self, **options) -> None: ...
+    function: Callable[..., Any] | None = None
+    def __init__(self, **options: Any) -> None: ...
     def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
diff --git a/stubs/Pygments/pygments/filters/__init__.pyi b/stubs/Pygments/pygments/filters/__init__.pyi
index 05325c8c63d2..8cfff70f3b55 100644
--- a/stubs/Pygments/pygments/filters/__init__.pyi
+++ b/stubs/Pygments/pygments/filters/__init__.pyi
@@ -1,58 +1,58 @@
-from collections.abc import Generator, Iterable, Iterator
+from collections.abc import Callable, Generator, Iterable, Iterator
 from typing import Any
 
 from pygments.filter import Filter
 from pygments.lexer import Lexer
 from pygments.token import _TokenType
 
-def find_filter_class(filtername): ...
-def get_filter_by_name(filtername, **options): ...
+def find_filter_class(filtername: str) -> type[Filter]: ...
+def get_filter_by_name(filtername: str, **options: Any) -> Filter: ...
 def get_all_filters() -> Generator[str, None, None]: ...
 
 class CodeTagFilter(Filter):
     tag_re: Any
-    def __init__(self, **options) -> None: ...
+    def __init__(self, **options: Any) -> None: ...
     def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
 
 class SymbolFilter(Filter):
-    latex_symbols: Any
-    isabelle_symbols: Any
-    lang_map: Any
-    symbols: Any
-    def __init__(self, **options) -> None: ...
+    latex_symbols: dict[str, str]
+    isabelle_symbols: dict[str, str]
+    lang_map: dict[str, dict[str, str]]
+    symbols: dict[str, str]
+    def __init__(self, **options: Any) -> None: ...
     def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
 
 class KeywordCaseFilter(Filter):
-    convert: Any
-    def __init__(self, **options) -> None: ...
+    convert: Callable[[str], str]
+    def __init__(self, **options: Any) -> None: ...
     def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
 
 class NameHighlightFilter(Filter):
-    names: Any
-    tokentype: Any
-    def __init__(self, **options) -> None: ...
+    names: set[str]
+    tokentype: _TokenType
+    def __init__(self, **options: Any) -> None: ...
     def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
 
 class ErrorToken(Exception): ...
 
 class RaiseOnErrorTokenFilter(Filter):
-    exception: Any
-    def __init__(self, **options) -> None: ...
+    exception: type[Exception]
+    def __init__(self, **options: Any) -> None: ...
     def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
 
 class VisibleWhitespaceFilter(Filter):
-    wstt: Any
-    def __init__(self, **options) -> None: ...
+    wstt: bool
+    def __init__(self, **options: Any) -> None: ...
     def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
 
 class GobbleFilter(Filter):
-    n: Any
-    def __init__(self, **options) -> None: ...
-    def gobble(self, value, left): ...
+    n: int
+    def __init__(self, **options: Any) -> None: ...
+    def gobble(self, value: str, left: int) -> tuple[str, int]: ...
     def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
 
 class TokenMergeFilter(Filter):
-    def __init__(self, **options) -> None: ...
+    def __init__(self, **options: Any) -> None: ...
     def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
 
-FILTERS: Any
+FILTERS: dict[str, type[Filter]]
diff --git a/stubs/Pygments/pygments/formatter.pyi b/stubs/Pygments/pygments/formatter.pyi
index f13948978eb4..970ce589541d 100644
--- a/stubs/Pygments/pygments/formatter.pyi
+++ b/stubs/Pygments/pygments/formatter.pyi
@@ -1,22 +1,31 @@
+from _io import _TextIOBase
+from collections.abc import Iterator
+from types import GenericAlias
 from typing import Any, Generic, TypeVar, overload
 
+from pygments.style import Style
+from pygments.token import _TokenType
+
+__all__ = ["Formatter"]
+
 _T = TypeVar("_T", str, bytes)
 
 class Formatter(Generic[_T]):
-    name: Any
-    aliases: Any
-    filenames: Any
+    name: str | None = None
+    aliases: list[str]
+    filenames: list[str]
     unicodeoutput: bool
-    style: Any
-    full: Any
-    title: Any
-    encoding: Any
-    options: Any
+    style: type[Style]
+    full: bool
+    title: str
+    encoding: str
+    options: dict[str, Any]
     @overload
-    def __init__(self: Formatter[str], *, encoding: None = None, outencoding: None = None, **options) -> None: ...
+    def __init__(self: Formatter[str], *, encoding: None = None, outencoding: None = None, **options: Any) -> None: ...
     @overload
-    def __init__(self: Formatter[bytes], *, encoding: str, outencoding: None = None, **options) -> None: ...
+    def __init__(self: Formatter[bytes], *, encoding: str, outencoding: None = None, **options: Any) -> None: ...
     @overload
-    def __init__(self: Formatter[bytes], *, encoding: None = None, outencoding: str, **options) -> None: ...
-    def get_style_defs(self, arg: str = ""): ...
-    def format(self, tokensource, outfile): ...
+    def __init__(self: Formatter[bytes], *, encoding: None = None, outencoding: str, **options: Any) -> None: ...
+    def get_style_defs(self, arg: str = "") -> str: ...
+    def format(self, tokensource: Iterator[tuple[_TokenType, str]], outfile: _TextIOBase) -> None: ...
+    def __class_getitem__(cls, name: Any) -> GenericAlias: ...
diff --git a/stubs/Pygments/pygments/formatters/__init__.pyi b/stubs/Pygments/pygments/formatters/__init__.pyi
index 09fc071c813c..836dfd77da76 100644
--- a/stubs/Pygments/pygments/formatters/__init__.pyi
+++ b/stubs/Pygments/pygments/formatters/__init__.pyi
@@ -20,6 +20,7 @@ from .terminal import TerminalFormatter as TerminalFormatter
 from .terminal256 import Terminal256Formatter as Terminal256Formatter, TerminalTrueColorFormatter as TerminalTrueColorFormatter
 
 def get_all_formatters() -> Generator[type[Formatter[Any]], None, None]: ...
-def get_formatter_by_name(_alias, **options): ...
-def load_formatter_from_file(filename, formattername: str = "CustomFormatter", **options): ...
-def get_formatter_for_filename(fn, **options): ...
+def find_formatter_class(alias: str) -> type[Formatter[Any]]: ...
+def get_formatter_by_name(_alias: str, **options: Any) -> Formatter[Any]: ...
+def load_formatter_from_file(filename: str, formattername: str = "CustomFormatter", **options: Any) -> Formatter[Any]: ...
+def get_formatter_for_filename(fn: str, **options: Any) -> Formatter[Any]: ...
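
As a sanity check on the `highlight`/`format` overloads and the `Formatter[_T]` generic above, here is a small usage sketch (not part of the diff; `PythonLexer`, `HtmlFormatter`, and `TerminalFormatter` are just the usual Pygments classes) showing the return types a checker should now infer:

```python
import sys

from pygments import highlight
from pygments.formatters import HtmlFormatter, TerminalFormatter
from pygments.lexers import PythonLexer

code = "print('hi')"

html = highlight(code, PythonLexer(), HtmlFormatter())                  # Formatter[str] -> inferred as str
data = highlight(code, PythonLexer(), HtmlFormatter(encoding="utf-8"))  # encoding=... -> Formatter[bytes] -> bytes
highlight(code, PythonLexer(), TerminalFormatter(), sys.stdout)         # outfile given -> returns None
```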
diff --git a/stubs/Pygments/pygments/formatters/_mapping.pyi b/stubs/Pygments/pygments/formatters/_mapping.pyi
index a9e5864b9cfe..1ab6217ebe2b 100644
--- a/stubs/Pygments/pygments/formatters/_mapping.pyi
+++ b/stubs/Pygments/pygments/formatters/_mapping.pyi
@@ -1,3 +1 @@
-from typing import Any
-
-FORMATTERS: Any
+FORMATTERS: dict[str, tuple[str, str, tuple[str, ...], tuple[str, ...], str]]
diff --git a/stubs/Pygments/pygments/formatters/img.pyi b/stubs/Pygments/pygments/formatters/img.pyi
index 67310cfd2e2a..2623505303f0 100644
--- a/stubs/Pygments/pygments/formatters/img.pyi
+++ b/stubs/Pygments/pygments/formatters/img.pyi
@@ -1,4 +1,4 @@
-from typing import Any, TypeVar
+from typing import Any, NoReturn, TypeVar
 
 from pygments.formatter import Formatter
 
@@ -46,7 +46,8 @@ class ImageFormatter(Formatter[_T]):
     hl_lines: Any
     hl_color: Any
     drawables: Any
-    def get_style_defs(self, arg: str = "") -> None: ...
+    # raises NotImplementedError
+    def get_style_defs(self, arg: str = "") -> NoReturn: ...
     def format(self, tokensource, outfile) -> None: ...
 
 class GifImageFormatter(ImageFormatter[_T]):
diff --git a/stubs/Pygments/pygments/lexer.pyi b/stubs/Pygments/pygments/lexer.pyi
index 9d760b95b206..8144403a4d36 100644
--- a/stubs/Pygments/pygments/lexer.pyi
+++ b/stubs/Pygments/pygments/lexer.pyi
@@ -1,13 +1,34 @@
+import re
 from _typeshed import Incomplete
-from collections.abc import Iterable, Iterator, Sequence
+from collections.abc import Callable, Iterable, Iterator, Sequence
 from re import RegexFlag
-from typing import ClassVar
+from typing import Any, ClassVar
+from typing_extensions import TypeAlias
 
+from pygments.filter import Filter
 from pygments.token import _TokenType
 from pygments.util import Future
 
+__all__ = [
+    "Lexer",
+    "RegexLexer",
+    "ExtendedRegexLexer",
+    "DelegatingLexer",
+    "LexerContext",
+    "include",
+    "inherit",
+    "bygroups",
+    "using",
+    "this",
+    "default",
+    "words",
+    "line_re",
+]
+
+line_re: re.Pattern[str]
+
 class LexerMeta(type):
-    def __new__(cls, name, bases, d): ...
+    def __new__(cls, name: str, bases: tuple[type, ...], d: dict[str, Any]): ...
     def analyse_text(self, text: str) -> float: ...  # actually defined in class Lexer
     # ClassVars of Lexer, but same situation as with StyleMeta and Style
     name: str
@@ -19,83 +40,137 @@ class LexerMeta(type):
     url: str | None
 
 class Lexer(metaclass=LexerMeta):
-    options: Incomplete
-    stripnl: Incomplete
-    stripall: Incomplete
-    ensurenl: Incomplete
-    tabsize: Incomplete
-    encoding: Incomplete
-    filters: Incomplete
-    def __init__(self, **options) -> None: ...
-    def add_filter(self, filter_, **options) -> None: ...
+    options: dict[str, Any]
+    stripnl: bool
+    stripall: bool
+    ensurenl: bool
+    tabsize: int
+    encoding: str
+    filters: list[Filter]
+    def __init__(self, **options: Any) -> None: ...
+    def add_filter(self, filter_: str | Filter, **options: Any) -> None: ...
     def get_tokens(self, text: str, unfiltered: bool = False) -> Iterator[tuple[_TokenType, str]]: ...
     def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ...
 
 class DelegatingLexer(Lexer):
-    root_lexer: Incomplete
-    language_lexer: Incomplete
+    root_lexer: Lexer
+    language_lexer: Lexer
     needle: Incomplete
-    def __init__(self, _root_lexer, _language_lexer, _needle=..., **options) -> None: ...
+    def __init__(
+        self, _root_lexer: type[Lexer], _language_lexer: type[Lexer], _needle: _TokenType = ..., **options: Any
+    ) -> None: ...
     def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ...
 
 class include(str): ...
 class _inherit: ...
 
-inherit: Incomplete
+inherit: _inherit
 
-class combined(tuple[Incomplete, ...]):
-    def __new__(cls, *args): ...
-    def __init__(self, *args) -> None: ...
+class combined(tuple[str, ...]):
+    def __new__(cls, *args: str): ...
+    def __init__(self, *args: str) -> None: ...
 
 class _PseudoMatch:
-    def __init__(self, start, text) -> None: ...
-    def start(self, arg=None): ...
-    def end(self, arg=None): ...
-    def group(self, arg=None): ...
-    def groups(self): ...
-    def groupdict(self): ...
+    def __init__(self, start: int, text: str) -> None: ...
+    def start(self, arg=None) -> int: ...
+    def end(self, arg=None) -> int: ...
+    def group(self, arg=None) -> str: ...
+    def groups(self) -> tuple[str]: ...
+    def groupdict(self) -> dict[str, Any]: ...
 
-def bygroups(*args): ...
+def bygroups(
+    *args: _TokenType | Callable[[Lexer, _PseudoMatch, LexerContext], Iterator[tuple[int, _TokenType, str]]]
+) -> Callable[[Lexer, _PseudoMatch, LexerContext], Iterator[tuple[int, _TokenType, str]]]: ...
 
 class _This: ...
 
-this: Incomplete
+this: _This
 
-def using(_other, **kwargs): ...
+def using(
+    _other: _This | Lexer, **kwargs: Any
+) -> Callable[[Lexer, _PseudoMatch, LexerContext], Iterator[tuple[int, _TokenType, str]]]: ...
 
 class default:
-    state: Incomplete
-    def __init__(self, state) -> None: ...
+    state: str
+    def __init__(self, state: str) -> None: ...
 
 class words(Future):
-    words: Incomplete
-    prefix: Incomplete
-    suffix: Incomplete
-    def __init__(self, words, prefix: str = "", suffix: str = "") -> None: ...
-    def get(self): ...
+    words: Sequence[str]
+    prefix: str
+    suffix: str
+    def __init__(self, words: Sequence[str], prefix: str = "", suffix: str = "") -> None: ...
+    def get(self) -> str: ...
 
 class RegexLexerMeta(LexerMeta):
-    def process_tokendef(cls, name, tokendefs=None): ...
-    def get_tokendefs(cls): ...
-    def __call__(cls, *args, **kwds): ...
+    def process_tokendef(
+        cls,
+        name: str,
+        tokendefs: (
+            dict[
+                str,
+                list[
+                    tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]]]
+                    | tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]], str]
+                ],
+            ]
+            | None
+        ) = None,
+    ): ...
+    def get_tokendefs(
+        cls,
+    ) -> dict[
+        str,
+        list[
+            tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]]]
+            | tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]], str]
+        ],
+    ]: ...
+    def __call__(cls, *args: Any, **kwds: Any) -> Any: ...
+
+_TokenListSecondItemType: TypeAlias = (
+    _TokenType
+    | Iterator[tuple[int, _TokenType, str]]
+    | Callable[[Lexer, _PseudoMatch, LexerContext], Iterator[tuple[int, _TokenType, str]]]
+)
 
 class RegexLexer(Lexer, metaclass=RegexLexerMeta):
     flags: ClassVar[RegexFlag]
-    tokens: ClassVar[dict[str, list[Incomplete]]]
+    tokens: ClassVar[
+        dict[
+            str,
+            list[
+                tuple[str | words, _TokenListSecondItemType]
+                | tuple[str | words, _TokenListSecondItemType, str]
+                | include
+                | default
+            ],
+        ]
+    ]
     def get_tokens_unprocessed(self, text: str, stack: Iterable[str] = ("root",)) -> Iterator[tuple[int, _TokenType, str]]: ...
 
 class LexerContext:
-    text: Incomplete
-    pos: Incomplete
-    end: Incomplete
-    stack: Incomplete
-    def __init__(self, text, pos, stack=None, end=None) -> None: ...
+    text: str
+    pos: int
+    end: int
+    stack: list[str]
+    def __init__(self, text: str, pos: int, stack: list[str] | None = None, end: int | None = None) -> None: ...
 
 class ExtendedRegexLexer(RegexLexer):
     def get_tokens_unprocessed(  # type: ignore[override]
         self, text: str | None = None, context: LexerContext | None = None
     ) -> Iterator[tuple[int, _TokenType, str]]: ...
 
+def do_insertions(
+    insertions: list[tuple[int, list[tuple[int, _TokenType, str]]]],
+    tokens: dict[
+        str,
+        list[
+            tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]]]
+            | tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]], str]
+        ],
+    ],
+) -> Iterator[tuple[int, _TokenType, str]]: ...
+
 class ProfilingRegexLexerMeta(RegexLexerMeta): ...
 
 class ProfilingRegexLexer(RegexLexer, metaclass=ProfilingRegexLexerMeta):
diff --git a/stubs/Pygments/pygments/modeline.pyi b/stubs/Pygments/pygments/modeline.pyi
index e13780553c31..155a339258af 100644
--- a/stubs/Pygments/pygments/modeline.pyi
+++ b/stubs/Pygments/pygments/modeline.pyi
@@ -1 +1,3 @@
-def get_filetype_from_buffer(buf, max_lines: int = 5): ...
+__all__ = ["get_filetype_from_buffer"]
+
+def get_filetype_from_buffer(buf: str, max_lines: int = 5) -> str: ...
diff --git a/stubs/Pygments/pygments/regexopt.pyi b/stubs/Pygments/pygments/regexopt.pyi
index 0d5b90a96a2b..75a97ba1d9e0 100644
--- a/stubs/Pygments/pygments/regexopt.pyi
+++ b/stubs/Pygments/pygments/regexopt.pyi
@@ -1,8 +1,10 @@
-from typing import Any
+import re
+from collections.abc import Sequence
+from operator import itemgetter
 
-CS_ESCAPE: Any
-FIRST_ELEMENT: Any
+CS_ESCAPE: re.Pattern[str]
+FIRST_ELEMENT: itemgetter[int]
 
-def make_charset(letters): ...
-def regex_opt_inner(strings, open_paren): ...
-def regex_opt(strings, prefix: str = "", suffix: str = ""): ...
+def make_charset(letters: Sequence[str]) -> str: ...
+def regex_opt_inner(strings: Sequence[str], open_paren: str) -> str: ...
+def regex_opt(strings: Sequence[str], prefix: str = "", suffix: str = "") -> str: ...
diff --git a/stubs/Pygments/pygments/scanner.pyi b/stubs/Pygments/pygments/scanner.pyi
index df5c2c886fff..812d8d8d60b4 100644
--- a/stubs/Pygments/pygments/scanner.pyi
+++ b/stubs/Pygments/pygments/scanner.pyi
@@ -1,19 +1,19 @@
-from typing import Any
+import re
 
 class EndOfText(RuntimeError): ...
 
 class Scanner:
-    data: Any
-    data_length: Any
+    data: str
+    data_length: int
     start_pos: int
     pos: int
-    flags: Any
-    last: Any
-    match: Any
-    def __init__(self, text, flags: int = 0) -> None: ...
+    flags: int
+    last: str
+    match: str
+    def __init__(self, text: str, flags: int = 0) -> None: ...
     @property
-    def eos(self): ...
-    def check(self, pattern): ...
-    def test(self, pattern): ...
-    def scan(self, pattern): ...
+    def eos(self) -> bool: ...
+    def check(self, pattern: str) -> re.Match[str] | None: ...
+    def test(self, pattern: str) -> bool: ...
+    def scan(self, pattern: str) -> bool: ...
     def get_char(self) -> None: ...
diff --git a/stubs/Pygments/pygments/sphinxext.pyi b/stubs/Pygments/pygments/sphinxext.pyi
index cf43911fcfe6..add491fc8491 100644
--- a/stubs/Pygments/pygments/sphinxext.pyi
+++ b/stubs/Pygments/pygments/sphinxext.pyi
@@ -1,3 +1,4 @@
+from docutils.nodes import Node
 from docutils.parsers.rst import Directive
 
 MODULEDOC: str
@@ -7,8 +8,12 @@ FILTERDOC: str
 
 class PygmentsDoc(Directive):
     filenames: set[str]
+    def run(self) -> list[Node]: ...
+    def document_lexers_overview(self) -> str: ...
     def document_lexers(self) -> str: ...
     def document_formatters(self) -> str: ...
     def document_filters(self) -> str: ...
 
+# XXX: app is `sphinx.application.Sphinx`, but we cannot add `sphinx` as a requirement
+# since it isn't a requirement of `pygments` itself
 def setup(app) -> None: ...
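
To make the new `tokens: ClassVar[...]` shape in `lexer.pyi` concrete, here is a minimal sketch of a `RegexLexer` subclass (illustrative only, not taken from the diff) whose table exercises the tuple forms the annotation allows:

```python
from pygments.lexer import RegexLexer, bygroups
from pygments.token import Keyword, Name, String, Text, Whitespace

class TinyLexer(RegexLexer):
    name = "Tiny"
    aliases = ["tiny"]
    tokens = {
        "root": [
            (r"\s+", Whitespace),                                                 # (pattern, token)
            (r"(def)(\s+)(\w+)", bygroups(Keyword, Whitespace, Name.Function)),   # (pattern, callback)
            (r'"', String, "string"),                                             # (pattern, token, new state)
            (r".", Text),
        ],
        "string": [
            (r'[^"\n]+', String),
            (r'"', String, "#pop"),
        ],
    }
```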
diff --git a/stubs/Pygments/pygments/token.pyi b/stubs/Pygments/pygments/token.pyi
index cd63f2b61158..c68b96790a8e 100644
--- a/stubs/Pygments/pygments/token.pyi
+++ b/stubs/Pygments/pygments/token.pyi
@@ -27,8 +27,8 @@ Operator: _TokenType
 Comment: _TokenType
 Generic: _TokenType
 
-def is_token_subtype(ttype, other): ...
-def string_to_tokentype(s): ...
+def is_token_subtype(ttype: _TokenType, other: _TokenType) -> bool: ...
+def string_to_tokentype(s: str | _TokenType) -> _TokenType: ...
 
 # dict, but shouldn't be mutated
 STANDARD_TYPES: Mapping[_TokenType, str]
diff --git a/stubs/Pygments/pygments/unistring.pyi b/stubs/Pygments/pygments/unistring.pyi
index 6dd2b3fcea26..f12118b788e5 100644
--- a/stubs/Pygments/pygments/unistring.pyi
+++ b/stubs/Pygments/pygments/unistring.pyi
@@ -34,5 +34,5 @@ xid_continue: str
 xid_start: str
 cats: Any
 
-def combine(*args): ...
-def allexcept(*args): ...
+def combine(*args: str) -> str: ...
+def allexcept(*args: str) -> str: ...
diff --git a/stubs/Pygments/pygments/util.pyi b/stubs/Pygments/pygments/util.pyi
index 963a810038a6..111b62870b25 100644
--- a/stubs/Pygments/pygments/util.pyi
+++ b/stubs/Pygments/pygments/util.pyi
@@ -1,34 +1,41 @@
+import re
+from collections.abc import Callable, Iterable, Sequence
 from io import TextIOWrapper
-from typing import Any
+from typing import Any, TextIO, TypeVar
 
-split_path_re: Any
-doctype_lookup_re: Any
-tag_re: Any
-xml_decl_re: Any
+split_path_re: re.Pattern[str]
+doctype_lookup_re: re.Pattern[str]
+tag_re: re.Pattern[str]
+xml_decl_re: re.Pattern[str]
 
 class ClassNotFound(ValueError): ...
 class OptionError(Exception): ...
 
-def get_choice_opt(options, optname, allowed, default=None, normcase: bool = False): ...
-def get_bool_opt(options, optname, default=None): ...
-def get_int_opt(options, optname, default=None): ...
-def get_list_opt(options, optname, default=None): ...
-def docstring_headline(obj): ...
-def make_analysator(f): ...
-def shebang_matches(text, regex): ...
-def doctype_matches(text, regex): ...
-def html_doctype_matches(text): ...
-def looks_like_xml(text): ...
-def surrogatepair(c): ...
-def format_lines(var_name, seq, raw: bool = False, indent_level: int = 0): ...
-def duplicates_removed(it, already_seen=()): ...
+def get_choice_opt(
+    options: dict[str, Any], optname: str, allowed: Sequence[str], default: Any = None, normcase: bool = False
+) -> str: ...
+def get_bool_opt(options: dict[str, Any], optname: str, default: Any = None) -> bool: ...
+def get_int_opt(options: dict[str, Any], optname: str, default: Any = None) -> int: ...
+def get_list_opt(options: dict[str, Any], optname: str, default: Any = None) -> list[str]: ...
+def docstring_headline(obj: Callable[..., Any]) -> str: ...
+def make_analysator(f: Callable[[str], float]) -> Callable[[str], float]: ...
+def shebang_matches(text: str, regex: str) -> bool: ...
+def doctype_matches(text: str, regex: str) -> bool: ...
+def html_doctype_matches(text: str) -> bool: ...
+def looks_like_xml(text: str) -> bool: ...
+def surrogatepair(c: int) -> tuple[int, int]: ...
+def format_lines(var_name: str, seq: Sequence[str], raw: bool = False, indent_level: int = 0) -> str: ...
+
+_T = TypeVar("_T")
+
+def duplicates_removed(it: Iterable[_T], already_seen: tuple[_T, ...] = ()) -> list[_T]: ...
 
 class Future:
-    def get(self) -> None: ...
+    def get(self) -> str: ...
 
-def guess_decode(text): ...
-def guess_decode_from_terminal(text, term): ...
-def terminal_encoding(term): ...
+def guess_decode(text: bytes) -> tuple[bytes, str]: ...
+def guess_decode_from_terminal(text: bytes, term: TextIO | Any) -> tuple[bytes, str]: ...
+def terminal_encoding(term: TextIO | Any) -> str: ...
 
 class UnclosingTextIOWrapper(TextIOWrapper):
     def close(self) -> None: ...
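
Finally, a quick runtime sketch of the `Filter` API these stubs describe (again illustrative, not part of the change): a subclass only needs the `filter(self, lexer, stream)` generator, and `Lexer.add_filter` accepts either a filter name or a `Filter` instance, per the new annotations:

```python
from pygments.filter import Filter
from pygments.lexers import PythonLexer
from pygments.token import Comment

class DropComments(Filter):
    """Yield every token except comments; mirrors the stubbed filter() signature."""

    def filter(self, lexer, stream):
        for ttype, value in stream:
            if ttype not in Comment:   # token-subtype check, as in is_token_subtype
                yield ttype, value

lexer = PythonLexer()
lexer.add_filter(DropComments())       # add_filter(filter_: str | Filter, **options)
for ttype, value in lexer.get_tokens("x = 1  # a comment\n"):
    print(ttype, repr(value))
```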